1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
33 #include "hard-reg-set.h"
36 #include "insn-config.h"
42 #include "typeclass.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic.h"
54 #ifndef SLOW_UNALIGNED_ACCESS
55 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
58 #ifndef PAD_VARARGS_DOWN
59 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
62 /* Define the names of the builtin function types and codes. */
63 const char *const built_in_class_names
[4]
64 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
66 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
67 const char * built_in_names
[(int) END_BUILTINS
] =
69 #include "builtins.def"
73 /* Setup an array of _DECL trees, make sure each element is
74 initialized to NULL_TREE. */
75 tree built_in_decls
[(int) END_BUILTINS
];
76 /* Declarations used when constructing the builtin implicitly in the compiler.
77 It may be NULL_TREE when this is invalid (for instance runtime is not
78 required to implement the function call in all cases). */
79 tree implicit_built_in_decls
[(int) END_BUILTINS
];
81 static const char *c_getstr (tree
);
82 static rtx
c_readstr (const char *, enum machine_mode
);
83 static int target_char_cast (tree
, char *);
84 static rtx
get_memory_rtx (tree
, tree
);
85 static int apply_args_size (void);
86 static int apply_result_size (void);
87 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
88 static rtx
result_vector (int, rtx
);
90 static void expand_builtin_update_setjmp_buf (rtx
);
91 static void expand_builtin_prefetch (tree
);
92 static rtx
expand_builtin_apply_args (void);
93 static rtx
expand_builtin_apply_args_1 (void);
94 static rtx
expand_builtin_apply (rtx
, rtx
, rtx
);
95 static void expand_builtin_return (rtx
);
96 static enum type_class
type_to_class (tree
);
97 static rtx
expand_builtin_classify_type (tree
);
98 static void expand_errno_check (tree
, rtx
);
99 static rtx
expand_builtin_mathfn (tree
, rtx
, rtx
);
100 static rtx
expand_builtin_mathfn_2 (tree
, rtx
, rtx
);
101 static rtx
expand_builtin_mathfn_3 (tree
, rtx
, rtx
);
102 static rtx
expand_builtin_interclass_mathfn (tree
, rtx
, rtx
);
103 static rtx
expand_builtin_sincos (tree
);
104 static rtx
expand_builtin_cexpi (tree
, rtx
, rtx
);
105 static rtx
expand_builtin_int_roundingfn (tree
, rtx
);
106 static rtx
expand_builtin_int_roundingfn_2 (tree
, rtx
);
107 static rtx
expand_builtin_args_info (tree
);
108 static rtx
expand_builtin_next_arg (void);
109 static rtx
expand_builtin_va_start (tree
);
110 static rtx
expand_builtin_va_end (tree
);
111 static rtx
expand_builtin_va_copy (tree
);
112 static rtx
expand_builtin_memchr (tree
, rtx
, enum machine_mode
);
113 static rtx
expand_builtin_memcmp (tree
, rtx
, enum machine_mode
);
114 static rtx
expand_builtin_strcmp (tree
, rtx
, enum machine_mode
);
115 static rtx
expand_builtin_strncmp (tree
, rtx
, enum machine_mode
);
116 static rtx
builtin_memcpy_read_str (void *, HOST_WIDE_INT
, enum machine_mode
);
117 static rtx
expand_builtin_strcat (tree
, tree
, rtx
, enum machine_mode
);
118 static rtx
expand_builtin_strncat (tree
, rtx
, enum machine_mode
);
119 static rtx
expand_builtin_strspn (tree
, rtx
, enum machine_mode
);
120 static rtx
expand_builtin_strcspn (tree
, rtx
, enum machine_mode
);
121 static rtx
expand_builtin_memcpy (tree
, rtx
, enum machine_mode
);
122 static rtx
expand_builtin_mempcpy (tree
, rtx
, enum machine_mode
);
123 static rtx
expand_builtin_mempcpy_args (tree
, tree
, tree
, tree
, rtx
,
124 enum machine_mode
, int);
125 static rtx
expand_builtin_memmove (tree
, rtx
, enum machine_mode
, int);
126 static rtx
expand_builtin_memmove_args (tree
, tree
, tree
, tree
, rtx
,
127 enum machine_mode
, int);
128 static rtx
expand_builtin_bcopy (tree
, int);
129 static rtx
expand_builtin_strcpy (tree
, tree
, rtx
, enum machine_mode
);
130 static rtx
expand_builtin_strcpy_args (tree
, tree
, tree
, rtx
, enum machine_mode
);
131 static rtx
expand_builtin_stpcpy (tree
, rtx
, enum machine_mode
);
132 static rtx
expand_builtin_strncpy (tree
, rtx
, enum machine_mode
);
133 static rtx
builtin_memset_gen_str (void *, HOST_WIDE_INT
, enum machine_mode
);
134 static rtx
expand_builtin_memset (tree
, rtx
, enum machine_mode
);
135 static rtx
expand_builtin_memset_args (tree
, tree
, tree
, rtx
, enum machine_mode
, tree
);
136 static rtx
expand_builtin_bzero (tree
);
137 static rtx
expand_builtin_strlen (tree
, rtx
, enum machine_mode
);
138 static rtx
expand_builtin_strstr (tree
, rtx
, enum machine_mode
);
139 static rtx
expand_builtin_strpbrk (tree
, rtx
, enum machine_mode
);
140 static rtx
expand_builtin_strchr (tree
, rtx
, enum machine_mode
);
141 static rtx
expand_builtin_strrchr (tree
, rtx
, enum machine_mode
);
142 static rtx
expand_builtin_alloca (tree
, rtx
);
143 static rtx
expand_builtin_unop (enum machine_mode
, tree
, rtx
, rtx
, optab
);
144 static rtx
expand_builtin_frame_address (tree
, tree
);
145 static rtx
expand_builtin_fputs (tree
, rtx
, bool);
146 static rtx
expand_builtin_printf (tree
, rtx
, enum machine_mode
, bool);
147 static rtx
expand_builtin_fprintf (tree
, rtx
, enum machine_mode
, bool);
148 static rtx
expand_builtin_sprintf (tree
, rtx
, enum machine_mode
);
149 static tree
stabilize_va_list (tree
, int);
150 static rtx
expand_builtin_expect (tree
, rtx
);
151 static tree
fold_builtin_constant_p (tree
);
152 static tree
fold_builtin_expect (tree
, tree
);
153 static tree
fold_builtin_classify_type (tree
);
154 static tree
fold_builtin_strlen (tree
);
155 static tree
fold_builtin_inf (tree
, int);
156 static tree
fold_builtin_nan (tree
, tree
, int);
157 static tree
rewrite_call_expr (tree
, int, tree
, int, ...);
158 static bool validate_arg (const_tree
, enum tree_code code
);
159 static bool integer_valued_real_p (tree
);
160 static tree
fold_trunc_transparent_mathfn (tree
, tree
);
161 static bool readonly_data_expr (tree
);
162 static rtx
expand_builtin_fabs (tree
, rtx
, rtx
);
163 static rtx
expand_builtin_signbit (tree
, rtx
);
164 static tree
fold_builtin_sqrt (tree
, tree
);
165 static tree
fold_builtin_cbrt (tree
, tree
);
166 static tree
fold_builtin_pow (tree
, tree
, tree
, tree
);
167 static tree
fold_builtin_powi (tree
, tree
, tree
, tree
);
168 static tree
fold_builtin_cos (tree
, tree
, tree
);
169 static tree
fold_builtin_cosh (tree
, tree
, tree
);
170 static tree
fold_builtin_tan (tree
, tree
);
171 static tree
fold_builtin_trunc (tree
, tree
);
172 static tree
fold_builtin_floor (tree
, tree
);
173 static tree
fold_builtin_ceil (tree
, tree
);
174 static tree
fold_builtin_round (tree
, tree
);
175 static tree
fold_builtin_int_roundingfn (tree
, tree
);
176 static tree
fold_builtin_bitop (tree
, tree
);
177 static tree
fold_builtin_memory_op (tree
, tree
, tree
, tree
, bool, int);
178 static tree
fold_builtin_strchr (tree
, tree
, tree
);
179 static tree
fold_builtin_memchr (tree
, tree
, tree
, tree
);
180 static tree
fold_builtin_memcmp (tree
, tree
, tree
);
181 static tree
fold_builtin_strcmp (tree
, tree
);
182 static tree
fold_builtin_strncmp (tree
, tree
, tree
);
183 static tree
fold_builtin_signbit (tree
, tree
);
184 static tree
fold_builtin_copysign (tree
, tree
, tree
, tree
);
185 static tree
fold_builtin_isascii (tree
);
186 static tree
fold_builtin_toascii (tree
);
187 static tree
fold_builtin_isdigit (tree
);
188 static tree
fold_builtin_fabs (tree
, tree
);
189 static tree
fold_builtin_abs (tree
, tree
);
190 static tree
fold_builtin_unordered_cmp (tree
, tree
, tree
, enum tree_code
,
192 static tree
fold_builtin_n (tree
, tree
*, int, bool);
193 static tree
fold_builtin_0 (tree
, bool);
194 static tree
fold_builtin_1 (tree
, tree
, bool);
195 static tree
fold_builtin_2 (tree
, tree
, tree
, bool);
196 static tree
fold_builtin_3 (tree
, tree
, tree
, tree
, bool);
197 static tree
fold_builtin_4 (tree
, tree
, tree
, tree
, tree
, bool);
198 static tree
fold_builtin_varargs (tree
, tree
, bool);
200 static tree
fold_builtin_strpbrk (tree
, tree
, tree
);
201 static tree
fold_builtin_strstr (tree
, tree
, tree
);
202 static tree
fold_builtin_strrchr (tree
, tree
, tree
);
203 static tree
fold_builtin_strcat (tree
, tree
);
204 static tree
fold_builtin_strncat (tree
, tree
, tree
);
205 static tree
fold_builtin_strspn (tree
, tree
);
206 static tree
fold_builtin_strcspn (tree
, tree
);
207 static tree
fold_builtin_sprintf (tree
, tree
, tree
, int);
209 static rtx
expand_builtin_object_size (tree
);
210 static rtx
expand_builtin_memory_chk (tree
, rtx
, enum machine_mode
,
211 enum built_in_function
);
212 static void maybe_emit_chk_warning (tree
, enum built_in_function
);
213 static void maybe_emit_sprintf_chk_warning (tree
, enum built_in_function
);
214 static void maybe_emit_free_warning (tree
);
215 static tree
fold_builtin_object_size (tree
, tree
);
216 static tree
fold_builtin_strcat_chk (tree
, tree
, tree
, tree
);
217 static tree
fold_builtin_strncat_chk (tree
, tree
, tree
, tree
, tree
);
218 static tree
fold_builtin_sprintf_chk (tree
, enum built_in_function
);
219 static tree
fold_builtin_printf (tree
, tree
, tree
, bool, enum built_in_function
);
220 static tree
fold_builtin_fprintf (tree
, tree
, tree
, tree
, bool,
221 enum built_in_function
);
222 static bool init_target_chars (void);
224 static unsigned HOST_WIDE_INT target_newline
;
225 static unsigned HOST_WIDE_INT target_percent
;
226 static unsigned HOST_WIDE_INT target_c
;
227 static unsigned HOST_WIDE_INT target_s
;
228 static char target_percent_c
[3];
229 static char target_percent_s
[3];
230 static char target_percent_s_newline
[4];
231 static tree
do_mpfr_arg1 (tree
, tree
, int (*)(mpfr_ptr
, mpfr_srcptr
, mp_rnd_t
),
232 const REAL_VALUE_TYPE
*, const REAL_VALUE_TYPE
*, bool);
233 static tree
do_mpfr_arg2 (tree
, tree
, tree
,
234 int (*)(mpfr_ptr
, mpfr_srcptr
, mpfr_srcptr
, mp_rnd_t
));
235 static tree
do_mpfr_arg3 (tree
, tree
, tree
, tree
,
236 int (*)(mpfr_ptr
, mpfr_srcptr
, mpfr_srcptr
, mpfr_srcptr
, mp_rnd_t
));
237 static tree
do_mpfr_sincos (tree
, tree
, tree
);
238 static tree
do_mpfr_bessel_n (tree
, tree
, tree
,
239 int (*)(mpfr_ptr
, long, mpfr_srcptr
, mp_rnd_t
),
240 const REAL_VALUE_TYPE
*, bool);
241 static tree
do_mpfr_remquo (tree
, tree
, tree
);
242 static tree
do_mpfr_lgamma_r (tree
, tree
, tree
);
244 /* Return true if NODE should be considered for inline expansion regardless
245 of the optimization level. This means whenever a function is invoked with
246 its "internal" name, which normally contains the prefix "__builtin". */
/* NOTE(review): this chunk is a damaged extraction -- the embedded original
   line numbers jump (248 -> 250 -> 251 -> 253), so the function's braces and
   both return statements are missing here.  The surviving tokens are kept
   byte-identical below; recover the dropped lines before editing logic.  */
248 static bool called_as_built_in (tree node
)
/* Look up the declared identifier text of NODE.  Assumes DECL_NAME (node)
   is non-null -- TODO confirm against callers.  */
250 const char *name
= IDENTIFIER_POINTER (DECL_NAME (node
));
/* "__builtin_"-prefixed names are the compiler's internal spellings.  */
251 if (strncmp (name
, "__builtin_", 10) == 0)
/* The "__sync_" atomic primitives are likewise treated as built-in names.  */
253 if (strncmp (name
, "__sync_", 7) == 0)
258 /* Return the alignment in bits of EXP, an object.
259 Don't return more than MAX_ALIGN no matter what, ALIGN is the inital
260 guessed alignment e.g. from type alignment. */
263 get_object_alignment (tree exp
, unsigned int align
, unsigned int max_align
)
268 if (handled_component_p (exp
))
270 HOST_WIDE_INT bitsize
, bitpos
;
272 enum machine_mode mode
;
273 int unsignedp
, volatilep
;
275 exp
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
276 &mode
, &unsignedp
, &volatilep
, true);
278 inner
= MIN (inner
, (unsigned) (bitpos
& -bitpos
));
283 if (TREE_CODE (offset
) == PLUS_EXPR
)
285 next_offset
= TREE_OPERAND (offset
, 0);
286 offset
= TREE_OPERAND (offset
, 1);
290 if (host_integerp (offset
, 1))
292 /* Any overflow in calculating offset_bits won't change
295 = ((unsigned) tree_low_cst (offset
, 1) * BITS_PER_UNIT
);
298 inner
= MIN (inner
, (offset_bits
& -offset_bits
));
300 else if (TREE_CODE (offset
) == MULT_EXPR
301 && host_integerp (TREE_OPERAND (offset
, 1), 1))
303 /* Any overflow in calculating offset_factor won't change
305 unsigned offset_factor
306 = ((unsigned) tree_low_cst (TREE_OPERAND (offset
, 1), 1)
310 inner
= MIN (inner
, (offset_factor
& -offset_factor
));
314 inner
= MIN (inner
, BITS_PER_UNIT
);
317 offset
= next_offset
;
321 align
= MIN (inner
, DECL_ALIGN (exp
));
322 #ifdef CONSTANT_ALIGNMENT
323 else if (CONSTANT_CLASS_P (exp
))
324 align
= MIN (inner
, (unsigned)CONSTANT_ALIGNMENT (exp
, align
));
326 else if (TREE_CODE (exp
) == VIEW_CONVERT_EXPR
327 || TREE_CODE (exp
) == INDIRECT_REF
)
328 align
= MIN (TYPE_ALIGN (TREE_TYPE (exp
)), inner
);
330 align
= MIN (align
, inner
);
331 return MIN (align
, max_align
);
334 /* Return the alignment in bits of EXP, a pointer valued expression.
335 But don't return more than MAX_ALIGN no matter what.
336 The alignment returned is, by default, the alignment of the thing that
337 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
339 Otherwise, look at the expression to see if we can do better, i.e., if the
340 expression is actually pointing at an object whose alignment is tighter. */
/* NOTE(review): extraction damage -- embedded original numbering jumps
   (348 -> 351 -> 354, 378 -> 382, 382 -> 386), so returns, braces, case
   labels and loop bodies are missing.  Code kept byte-identical below.  */
343 get_pointer_alignment (tree exp
, unsigned int max_align
)
345 unsigned int align
, inner
;
347 /* We rely on TER to compute accurate alignment information. */
348 if (!(optimize
&& flag_tree_ter
))
/* A non-pointer-typed EXP carries no alignment information here.  */
351 if (!POINTER_TYPE_P (TREE_TYPE (exp
)))
/* Start from the alignment of the pointed-to type, capped at MAX_ALIGN.  */
354 align
= TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp
)));
355 align
= MIN (align
, max_align
);
/* Walk the expression to tighten the estimate where possible.  */
359 switch (TREE_CODE (exp
))
362 exp
= TREE_OPERAND (exp
, 0);
363 if (! POINTER_TYPE_P (TREE_TYPE (exp
)))
366 inner
= TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp
)));
367 align
= MIN (inner
, max_align
);
370 case POINTER_PLUS_EXPR
:
371 /* If sum of pointer + int, restrict our maximum alignment to that
372 imposed by the integer. If not, we can't do any better than
374 if (! host_integerp (TREE_OPERAND (exp
, 1), 1))
/* Shrink MAX_ALIGN until the constant byte offset is a multiple of it.  */
377 while (((tree_low_cst (TREE_OPERAND (exp
, 1), 1))
378 & (max_align
/ BITS_PER_UNIT
- 1))
382 exp
= TREE_OPERAND (exp
, 0);
386 /* See what we are pointing at and look at its alignment. */
387 return get_object_alignment (TREE_OPERAND (exp
, 0), align
, max_align
);
395 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
396 way, because it could contain a zero byte in the middle.
397 TREE_STRING_LENGTH is the size of the character array, not the string.
399 ONLY_VALUE should be nonzero if the result is not going to be emitted
400 into the instruction stream and zero if it is going to be expanded.
401 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
402 is returned, otherwise NULL, since
403 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
404 evaluate the side-effects.
406 The value returned is of type `ssizetype'.
408 Unfortunately, string_constant can't access the values of const char
409 arrays with initializers, so neither can we do so here. */
412 c_strlen (tree src
, int only_value
)
415 HOST_WIDE_INT offset
;
420 if (TREE_CODE (src
) == COND_EXPR
421 && (only_value
|| !TREE_SIDE_EFFECTS (TREE_OPERAND (src
, 0))))
425 len1
= c_strlen (TREE_OPERAND (src
, 1), only_value
);
426 len2
= c_strlen (TREE_OPERAND (src
, 2), only_value
);
427 if (tree_int_cst_equal (len1
, len2
))
431 if (TREE_CODE (src
) == COMPOUND_EXPR
432 && (only_value
|| !TREE_SIDE_EFFECTS (TREE_OPERAND (src
, 0))))
433 return c_strlen (TREE_OPERAND (src
, 1), only_value
);
435 src
= string_constant (src
, &offset_node
);
439 max
= TREE_STRING_LENGTH (src
) - 1;
440 ptr
= TREE_STRING_POINTER (src
);
442 if (offset_node
&& TREE_CODE (offset_node
) != INTEGER_CST
)
444 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
445 compute the offset to the following null if we don't know where to
446 start searching for it. */
449 for (i
= 0; i
< max
; i
++)
453 /* We don't know the starting offset, but we do know that the string
454 has no internal zero bytes. We can assume that the offset falls
455 within the bounds of the string; otherwise, the programmer deserves
456 what he gets. Subtract the offset from the length of the string,
457 and return that. This would perhaps not be valid if we were dealing
458 with named arrays in addition to literal string constants. */
460 return size_diffop (size_int (max
), offset_node
);
463 /* We have a known offset into the string. Start searching there for
464 a null character if we can represent it as a single HOST_WIDE_INT. */
465 if (offset_node
== 0)
467 else if (! host_integerp (offset_node
, 0))
470 offset
= tree_low_cst (offset_node
, 0);
472 /* If the offset is known to be out of bounds, warn, and call strlen at
474 if (offset
< 0 || offset
> max
)
476 /* Suppress multiple warnings for propagated constant strings. */
477 if (! TREE_NO_WARNING (src
))
479 warning (0, "offset outside bounds of constant string");
480 TREE_NO_WARNING (src
) = 1;
485 /* Use strlen to search for the first zero byte. Since any strings
486 constructed with build_string will have nulls appended, we win even
487 if we get handed something like (char[4])"abcd".
489 Since OFFSET is our starting index into the string, no further
490 calculation is needed. */
491 return ssize_int (strlen (ptr
+ offset
));
494 /* Return a char pointer for a C string if it is a string constant
495 or sum of string constant and integer constant. */
/* NOTE(review): the function header itself (original lines 497-501,
   presumably "static const char * c_getstr (tree src)" per the forward
   declaration earlier in this file) is missing from this extraction; only
   the body fragments below survived, kept byte-identical.  */
/* Strip SRC down to its underlying STRING_CST, collecting any constant
   offset into offset_node.  */
502 src
= string_constant (src
, &offset_node
);
/* No offset: the string's own character data is the answer.  */
506 if (offset_node
== 0)
507 return TREE_STRING_POINTER (src
);
/* Reject offsets that are non-constant, negative, or past the last byte.  */
508 else if (!host_integerp (offset_node
, 1)
509 || compare_tree_int (offset_node
, TREE_STRING_LENGTH (src
) - 1) > 0)
/* In-bounds constant offset: point into the string data.  */
512 return TREE_STRING_POINTER (src
) + tree_low_cst (offset_node
, 1);
515 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
516 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
/* NOTE(review): extraction damage -- the original numbering jumps
   (519 -> 525, 530 -> 533, 539 -> 542), so the return type, local
   declarations (c[], i, j, ch) and initialization are missing here.
   Surviving tokens kept byte-identical.  */
519 c_readstr (const char *str
, enum machine_mode mode
)
/* Only integer modes make sense for reading raw string bytes.  */
525 gcc_assert (GET_MODE_CLASS (mode
) == MODE_INT
);
/* Pack one byte of STR per iteration into the accumulator words.  */
530 for (i
= 0; i
< GET_MODE_SIZE (mode
); i
++)
/* Compute J, the target bit position for byte I, honoring target
   word and byte endianness.  */
533 if (WORDS_BIG_ENDIAN
)
534 j
= GET_MODE_SIZE (mode
) - i
- 1;
535 if (BYTES_BIG_ENDIAN
!= WORDS_BIG_ENDIAN
536 && GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
537 j
= j
+ UNITS_PER_WORD
- 2 * (j
% UNITS_PER_WORD
) - 1;
/* The constant is built from at most two host wide ints.  */
539 gcc_assert (j
<= 2 * HOST_BITS_PER_WIDE_INT
);
/* Read the byte as unsigned to avoid sign extension on shift.  */
542 ch
= (unsigned char) str
[i
];
543 c
[j
/ HOST_BITS_PER_WIDE_INT
] |= ch
<< (j
% HOST_BITS_PER_WIDE_INT
);
/* Fold the two accumulated words into an immediate RTX constant.  */
545 return immed_double_const (c
[0], c
[1], mode
);
548 /* Cast a target constant CST to target CHAR and if that value fits into
549 host char type, return zero and put that value into variable pointed to by
/* NOTE(review): extraction damage -- the return type line, the braces,
   the "return 1;" failure paths, the hostval computation/comparison and
   the final store through P are missing (numbering jumps 549 -> 553,
   558 -> 561, 563 -> 566, 567 -> end).  Code kept byte-identical.  */
553 target_char_cast (tree cst
, char *p
)
555 unsigned HOST_WIDE_INT val
, hostval
;
/* Bail out when CST is not a constant we can read, or the target char
   is wider than a host wide int.  */
557 if (!host_integerp (cst
, 1)
558 || CHAR_TYPE_SIZE
> HOST_BITS_PER_WIDE_INT
)
561 val
= tree_low_cst (cst
, 1);
/* Mask VAL down to the target character width.  */
562 if (CHAR_TYPE_SIZE
< HOST_BITS_PER_WIDE_INT
)
563 val
&= (((unsigned HOST_WIDE_INT
) 1) << CHAR_TYPE_SIZE
) - 1;
/* Mask HOSTVAL down to the host character width for comparison.  */
566 if (HOST_BITS_PER_CHAR
< HOST_BITS_PER_WIDE_INT
)
567 hostval
&= (((unsigned HOST_WIDE_INT
) 1) << HOST_BITS_PER_CHAR
) - 1;
576 /* Similar to save_expr, but assumes that arbitrary code is not executed
577 in between the multiple evaluations. In particular, we assume that a
578 non-addressable local variable will not be modified. */
/* NOTE(review): extraction dropped the return type line and the early
   "return exp;" for the safe-to-reevaluate case (numbering jumps
   578 -> 581 and 585 -> 588).  Code kept byte-identical below.  */
581 builtin_save_expr (tree exp
)
/* A non-addressable PARM_DECL, or a non-static non-addressable VAR_DECL,
   cannot change between evaluations, so no temporary is needed.  */
583 if (TREE_ADDRESSABLE (exp
) == 0
584 && (TREE_CODE (exp
) == PARM_DECL
585 || (TREE_CODE (exp
) == VAR_DECL
&& !TREE_STATIC (exp
))))
/* Otherwise fall back to the generic save_expr temporary.  */
588 return save_expr (exp
);
591 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
592 times to get the address of either a higher stack frame, or a return
593 address located within it (depending on FNDECL_CODE). */
596 expand_builtin_return_addr (enum built_in_function fndecl_code
, int count
)
600 #ifdef INITIAL_FRAME_ADDRESS_RTX
601 rtx tem
= INITIAL_FRAME_ADDRESS_RTX
;
605 /* For a zero count with __builtin_return_address, we don't care what
606 frame address we return, because target-specific definitions will
607 override us. Therefore frame pointer elimination is OK, and using
608 the soft frame pointer is OK.
610 For a nonzero count, or a zero count with __builtin_frame_address,
611 we require a stable offset from the current frame pointer to the
612 previous one, so we must use the hard frame pointer, and
613 we must disable frame pointer elimination. */
614 if (count
== 0 && fndecl_code
== BUILT_IN_RETURN_ADDRESS
)
615 tem
= frame_pointer_rtx
;
618 tem
= hard_frame_pointer_rtx
;
620 /* Tell reload not to eliminate the frame pointer. */
621 crtl
->accesses_prior_frames
= 1;
625 /* Some machines need special handling before we can access
626 arbitrary frames. For example, on the SPARC, we must first flush
627 all register windows to the stack. */
628 #ifdef SETUP_FRAME_ADDRESSES
630 SETUP_FRAME_ADDRESSES ();
633 /* On the SPARC, the return address is not in the frame, it is in a
634 register. There is no way to access it off of the current frame
635 pointer, but it can be accessed off the previous frame pointer by
636 reading the value from the register window save area. */
637 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
638 if (fndecl_code
== BUILT_IN_RETURN_ADDRESS
)
642 /* Scan back COUNT frames to the specified frame. */
643 for (i
= 0; i
< count
; i
++)
645 /* Assume the dynamic chain pointer is in the word that the
646 frame address points to, unless otherwise specified. */
647 #ifdef DYNAMIC_CHAIN_ADDRESS
648 tem
= DYNAMIC_CHAIN_ADDRESS (tem
);
650 tem
= memory_address (Pmode
, tem
);
651 tem
= gen_frame_mem (Pmode
, tem
);
652 tem
= copy_to_reg (tem
);
655 /* For __builtin_frame_address, return what we've got. But, on
656 the SPARC for example, we may have to add a bias. */
657 if (fndecl_code
== BUILT_IN_FRAME_ADDRESS
)
658 #ifdef FRAME_ADDR_RTX
659 return FRAME_ADDR_RTX (tem
);
664 /* For __builtin_return_address, get the return address from that frame. */
665 #ifdef RETURN_ADDR_RTX
666 tem
= RETURN_ADDR_RTX (count
, tem
);
668 tem
= memory_address (Pmode
,
669 plus_constant (tem
, GET_MODE_SIZE (Pmode
)));
670 tem
= gen_frame_mem (Pmode
, tem
);
675 /* Alias set used for setjmp buffer. */
676 static alias_set_type setjmp_alias_set
= -1;
678 /* Construct the leading half of a __builtin_setjmp call. Control will
679 return to RECEIVER_LABEL. This is also called directly by the SJLJ
680 exception handling code. */
683 expand_builtin_setjmp_setup (rtx buf_addr
, rtx receiver_label
)
685 enum machine_mode sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
689 if (setjmp_alias_set
== -1)
690 setjmp_alias_set
= new_alias_set ();
692 buf_addr
= convert_memory_address (Pmode
, buf_addr
);
694 buf_addr
= force_reg (Pmode
, force_operand (buf_addr
, NULL_RTX
));
696 /* We store the frame pointer and the address of receiver_label in
697 the buffer and use the rest of it for the stack save area, which
698 is machine-dependent. */
700 mem
= gen_rtx_MEM (Pmode
, buf_addr
);
701 set_mem_alias_set (mem
, setjmp_alias_set
);
702 emit_move_insn (mem
, targetm
.builtin_setjmp_frame_value ());
704 mem
= gen_rtx_MEM (Pmode
, plus_constant (buf_addr
, GET_MODE_SIZE (Pmode
))),
705 set_mem_alias_set (mem
, setjmp_alias_set
);
707 emit_move_insn (validize_mem (mem
),
708 force_reg (Pmode
, gen_rtx_LABEL_REF (Pmode
, receiver_label
)));
710 stack_save
= gen_rtx_MEM (sa_mode
,
711 plus_constant (buf_addr
,
712 2 * GET_MODE_SIZE (Pmode
)));
713 set_mem_alias_set (stack_save
, setjmp_alias_set
);
714 emit_stack_save (SAVE_NONLOCAL
, &stack_save
, NULL_RTX
);
716 /* If there is further processing to do, do it. */
717 #ifdef HAVE_builtin_setjmp_setup
718 if (HAVE_builtin_setjmp_setup
)
719 emit_insn (gen_builtin_setjmp_setup (buf_addr
));
722 /* Tell optimize_save_area_alloca that extra work is going to
723 need to go on during alloca. */
724 cfun
->calls_setjmp
= 1;
726 /* We have a nonlocal label. */
727 cfun
->has_nonlocal_label
= 1;
730 /* Construct the trailing part of a __builtin_setjmp call. This is
731 also called directly by the SJLJ exception handling code. */
734 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED
)
736 /* Clobber the FP when we get here, so we have to make sure it's
737 marked as used by this function. */
738 emit_use (hard_frame_pointer_rtx
);
740 /* Mark the static chain as clobbered here so life information
741 doesn't get messed up for it. */
742 emit_clobber (static_chain_rtx
);
744 /* Now put in the code to restore the frame pointer, and argument
745 pointer, if needed. */
746 #ifdef HAVE_nonlocal_goto
747 if (! HAVE_nonlocal_goto
)
750 emit_move_insn (virtual_stack_vars_rtx
, hard_frame_pointer_rtx
);
751 /* This might change the hard frame pointer in ways that aren't
752 apparent to early optimization passes, so force a clobber. */
753 emit_clobber (hard_frame_pointer_rtx
);
756 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
757 if (fixed_regs
[ARG_POINTER_REGNUM
])
759 #ifdef ELIMINABLE_REGS
761 static const struct elims
{const int from
, to
;} elim_regs
[] = ELIMINABLE_REGS
;
763 for (i
= 0; i
< ARRAY_SIZE (elim_regs
); i
++)
764 if (elim_regs
[i
].from
== ARG_POINTER_REGNUM
765 && elim_regs
[i
].to
== HARD_FRAME_POINTER_REGNUM
)
768 if (i
== ARRAY_SIZE (elim_regs
))
771 /* Now restore our arg pointer from the address at which it
772 was saved in our stack frame. */
773 emit_move_insn (crtl
->args
.internal_arg_pointer
,
774 copy_to_reg (get_arg_pointer_save_area ()));
779 #ifdef HAVE_builtin_setjmp_receiver
780 if (HAVE_builtin_setjmp_receiver
)
781 emit_insn (gen_builtin_setjmp_receiver (receiver_label
));
784 #ifdef HAVE_nonlocal_goto_receiver
785 if (HAVE_nonlocal_goto_receiver
)
786 emit_insn (gen_nonlocal_goto_receiver ());
791 /* We must not allow the code we just generated to be reordered by
792 scheduling. Specifically, the update of the frame pointer must
793 happen immediately, not later. */
794 emit_insn (gen_blockage ());
797 /* __builtin_longjmp is passed a pointer to an array of five words (not
798 all will be used on all machines). It operates similarly to the C
799 library function of the same name, but is more efficient. Much of
800 the code below is copied from the handling of non-local gotos. */
803 expand_builtin_longjmp (rtx buf_addr
, rtx value
)
805 rtx fp
, lab
, stack
, insn
, last
;
806 enum machine_mode sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
808 /* DRAP is needed for stack realign if longjmp is expanded to current
810 if (SUPPORTS_STACK_ALIGNMENT
)
811 crtl
->need_drap
= true;
813 if (setjmp_alias_set
== -1)
814 setjmp_alias_set
= new_alias_set ();
816 buf_addr
= convert_memory_address (Pmode
, buf_addr
);
818 buf_addr
= force_reg (Pmode
, buf_addr
);
820 /* We used to store value in static_chain_rtx, but that fails if pointers
821 are smaller than integers. We instead require that the user must pass
822 a second argument of 1, because that is what builtin_setjmp will
823 return. This also makes EH slightly more efficient, since we are no
824 longer copying around a value that we don't care about. */
825 gcc_assert (value
== const1_rtx
);
827 last
= get_last_insn ();
828 #ifdef HAVE_builtin_longjmp
829 if (HAVE_builtin_longjmp
)
830 emit_insn (gen_builtin_longjmp (buf_addr
));
834 fp
= gen_rtx_MEM (Pmode
, buf_addr
);
835 lab
= gen_rtx_MEM (Pmode
, plus_constant (buf_addr
,
836 GET_MODE_SIZE (Pmode
)));
838 stack
= gen_rtx_MEM (sa_mode
, plus_constant (buf_addr
,
839 2 * GET_MODE_SIZE (Pmode
)));
840 set_mem_alias_set (fp
, setjmp_alias_set
);
841 set_mem_alias_set (lab
, setjmp_alias_set
);
842 set_mem_alias_set (stack
, setjmp_alias_set
);
844 /* Pick up FP, label, and SP from the block and jump. This code is
845 from expand_goto in stmt.c; see there for detailed comments. */
846 #ifdef HAVE_nonlocal_goto
847 if (HAVE_nonlocal_goto
)
848 /* We have to pass a value to the nonlocal_goto pattern that will
849 get copied into the static_chain pointer, but it does not matter
850 what that value is, because builtin_setjmp does not use it. */
851 emit_insn (gen_nonlocal_goto (value
, lab
, stack
, fp
));
855 lab
= copy_to_reg (lab
);
857 emit_clobber (gen_rtx_MEM (BLKmode
, gen_rtx_SCRATCH (VOIDmode
)));
858 emit_clobber (gen_rtx_MEM (BLKmode
, hard_frame_pointer_rtx
));
860 emit_move_insn (hard_frame_pointer_rtx
, fp
);
861 emit_stack_restore (SAVE_NONLOCAL
, stack
, NULL_RTX
);
863 emit_use (hard_frame_pointer_rtx
);
864 emit_use (stack_pointer_rtx
);
865 emit_indirect_jump (lab
);
869 /* Search backwards and mark the jump insn as a non-local goto.
870 Note that this precludes the use of __builtin_longjmp to a
871 __builtin_setjmp target in the same function. However, we've
872 already cautioned the user that these functions are for
873 internal exception handling use only. */
874 for (insn
= get_last_insn (); insn
; insn
= PREV_INSN (insn
))
876 gcc_assert (insn
!= last
);
880 add_reg_note (insn
, REG_NON_LOCAL_GOTO
, const0_rtx
);
883 else if (CALL_P (insn
))
888 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
889 and the address of the save area. */
892 expand_builtin_nonlocal_goto (tree exp
)
894 tree t_label
, t_save_area
;
895 rtx r_label
, r_save_area
, r_fp
, r_sp
, insn
;
897 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
900 t_label
= CALL_EXPR_ARG (exp
, 0);
901 t_save_area
= CALL_EXPR_ARG (exp
, 1);
903 r_label
= expand_normal (t_label
);
904 r_label
= convert_memory_address (Pmode
, r_label
);
905 r_save_area
= expand_normal (t_save_area
);
906 r_save_area
= convert_memory_address (Pmode
, r_save_area
);
907 /* Copy the address of the save location to a register just in case it was based
908 on the frame pointer. */
909 r_save_area
= copy_to_reg (r_save_area
);
910 r_fp
= gen_rtx_MEM (Pmode
, r_save_area
);
911 r_sp
= gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL
),
912 plus_constant (r_save_area
, GET_MODE_SIZE (Pmode
)));
914 crtl
->has_nonlocal_goto
= 1;
916 #ifdef HAVE_nonlocal_goto
917 /* ??? We no longer need to pass the static chain value, afaik. */
918 if (HAVE_nonlocal_goto
)
919 emit_insn (gen_nonlocal_goto (const0_rtx
, r_label
, r_sp
, r_fp
));
923 r_label
= copy_to_reg (r_label
);
925 emit_clobber (gen_rtx_MEM (BLKmode
, gen_rtx_SCRATCH (VOIDmode
)));
926 emit_clobber (gen_rtx_MEM (BLKmode
, hard_frame_pointer_rtx
));
928 /* Restore frame pointer for containing function.
929 This sets the actual hard register used for the frame pointer
930 to the location of the function's incoming static chain info.
931 The non-local goto handler will then adjust it to contain the
932 proper value and reload the argument pointer, if needed. */
933 emit_move_insn (hard_frame_pointer_rtx
, r_fp
);
934 emit_stack_restore (SAVE_NONLOCAL
, r_sp
, NULL_RTX
);
936 /* USE of hard_frame_pointer_rtx added for consistency;
937 not clear if really needed. */
938 emit_use (hard_frame_pointer_rtx
);
939 emit_use (stack_pointer_rtx
);
941 /* If the architecture is using a GP register, we must
942 conservatively assume that the target function makes use of it.
943 The prologue of functions with nonlocal gotos must therefore
944 initialize the GP register to the appropriate value, and we
945 must then make sure that this value is live at the point
946 of the jump. (Note that this doesn't necessarily apply
947 to targets with a nonlocal_goto pattern; they are free
948 to implement it in their own way. Note also that this is
949 a no-op if the GP register is a global invariant.) */
950 if ((unsigned) PIC_OFFSET_TABLE_REGNUM
!= INVALID_REGNUM
951 && fixed_regs
[PIC_OFFSET_TABLE_REGNUM
])
952 emit_use (pic_offset_table_rtx
);
954 emit_indirect_jump (r_label
);
957 /* Search backwards to the jump insn and mark it as a
959 for (insn
= get_last_insn (); insn
; insn
= PREV_INSN (insn
))
963 add_reg_note (insn
, REG_NON_LOCAL_GOTO
, const0_rtx
);
966 else if (CALL_P (insn
))
973 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
974 (not all will be used on all machines) that was passed to __builtin_setjmp.
975 It updates the stack pointer in that block to correspond to the current
979 expand_builtin_update_setjmp_buf (rtx buf_addr
)
981 enum machine_mode sa_mode
= Pmode
;
985 #ifdef HAVE_save_stack_nonlocal
986 if (HAVE_save_stack_nonlocal
)
987 sa_mode
= insn_data
[(int) CODE_FOR_save_stack_nonlocal
].operand
[0].mode
;
989 #ifdef STACK_SAVEAREA_MODE
990 sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
994 = gen_rtx_MEM (sa_mode
,
997 plus_constant (buf_addr
, 2 * GET_MODE_SIZE (Pmode
))));
1001 emit_insn (gen_setjmp ());
1004 emit_stack_save (SAVE_NONLOCAL
, &stack_save
, NULL_RTX
);
1007 /* Expand a call to __builtin_prefetch. For a target that does not support
1008 data prefetch, evaluate the memory address argument in case it has side
1012 expand_builtin_prefetch (tree exp
)
1014 tree arg0
, arg1
, arg2
;
1018 if (!validate_arglist (exp
, POINTER_TYPE
, 0))
1021 arg0
= CALL_EXPR_ARG (exp
, 0);
1023 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1024 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1026 nargs
= call_expr_nargs (exp
);
1028 arg1
= CALL_EXPR_ARG (exp
, 1);
1030 arg1
= integer_zero_node
;
1032 arg2
= CALL_EXPR_ARG (exp
, 2);
1034 arg2
= build_int_cst (NULL_TREE
, 3);
1036 /* Argument 0 is an address. */
1037 op0
= expand_expr (arg0
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
1039 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1040 if (TREE_CODE (arg1
) != INTEGER_CST
)
1042 error ("second argument to %<__builtin_prefetch%> must be a constant");
1043 arg1
= integer_zero_node
;
1045 op1
= expand_normal (arg1
);
1046 /* Argument 1 must be either zero or one. */
1047 if (INTVAL (op1
) != 0 && INTVAL (op1
) != 1)
1049 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1054 /* Argument 2 (locality) must be a compile-time constant int. */
1055 if (TREE_CODE (arg2
) != INTEGER_CST
)
1057 error ("third argument to %<__builtin_prefetch%> must be a constant");
1058 arg2
= integer_zero_node
;
1060 op2
= expand_normal (arg2
);
1061 /* Argument 2 must be 0, 1, 2, or 3. */
1062 if (INTVAL (op2
) < 0 || INTVAL (op2
) > 3)
1064 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1068 #ifdef HAVE_prefetch
1071 if ((! (*insn_data
[(int) CODE_FOR_prefetch
].operand
[0].predicate
)
1073 insn_data
[(int) CODE_FOR_prefetch
].operand
[0].mode
))
1074 || (GET_MODE (op0
) != Pmode
))
1076 op0
= convert_memory_address (Pmode
, op0
);
1077 op0
= force_reg (Pmode
, op0
);
1079 emit_insn (gen_prefetch (op0
, op1
, op2
));
1083 /* Don't do anything with direct references to volatile memory, but
1084 generate code to handle other side effects. */
1085 if (!MEM_P (op0
) && side_effects_p (op0
))
1089 /* Get a MEM rtx for expression EXP which is the address of an operand
1090 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1091 the maximum length of the block of memory that might be accessed or
1095 get_memory_rtx (tree exp
, tree len
)
1097 rtx addr
= expand_expr (exp
, NULL_RTX
, ptr_mode
, EXPAND_NORMAL
);
1098 rtx mem
= gen_rtx_MEM (BLKmode
, memory_address (BLKmode
, addr
));
1100 /* Get an expression we can use to find the attributes to assign to MEM.
1101 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1102 we can. First remove any nops. */
1103 while (CONVERT_EXPR_P (exp
)
1104 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp
, 0))))
1105 exp
= TREE_OPERAND (exp
, 0);
1107 if (TREE_CODE (exp
) == ADDR_EXPR
)
1108 exp
= TREE_OPERAND (exp
, 0);
1109 else if (POINTER_TYPE_P (TREE_TYPE (exp
)))
1110 exp
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (exp
)), exp
);
1114 /* Honor attributes derived from exp, except for the alias set
1115 (as builtin stringops may alias with anything) and the size
1116 (as stringops may access multiple array elements). */
1119 set_mem_attributes (mem
, exp
, 0);
1121 /* Allow the string and memory builtins to overflow from one
1122 field into another, see http://gcc.gnu.org/PR23561.
1123 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1124 memory accessed by the string or memory builtin will fit
1125 within the field. */
1126 if (MEM_EXPR (mem
) && TREE_CODE (MEM_EXPR (mem
)) == COMPONENT_REF
)
1128 tree mem_expr
= MEM_EXPR (mem
);
1129 HOST_WIDE_INT offset
= -1, length
= -1;
1132 while (TREE_CODE (inner
) == ARRAY_REF
1133 || CONVERT_EXPR_P (inner
)
1134 || TREE_CODE (inner
) == VIEW_CONVERT_EXPR
1135 || TREE_CODE (inner
) == SAVE_EXPR
)
1136 inner
= TREE_OPERAND (inner
, 0);
1138 gcc_assert (TREE_CODE (inner
) == COMPONENT_REF
);
1140 if (MEM_OFFSET (mem
)
1141 && GET_CODE (MEM_OFFSET (mem
)) == CONST_INT
)
1142 offset
= INTVAL (MEM_OFFSET (mem
));
1144 if (offset
>= 0 && len
&& host_integerp (len
, 0))
1145 length
= tree_low_cst (len
, 0);
1147 while (TREE_CODE (inner
) == COMPONENT_REF
)
1149 tree field
= TREE_OPERAND (inner
, 1);
1150 gcc_assert (TREE_CODE (mem_expr
) == COMPONENT_REF
);
1151 gcc_assert (field
== TREE_OPERAND (mem_expr
, 1));
1153 /* Bitfields are generally not byte-addressable. */
1154 gcc_assert (!DECL_BIT_FIELD (field
)
1155 || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field
), 1)
1156 % BITS_PER_UNIT
) == 0
1157 && host_integerp (DECL_SIZE (field
), 0)
1158 && (TREE_INT_CST_LOW (DECL_SIZE (field
))
1159 % BITS_PER_UNIT
) == 0));
1161 /* If we can prove that the memory starting at XEXP (mem, 0) and
1162 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
1163 can keep the COMPONENT_REF in MEM_EXPR. But be careful with
1164 fields without DECL_SIZE_UNIT like flexible array members. */
1166 && DECL_SIZE_UNIT (field
)
1167 && host_integerp (DECL_SIZE_UNIT (field
), 0))
1170 = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field
));
1173 && offset
+ length
<= size
)
1178 && host_integerp (DECL_FIELD_OFFSET (field
), 0))
1179 offset
+= TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field
))
1180 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field
), 1)
1188 mem_expr
= TREE_OPERAND (mem_expr
, 0);
1189 inner
= TREE_OPERAND (inner
, 0);
1192 if (mem_expr
== NULL
)
1194 if (mem_expr
!= MEM_EXPR (mem
))
1196 set_mem_expr (mem
, mem_expr
);
1197 set_mem_offset (mem
, offset
>= 0 ? GEN_INT (offset
) : NULL_RTX
);
1200 set_mem_alias_set (mem
, 0);
1201 set_mem_size (mem
, NULL_RTX
);
1207 /* Built-in functions to perform an untyped call and return. */
1209 /* For each register that may be used for calling a function, this
1210 gives a mode used to copy the register's value. VOIDmode indicates
1211 the register is not used for calling a function. If the machine
1212 has register windows, this gives only the outbound registers.
1213 INCOMING_REGNO gives the corresponding inbound register. */
1214 static enum machine_mode apply_args_mode
[FIRST_PSEUDO_REGISTER
];
1216 /* For each register that may be used for returning values, this gives
1217 a mode used to copy the register's value. VOIDmode indicates the
1218 register is not used for returning values. If the machine has
1219 register windows, this gives only the outbound registers.
1220 INCOMING_REGNO gives the corresponding inbound register. */
1221 static enum machine_mode apply_result_mode
[FIRST_PSEUDO_REGISTER
];
1223 /* For each register that may be used for calling a function, this
1224 gives the offset of that register into the block returned by
1225 __builtin_apply_args. 0 indicates that the register is not
1226 used for calling a function. */
1227 static int apply_args_reg_offset
[FIRST_PSEUDO_REGISTER
];
1229 /* Return the size required for the block returned by __builtin_apply_args,
1230 and initialize apply_args_mode. */
1233 apply_args_size (void)
1235 static int size
= -1;
1238 enum machine_mode mode
;
1240 /* The values computed by this function never change. */
1243 /* The first value is the incoming arg-pointer. */
1244 size
= GET_MODE_SIZE (Pmode
);
1246 /* The second value is the structure value address unless this is
1247 passed as an "invisible" first argument. */
1248 if (targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0))
1249 size
+= GET_MODE_SIZE (Pmode
);
1251 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1252 if (FUNCTION_ARG_REGNO_P (regno
))
1254 mode
= reg_raw_mode
[regno
];
1256 gcc_assert (mode
!= VOIDmode
);
1258 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1259 if (size
% align
!= 0)
1260 size
= CEIL (size
, align
) * align
;
1261 apply_args_reg_offset
[regno
] = size
;
1262 size
+= GET_MODE_SIZE (mode
);
1263 apply_args_mode
[regno
] = mode
;
1267 apply_args_mode
[regno
] = VOIDmode
;
1268 apply_args_reg_offset
[regno
] = 0;
1274 /* Return the size required for the block returned by __builtin_apply,
1275 and initialize apply_result_mode. */
1278 apply_result_size (void)
1280 static int size
= -1;
1282 enum machine_mode mode
;
1284 /* The values computed by this function never change. */
1289 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1290 if (FUNCTION_VALUE_REGNO_P (regno
))
1292 mode
= reg_raw_mode
[regno
];
1294 gcc_assert (mode
!= VOIDmode
);
1296 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1297 if (size
% align
!= 0)
1298 size
= CEIL (size
, align
) * align
;
1299 size
+= GET_MODE_SIZE (mode
);
1300 apply_result_mode
[regno
] = mode
;
1303 apply_result_mode
[regno
] = VOIDmode
;
1305 /* Allow targets that use untyped_call and untyped_return to override
1306 the size so that machine-specific information can be stored here. */
1307 #ifdef APPLY_RESULT_SIZE
1308 size
= APPLY_RESULT_SIZE
;
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	/* Keep SIZE aligned for this register's mode, mirroring the
	   layout computed by apply_result_size.  */
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (VOIDmode, mem, reg)
			    : gen_rtx_SET (VOIDmode, reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */
1345 /* Save the state required to perform an untyped call with the same
1346 arguments as were passed to the current function. */
1349 expand_builtin_apply_args_1 (void)
1352 int size
, align
, regno
;
1353 enum machine_mode mode
;
1354 rtx struct_incoming_value
= targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 1);
1356 /* Create a block where the arg-pointer, structure value address,
1357 and argument registers can be saved. */
1358 registers
= assign_stack_local (BLKmode
, apply_args_size (), -1);
1360 /* Walk past the arg-pointer and structure value address. */
1361 size
= GET_MODE_SIZE (Pmode
);
1362 if (targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0))
1363 size
+= GET_MODE_SIZE (Pmode
);
1365 /* Save each register used in calling a function to the block. */
1366 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1367 if ((mode
= apply_args_mode
[regno
]) != VOIDmode
)
1369 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1370 if (size
% align
!= 0)
1371 size
= CEIL (size
, align
) * align
;
1373 tem
= gen_rtx_REG (mode
, INCOMING_REGNO (regno
));
1375 emit_move_insn (adjust_address (registers
, mode
, size
), tem
);
1376 size
+= GET_MODE_SIZE (mode
);
1379 /* Save the arg pointer to the block. */
1380 tem
= copy_to_reg (crtl
->args
.internal_arg_pointer
);
1381 #ifdef STACK_GROWS_DOWNWARD
1382 /* We need the pointer as the caller actually passed them to us, not
1383 as we might have pretended they were passed. Make sure it's a valid
1384 operand, as emit_move_insn isn't expected to handle a PLUS. */
1386 = force_operand (plus_constant (tem
, crtl
->args
.pretend_args_size
),
1389 emit_move_insn (adjust_address (registers
, Pmode
, 0), tem
);
1391 size
= GET_MODE_SIZE (Pmode
);
1393 /* Save the structure value address unless this is passed as an
1394 "invisible" first argument. */
1395 if (struct_incoming_value
)
1397 emit_move_insn (adjust_address (registers
, Pmode
, size
),
1398 copy_to_reg (struct_incoming_value
));
1399 size
+= GET_MODE_SIZE (Pmode
);
1402 /* Return the address of the block. */
1403 return copy_addr_to_reg (XEXP (registers
, 0));
1406 /* __builtin_apply_args returns block of memory allocated on
1407 the stack into which is stored the arg pointer, structure
1408 value address, static chain, and all the registers that might
1409 possibly be used in performing a function call. The code is
1410 moved to the start of the function so the incoming values are
1414 expand_builtin_apply_args (void)
1416 /* Don't do __builtin_apply_args more than once in a function.
1417 Save the result of the first call and reuse it. */
1418 if (apply_args_value
!= 0)
1419 return apply_args_value
;
1421 /* When this function is called, it means that registers must be
1422 saved on entry to this function. So we migrate the
1423 call to the first insn of this function. */
1428 temp
= expand_builtin_apply_args_1 ();
1432 apply_args_value
= temp
;
1434 /* Put the insns after the NOTE that starts the function.
1435 If this is inside a start_sequence, make the outer-level insn
1436 chain current, so the code is placed at the start of the
1437 function. If internal_arg_pointer is a non-virtual pseudo,
1438 it needs to be placed after the function that initializes
1440 push_topmost_sequence ();
1441 if (REG_P (crtl
->args
.internal_arg_pointer
)
1442 && REGNO (crtl
->args
.internal_arg_pointer
) > LAST_VIRTUAL_REGISTER
)
1443 emit_insn_before (seq
, parm_birth_insn
);
1445 emit_insn_before (seq
, NEXT_INSN (entry_of_function ()));
1446 pop_topmost_sequence ();
1451 /* Perform an untyped call and save the state required to perform an
1452 untyped return of whatever value was returned by the given function. */
1455 expand_builtin_apply (rtx function
, rtx arguments
, rtx argsize
)
1457 int size
, align
, regno
;
1458 enum machine_mode mode
;
1459 rtx incoming_args
, result
, reg
, dest
, src
, call_insn
;
1460 rtx old_stack_level
= 0;
1461 rtx call_fusage
= 0;
1462 rtx struct_value
= targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0);
1464 arguments
= convert_memory_address (Pmode
, arguments
);
1466 /* Create a block where the return registers can be saved. */
1467 result
= assign_stack_local (BLKmode
, apply_result_size (), -1);
1469 /* Fetch the arg pointer from the ARGUMENTS block. */
1470 incoming_args
= gen_reg_rtx (Pmode
);
1471 emit_move_insn (incoming_args
, gen_rtx_MEM (Pmode
, arguments
));
1472 #ifndef STACK_GROWS_DOWNWARD
1473 incoming_args
= expand_simple_binop (Pmode
, MINUS
, incoming_args
, argsize
,
1474 incoming_args
, 0, OPTAB_LIB_WIDEN
);
1477 /* Push a new argument block and copy the arguments. Do not allow
1478 the (potential) memcpy call below to interfere with our stack
1480 do_pending_stack_adjust ();
1483 /* Save the stack with nonlocal if available. */
1484 #ifdef HAVE_save_stack_nonlocal
1485 if (HAVE_save_stack_nonlocal
)
1486 emit_stack_save (SAVE_NONLOCAL
, &old_stack_level
, NULL_RTX
);
1489 emit_stack_save (SAVE_BLOCK
, &old_stack_level
, NULL_RTX
);
1491 /* Allocate a block of memory onto the stack and copy the memory
1492 arguments to the outgoing arguments address. */
1493 allocate_dynamic_stack_space (argsize
, 0, BITS_PER_UNIT
);
1495 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1496 may have already set current_function_calls_alloca to true.
1497 current_function_calls_alloca won't be set if argsize is zero,
1498 so we have to guarantee need_drap is true here. */
1499 if (SUPPORTS_STACK_ALIGNMENT
)
1500 crtl
->need_drap
= true;
1502 dest
= virtual_outgoing_args_rtx
;
1503 #ifndef STACK_GROWS_DOWNWARD
1504 if (GET_CODE (argsize
) == CONST_INT
)
1505 dest
= plus_constant (dest
, -INTVAL (argsize
));
1507 dest
= gen_rtx_PLUS (Pmode
, dest
, negate_rtx (Pmode
, argsize
));
1509 dest
= gen_rtx_MEM (BLKmode
, dest
);
1510 set_mem_align (dest
, PARM_BOUNDARY
);
1511 src
= gen_rtx_MEM (BLKmode
, incoming_args
);
1512 set_mem_align (src
, PARM_BOUNDARY
);
1513 emit_block_move (dest
, src
, argsize
, BLOCK_OP_NORMAL
);
1515 /* Refer to the argument block. */
1517 arguments
= gen_rtx_MEM (BLKmode
, arguments
);
1518 set_mem_align (arguments
, PARM_BOUNDARY
);
1520 /* Walk past the arg-pointer and structure value address. */
1521 size
= GET_MODE_SIZE (Pmode
);
1523 size
+= GET_MODE_SIZE (Pmode
);
1525 /* Restore each of the registers previously saved. Make USE insns
1526 for each of these registers for use in making the call. */
1527 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1528 if ((mode
= apply_args_mode
[regno
]) != VOIDmode
)
1530 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1531 if (size
% align
!= 0)
1532 size
= CEIL (size
, align
) * align
;
1533 reg
= gen_rtx_REG (mode
, regno
);
1534 emit_move_insn (reg
, adjust_address (arguments
, mode
, size
));
1535 use_reg (&call_fusage
, reg
);
1536 size
+= GET_MODE_SIZE (mode
);
1539 /* Restore the structure value address unless this is passed as an
1540 "invisible" first argument. */
1541 size
= GET_MODE_SIZE (Pmode
);
1544 rtx value
= gen_reg_rtx (Pmode
);
1545 emit_move_insn (value
, adjust_address (arguments
, Pmode
, size
));
1546 emit_move_insn (struct_value
, value
);
1547 if (REG_P (struct_value
))
1548 use_reg (&call_fusage
, struct_value
);
1549 size
+= GET_MODE_SIZE (Pmode
);
1552 /* All arguments and registers used for the call are set up by now! */
1553 function
= prepare_call_address (function
, NULL
, &call_fusage
, 0, 0);
1555 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1556 and we don't want to load it into a register as an optimization,
1557 because prepare_call_address already did it if it should be done. */
1558 if (GET_CODE (function
) != SYMBOL_REF
)
1559 function
= memory_address (FUNCTION_MODE
, function
);
1561 /* Generate the actual call instruction and save the return value. */
1562 #ifdef HAVE_untyped_call
1563 if (HAVE_untyped_call
)
1564 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE
, function
),
1565 result
, result_vector (1, result
)));
1568 #ifdef HAVE_call_value
1569 if (HAVE_call_value
)
1573 /* Locate the unique return register. It is not possible to
1574 express a call that sets more than one return register using
1575 call_value; use untyped_call for that. In fact, untyped_call
1576 only needs to save the return registers in the given block. */
1577 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1578 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
1580 gcc_assert (!valreg
); /* HAVE_untyped_call required. */
1582 valreg
= gen_rtx_REG (mode
, regno
);
1585 emit_call_insn (GEN_CALL_VALUE (valreg
,
1586 gen_rtx_MEM (FUNCTION_MODE
, function
),
1587 const0_rtx
, NULL_RTX
, const0_rtx
));
1589 emit_move_insn (adjust_address (result
, GET_MODE (valreg
), 0), valreg
);
1595 /* Find the CALL insn we just emitted, and attach the register usage
1597 call_insn
= last_call_insn ();
1598 add_function_usage_to (call_insn
, call_fusage
);
1600 /* Restore the stack. */
1601 #ifdef HAVE_save_stack_nonlocal
1602 if (HAVE_save_stack_nonlocal
)
1603 emit_stack_restore (SAVE_NONLOCAL
, old_stack_level
, NULL_RTX
);
1606 emit_stack_restore (SAVE_BLOCK
, old_stack_level
, NULL_RTX
);
1610 /* Return the address of the result block. */
1611 result
= copy_addr_to_reg (XEXP (result
, 0));
1612 return convert_memory_address (ptr_mode
, result
);
1615 /* Perform an untyped return. */
1618 expand_builtin_return (rtx result
)
1620 int size
, align
, regno
;
1621 enum machine_mode mode
;
1623 rtx call_fusage
= 0;
1625 result
= convert_memory_address (Pmode
, result
);
1627 apply_result_size ();
1628 result
= gen_rtx_MEM (BLKmode
, result
);
1630 #ifdef HAVE_untyped_return
1631 if (HAVE_untyped_return
)
1633 emit_jump_insn (gen_untyped_return (result
, result_vector (0, result
)));
1639 /* Restore the return value and note that each value is used. */
1641 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1642 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
1644 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1645 if (size
% align
!= 0)
1646 size
= CEIL (size
, align
) * align
;
1647 reg
= gen_rtx_REG (mode
, INCOMING_REGNO (regno
));
1648 emit_move_insn (reg
, adjust_address (result
, mode
, size
));
1650 push_to_sequence (call_fusage
);
1652 call_fusage
= get_insns ();
1654 size
+= GET_MODE_SIZE (mode
);
1657 /* Put the USE insns before the return. */
1658 emit_insn (call_fusage
);
1660 /* Return whatever values was restored by jumping directly to the end
1662 expand_naked_return ();
1665 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1667 static enum type_class
1668 type_to_class (tree type
)
1670 switch (TREE_CODE (type
))
1672 case VOID_TYPE
: return void_type_class
;
1673 case INTEGER_TYPE
: return integer_type_class
;
1674 case ENUMERAL_TYPE
: return enumeral_type_class
;
1675 case BOOLEAN_TYPE
: return boolean_type_class
;
1676 case POINTER_TYPE
: return pointer_type_class
;
1677 case REFERENCE_TYPE
: return reference_type_class
;
1678 case OFFSET_TYPE
: return offset_type_class
;
1679 case REAL_TYPE
: return real_type_class
;
1680 case COMPLEX_TYPE
: return complex_type_class
;
1681 case FUNCTION_TYPE
: return function_type_class
;
1682 case METHOD_TYPE
: return method_type_class
;
1683 case RECORD_TYPE
: return record_type_class
;
1685 case QUAL_UNION_TYPE
: return union_type_class
;
1686 case ARRAY_TYPE
: return (TYPE_STRING_FLAG (type
)
1687 ? string_type_class
: array_type_class
);
1688 case LANG_TYPE
: return lang_type_class
;
1689 default: return no_type_class
;
1693 /* Expand a call EXP to __builtin_classify_type. */
1696 expand_builtin_classify_type (tree exp
)
1698 if (call_expr_nargs (exp
))
1699 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp
, 0))));
1700 return GEN_INT (no_type_class
);
/* This helper macro, meant to be used in mathfn_built_in below,
   determines which among a set of three builtin math functions is
   appropriate for a given type mode.  The `F' and `L' cases are
   automatically generated from the `double' case.  */
#define CASE_MATHFN(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
  fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
  fcodel = BUILT_IN_MATHFN##L ; break;
/* Similar to above, but appends _R after any F/L suffix.  */
#define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
  fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
  fcodel = BUILT_IN_MATHFN##L_R ; break;
1717 /* Return mathematic function equivalent to FN but operating directly
1718 on TYPE, if available. If IMPLICIT is true find the function in
1719 implicit_built_in_decls[], otherwise use built_in_decls[]. If we
1720 can't do the conversion, return zero. */
1723 mathfn_built_in_1 (tree type
, enum built_in_function fn
, bool implicit
)
1725 tree
const *const fn_arr
1726 = implicit
? implicit_built_in_decls
: built_in_decls
;
1727 enum built_in_function fcode
, fcodef
, fcodel
;
1731 CASE_MATHFN (BUILT_IN_ACOS
)
1732 CASE_MATHFN (BUILT_IN_ACOSH
)
1733 CASE_MATHFN (BUILT_IN_ASIN
)
1734 CASE_MATHFN (BUILT_IN_ASINH
)
1735 CASE_MATHFN (BUILT_IN_ATAN
)
1736 CASE_MATHFN (BUILT_IN_ATAN2
)
1737 CASE_MATHFN (BUILT_IN_ATANH
)
1738 CASE_MATHFN (BUILT_IN_CBRT
)
1739 CASE_MATHFN (BUILT_IN_CEIL
)
1740 CASE_MATHFN (BUILT_IN_CEXPI
)
1741 CASE_MATHFN (BUILT_IN_COPYSIGN
)
1742 CASE_MATHFN (BUILT_IN_COS
)
1743 CASE_MATHFN (BUILT_IN_COSH
)
1744 CASE_MATHFN (BUILT_IN_DREM
)
1745 CASE_MATHFN (BUILT_IN_ERF
)
1746 CASE_MATHFN (BUILT_IN_ERFC
)
1747 CASE_MATHFN (BUILT_IN_EXP
)
1748 CASE_MATHFN (BUILT_IN_EXP10
)
1749 CASE_MATHFN (BUILT_IN_EXP2
)
1750 CASE_MATHFN (BUILT_IN_EXPM1
)
1751 CASE_MATHFN (BUILT_IN_FABS
)
1752 CASE_MATHFN (BUILT_IN_FDIM
)
1753 CASE_MATHFN (BUILT_IN_FLOOR
)
1754 CASE_MATHFN (BUILT_IN_FMA
)
1755 CASE_MATHFN (BUILT_IN_FMAX
)
1756 CASE_MATHFN (BUILT_IN_FMIN
)
1757 CASE_MATHFN (BUILT_IN_FMOD
)
1758 CASE_MATHFN (BUILT_IN_FREXP
)
1759 CASE_MATHFN (BUILT_IN_GAMMA
)
1760 CASE_MATHFN_REENT (BUILT_IN_GAMMA
) /* GAMMA_R */
1761 CASE_MATHFN (BUILT_IN_HUGE_VAL
)
1762 CASE_MATHFN (BUILT_IN_HYPOT
)
1763 CASE_MATHFN (BUILT_IN_ILOGB
)
1764 CASE_MATHFN (BUILT_IN_INF
)
1765 CASE_MATHFN (BUILT_IN_ISINF
)
1766 CASE_MATHFN (BUILT_IN_J0
)
1767 CASE_MATHFN (BUILT_IN_J1
)
1768 CASE_MATHFN (BUILT_IN_JN
)
1769 CASE_MATHFN (BUILT_IN_LCEIL
)
1770 CASE_MATHFN (BUILT_IN_LDEXP
)
1771 CASE_MATHFN (BUILT_IN_LFLOOR
)
1772 CASE_MATHFN (BUILT_IN_LGAMMA
)
1773 CASE_MATHFN_REENT (BUILT_IN_LGAMMA
) /* LGAMMA_R */
1774 CASE_MATHFN (BUILT_IN_LLCEIL
)
1775 CASE_MATHFN (BUILT_IN_LLFLOOR
)
1776 CASE_MATHFN (BUILT_IN_LLRINT
)
1777 CASE_MATHFN (BUILT_IN_LLROUND
)
1778 CASE_MATHFN (BUILT_IN_LOG
)
1779 CASE_MATHFN (BUILT_IN_LOG10
)
1780 CASE_MATHFN (BUILT_IN_LOG1P
)
1781 CASE_MATHFN (BUILT_IN_LOG2
)
1782 CASE_MATHFN (BUILT_IN_LOGB
)
1783 CASE_MATHFN (BUILT_IN_LRINT
)
1784 CASE_MATHFN (BUILT_IN_LROUND
)
1785 CASE_MATHFN (BUILT_IN_MODF
)
1786 CASE_MATHFN (BUILT_IN_NAN
)
1787 CASE_MATHFN (BUILT_IN_NANS
)
1788 CASE_MATHFN (BUILT_IN_NEARBYINT
)
1789 CASE_MATHFN (BUILT_IN_NEXTAFTER
)
1790 CASE_MATHFN (BUILT_IN_NEXTTOWARD
)
1791 CASE_MATHFN (BUILT_IN_POW
)
1792 CASE_MATHFN (BUILT_IN_POWI
)
1793 CASE_MATHFN (BUILT_IN_POW10
)
1794 CASE_MATHFN (BUILT_IN_REMAINDER
)
1795 CASE_MATHFN (BUILT_IN_REMQUO
)
1796 CASE_MATHFN (BUILT_IN_RINT
)
1797 CASE_MATHFN (BUILT_IN_ROUND
)
1798 CASE_MATHFN (BUILT_IN_SCALB
)
1799 CASE_MATHFN (BUILT_IN_SCALBLN
)
1800 CASE_MATHFN (BUILT_IN_SCALBN
)
1801 CASE_MATHFN (BUILT_IN_SIGNBIT
)
1802 CASE_MATHFN (BUILT_IN_SIGNIFICAND
)
1803 CASE_MATHFN (BUILT_IN_SIN
)
1804 CASE_MATHFN (BUILT_IN_SINCOS
)
1805 CASE_MATHFN (BUILT_IN_SINH
)
1806 CASE_MATHFN (BUILT_IN_SQRT
)
1807 CASE_MATHFN (BUILT_IN_TAN
)
1808 CASE_MATHFN (BUILT_IN_TANH
)
1809 CASE_MATHFN (BUILT_IN_TGAMMA
)
1810 CASE_MATHFN (BUILT_IN_TRUNC
)
1811 CASE_MATHFN (BUILT_IN_Y0
)
1812 CASE_MATHFN (BUILT_IN_Y1
)
1813 CASE_MATHFN (BUILT_IN_YN
)
1819 if (TYPE_MAIN_VARIANT (type
) == double_type_node
)
1820 return fn_arr
[fcode
];
1821 else if (TYPE_MAIN_VARIANT (type
) == float_type_node
)
1822 return fn_arr
[fcodef
];
1823 else if (TYPE_MAIN_VARIANT (type
) == long_double_type_node
)
1824 return fn_arr
[fcodel
];
1829 /* Like mathfn_built_in_1(), but always use the implicit array. */
1832 mathfn_built_in (tree type
, enum built_in_function fn
)
1834 return mathfn_built_in_1 (type
, fn
, /*implicit=*/ 1);
1837 /* If errno must be maintained, expand the RTL to check if the result,
1838 TARGET, of a built-in function call, EXP, is NaN, and if so set
1842 expand_errno_check (tree exp
, rtx target
)
1844 rtx lab
= gen_label_rtx ();
1846 /* Test the result; if it is NaN, set errno=EDOM because
1847 the argument was not in the domain. */
1848 emit_cmp_and_jump_insns (target
, target
, EQ
, 0, GET_MODE (target
),
1852 /* If this built-in doesn't throw an exception, set errno directly. */
1853 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp
), 0)))
1855 #ifdef GEN_ERRNO_RTX
1856 rtx errno_rtx
= GEN_ERRNO_RTX
;
1859 = gen_rtx_MEM (word_mode
, gen_rtx_SYMBOL_REF (Pmode
, "errno"));
1861 emit_move_insn (errno_rtx
, GEN_INT (TARGET_EDOM
));
1867 /* Make sure the library call isn't expanded as a tail call. */
1868 CALL_EXPR_TAILCALL (exp
) = 0;
1870 /* We can't set errno=EDOM directly; let the library call do it.
1871 Pop the arguments right away in case the call gets deleted. */
1873 expand_call (exp
, target
, 0);
1878 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1879 Return NULL_RTX if a normal call should be emitted rather than expanding
1880 the function in-line. EXP is the expression that is a call to the builtin
1881 function; if convenient, the result should be placed in TARGET.
1882 SUBTARGET may be used as the target for computing one of EXP's operands. */
1885 expand_builtin_mathfn (tree exp
, rtx target
, rtx subtarget
)
1887 optab builtin_optab
;
1888 rtx op0
, insns
, before_call
;
1889 tree fndecl
= get_callee_fndecl (exp
);
1890 enum machine_mode mode
;
1891 bool errno_set
= false;
1894 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
1897 arg
= CALL_EXPR_ARG (exp
, 0);
1899 switch (DECL_FUNCTION_CODE (fndecl
))
1901 CASE_FLT_FN (BUILT_IN_SQRT
):
1902 errno_set
= ! tree_expr_nonnegative_p (arg
);
1903 builtin_optab
= sqrt_optab
;
1905 CASE_FLT_FN (BUILT_IN_EXP
):
1906 errno_set
= true; builtin_optab
= exp_optab
; break;
1907 CASE_FLT_FN (BUILT_IN_EXP10
):
1908 CASE_FLT_FN (BUILT_IN_POW10
):
1909 errno_set
= true; builtin_optab
= exp10_optab
; break;
1910 CASE_FLT_FN (BUILT_IN_EXP2
):
1911 errno_set
= true; builtin_optab
= exp2_optab
; break;
1912 CASE_FLT_FN (BUILT_IN_EXPM1
):
1913 errno_set
= true; builtin_optab
= expm1_optab
; break;
1914 CASE_FLT_FN (BUILT_IN_LOGB
):
1915 errno_set
= true; builtin_optab
= logb_optab
; break;
1916 CASE_FLT_FN (BUILT_IN_LOG
):
1917 errno_set
= true; builtin_optab
= log_optab
; break;
1918 CASE_FLT_FN (BUILT_IN_LOG10
):
1919 errno_set
= true; builtin_optab
= log10_optab
; break;
1920 CASE_FLT_FN (BUILT_IN_LOG2
):
1921 errno_set
= true; builtin_optab
= log2_optab
; break;
1922 CASE_FLT_FN (BUILT_IN_LOG1P
):
1923 errno_set
= true; builtin_optab
= log1p_optab
; break;
1924 CASE_FLT_FN (BUILT_IN_ASIN
):
1925 builtin_optab
= asin_optab
; break;
1926 CASE_FLT_FN (BUILT_IN_ACOS
):
1927 builtin_optab
= acos_optab
; break;
1928 CASE_FLT_FN (BUILT_IN_TAN
):
1929 builtin_optab
= tan_optab
; break;
1930 CASE_FLT_FN (BUILT_IN_ATAN
):
1931 builtin_optab
= atan_optab
; break;
1932 CASE_FLT_FN (BUILT_IN_FLOOR
):
1933 builtin_optab
= floor_optab
; break;
1934 CASE_FLT_FN (BUILT_IN_CEIL
):
1935 builtin_optab
= ceil_optab
; break;
1936 CASE_FLT_FN (BUILT_IN_TRUNC
):
1937 builtin_optab
= btrunc_optab
; break;
1938 CASE_FLT_FN (BUILT_IN_ROUND
):
1939 builtin_optab
= round_optab
; break;
1940 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
1941 builtin_optab
= nearbyint_optab
;
1942 if (flag_trapping_math
)
1944 /* Else fallthrough and expand as rint. */
1945 CASE_FLT_FN (BUILT_IN_RINT
):
1946 builtin_optab
= rint_optab
; break;
1951 /* Make a suitable register to place result in. */
1952 mode
= TYPE_MODE (TREE_TYPE (exp
));
1954 if (! flag_errno_math
|| ! HONOR_NANS (mode
))
1957 /* Before working hard, check whether the instruction is available. */
1958 if (optab_handler (builtin_optab
, mode
)->insn_code
!= CODE_FOR_nothing
)
1960 target
= gen_reg_rtx (mode
);
1962 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1963 need to expand the argument again. This way, we will not perform
1964 side-effects more the once. */
1965 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
1967 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
1971 /* Compute into TARGET.
1972 Set TARGET to wherever the result comes back. */
1973 target
= expand_unop (mode
, builtin_optab
, op0
, target
, 0);
1978 expand_errno_check (exp
, target
);
1980 /* Output the entire sequence. */
1981 insns
= get_insns ();
1987 /* If we were unable to expand via the builtin, stop the sequence
1988 (without outputting the insns) and call to the library function
1989 with the stabilized argument list. */
1993 before_call
= get_last_insn ();
1995 return expand_call (exp
, target
, target
== const0_rtx
);
1998 /* Expand a call to the builtin binary math functions (pow and atan2).
1999 Return NULL_RTX if a normal call should be emitted rather than expanding the
2000 function in-line. EXP is the expression that is a call to the builtin
2001 function; if convenient, the result should be placed in TARGET.
2002 SUBTARGET may be used as the target for computing one of EXP's
2006 expand_builtin_mathfn_2 (tree exp
, rtx target
, rtx subtarget
)
2008 optab builtin_optab
;
2009 rtx op0
, op1
, insns
;
2010 int op1_type
= REAL_TYPE
;
2011 tree fndecl
= get_callee_fndecl (exp
);
2013 enum machine_mode mode
;
2014 bool errno_set
= true;
2016 switch (DECL_FUNCTION_CODE (fndecl
))
2018 CASE_FLT_FN (BUILT_IN_SCALBN
):
2019 CASE_FLT_FN (BUILT_IN_SCALBLN
):
2020 CASE_FLT_FN (BUILT_IN_LDEXP
):
2021 op1_type
= INTEGER_TYPE
;
2026 if (!validate_arglist (exp
, REAL_TYPE
, op1_type
, VOID_TYPE
))
2029 arg0
= CALL_EXPR_ARG (exp
, 0);
2030 arg1
= CALL_EXPR_ARG (exp
, 1);
2032 switch (DECL_FUNCTION_CODE (fndecl
))
2034 CASE_FLT_FN (BUILT_IN_POW
):
2035 builtin_optab
= pow_optab
; break;
2036 CASE_FLT_FN (BUILT_IN_ATAN2
):
2037 builtin_optab
= atan2_optab
; break;
2038 CASE_FLT_FN (BUILT_IN_SCALB
):
2039 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp
)))->b
!= 2)
2041 builtin_optab
= scalb_optab
; break;
2042 CASE_FLT_FN (BUILT_IN_SCALBN
):
2043 CASE_FLT_FN (BUILT_IN_SCALBLN
):
2044 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp
)))->b
!= 2)
2046 /* Fall through... */
2047 CASE_FLT_FN (BUILT_IN_LDEXP
):
2048 builtin_optab
= ldexp_optab
; break;
2049 CASE_FLT_FN (BUILT_IN_FMOD
):
2050 builtin_optab
= fmod_optab
; break;
2051 CASE_FLT_FN (BUILT_IN_REMAINDER
):
2052 CASE_FLT_FN (BUILT_IN_DREM
):
2053 builtin_optab
= remainder_optab
; break;
2058 /* Make a suitable register to place result in. */
2059 mode
= TYPE_MODE (TREE_TYPE (exp
));
2061 /* Before working hard, check whether the instruction is available. */
2062 if (optab_handler (builtin_optab
, mode
)->insn_code
== CODE_FOR_nothing
)
2065 target
= gen_reg_rtx (mode
);
2067 if (! flag_errno_math
|| ! HONOR_NANS (mode
))
2070 /* Always stabilize the argument list. */
2071 CALL_EXPR_ARG (exp
, 0) = arg0
= builtin_save_expr (arg0
);
2072 CALL_EXPR_ARG (exp
, 1) = arg1
= builtin_save_expr (arg1
);
2074 op0
= expand_expr (arg0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2075 op1
= expand_normal (arg1
);
2079 /* Compute into TARGET.
2080 Set TARGET to wherever the result comes back. */
2081 target
= expand_binop (mode
, builtin_optab
, op0
, op1
,
2082 target
, 0, OPTAB_DIRECT
);
2084 /* If we were unable to expand via the builtin, stop the sequence
2085 (without outputting the insns) and call to the library function
2086 with the stabilized argument list. */
2090 return expand_call (exp
, target
, target
== const0_rtx
);
2094 expand_errno_check (exp
, target
);
2096 /* Output the entire sequence. */
2097 insns
= get_insns ();
2104 /* Expand a call to the builtin sin and cos math functions.
2105 Return NULL_RTX if a normal call should be emitted rather than expanding the
2106 function in-line. EXP is the expression that is a call to the builtin
2107 function; if convenient, the result should be placed in TARGET.
2108 SUBTARGET may be used as the target for computing one of EXP's
2112 expand_builtin_mathfn_3 (tree exp
, rtx target
, rtx subtarget
)
2114 optab builtin_optab
;
2116 tree fndecl
= get_callee_fndecl (exp
);
2117 enum machine_mode mode
;
2120 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2123 arg
= CALL_EXPR_ARG (exp
, 0);
2125 switch (DECL_FUNCTION_CODE (fndecl
))
2127 CASE_FLT_FN (BUILT_IN_SIN
):
2128 CASE_FLT_FN (BUILT_IN_COS
):
2129 builtin_optab
= sincos_optab
; break;
2134 /* Make a suitable register to place result in. */
2135 mode
= TYPE_MODE (TREE_TYPE (exp
));
2137 /* Check if sincos insn is available, otherwise fallback
2138 to sin or cos insn. */
2139 if (optab_handler (builtin_optab
, mode
)->insn_code
== CODE_FOR_nothing
)
2140 switch (DECL_FUNCTION_CODE (fndecl
))
2142 CASE_FLT_FN (BUILT_IN_SIN
):
2143 builtin_optab
= sin_optab
; break;
2144 CASE_FLT_FN (BUILT_IN_COS
):
2145 builtin_optab
= cos_optab
; break;
2150 /* Before working hard, check whether the instruction is available. */
2151 if (optab_handler (builtin_optab
, mode
)->insn_code
!= CODE_FOR_nothing
)
2153 target
= gen_reg_rtx (mode
);
2155 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2156 need to expand the argument again. This way, we will not perform
2157 side-effects more the once. */
2158 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2160 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2164 /* Compute into TARGET.
2165 Set TARGET to wherever the result comes back. */
2166 if (builtin_optab
== sincos_optab
)
2170 switch (DECL_FUNCTION_CODE (fndecl
))
2172 CASE_FLT_FN (BUILT_IN_SIN
):
2173 result
= expand_twoval_unop (builtin_optab
, op0
, 0, target
, 0);
2175 CASE_FLT_FN (BUILT_IN_COS
):
2176 result
= expand_twoval_unop (builtin_optab
, op0
, target
, 0, 0);
2181 gcc_assert (result
);
2185 target
= expand_unop (mode
, builtin_optab
, op0
, target
, 0);
2190 /* Output the entire sequence. */
2191 insns
= get_insns ();
2197 /* If we were unable to expand via the builtin, stop the sequence
2198 (without outputting the insns) and call to the library function
2199 with the stabilized argument list. */
2203 target
= expand_call (exp
, target
, target
== const0_rtx
);
2208 /* Expand a call to one of the builtin math functions that operate on
2209 floating point argument and output an integer result (ilogb, isinf,
2211 Return 0 if a normal call should be emitted rather than expanding the
2212 function in-line. EXP is the expression that is a call to the builtin
2213 function; if convenient, the result should be placed in TARGET.
2214 SUBTARGET may be used as the target for computing one of EXP's operands. */
2217 expand_builtin_interclass_mathfn (tree exp
, rtx target
, rtx subtarget
)
2219 optab builtin_optab
= 0;
2220 enum insn_code icode
= CODE_FOR_nothing
;
2222 tree fndecl
= get_callee_fndecl (exp
);
2223 enum machine_mode mode
;
2224 bool errno_set
= false;
2227 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2230 arg
= CALL_EXPR_ARG (exp
, 0);
2232 switch (DECL_FUNCTION_CODE (fndecl
))
2234 CASE_FLT_FN (BUILT_IN_ILOGB
):
2235 errno_set
= true; builtin_optab
= ilogb_optab
; break;
2236 CASE_FLT_FN (BUILT_IN_ISINF
):
2237 builtin_optab
= isinf_optab
; break;
2238 case BUILT_IN_ISNORMAL
:
2239 case BUILT_IN_ISFINITE
:
2240 CASE_FLT_FN (BUILT_IN_FINITE
):
2241 /* These builtins have no optabs (yet). */
2247 /* There's no easy way to detect the case we need to set EDOM. */
2248 if (flag_errno_math
&& errno_set
)
2251 /* Optab mode depends on the mode of the input argument. */
2252 mode
= TYPE_MODE (TREE_TYPE (arg
));
2255 icode
= optab_handler (builtin_optab
, mode
)->insn_code
;
2257 /* Before working hard, check whether the instruction is available. */
2258 if (icode
!= CODE_FOR_nothing
)
2260 /* Make a suitable register to place result in. */
2262 || GET_MODE (target
) != TYPE_MODE (TREE_TYPE (exp
)))
2263 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
2265 gcc_assert (insn_data
[icode
].operand
[0].predicate
2266 (target
, GET_MODE (target
)));
2268 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2269 need to expand the argument again. This way, we will not perform
2270 side-effects more the once. */
2271 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2273 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2275 if (mode
!= GET_MODE (op0
))
2276 op0
= convert_to_mode (mode
, op0
, 0);
2278 /* Compute into TARGET.
2279 Set TARGET to wherever the result comes back. */
2280 emit_unop_insn (icode
, target
, op0
, UNKNOWN
);
2284 /* If there is no optab, try generic code. */
2285 switch (DECL_FUNCTION_CODE (fndecl
))
2289 CASE_FLT_FN (BUILT_IN_ISINF
):
2291 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
2292 tree
const isgr_fn
= built_in_decls
[BUILT_IN_ISGREATER
];
2293 tree
const type
= TREE_TYPE (arg
);
2297 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
2298 real_from_string (&r
, buf
);
2299 result
= build_call_expr (isgr_fn
, 2,
2300 fold_build1 (ABS_EXPR
, type
, arg
),
2301 build_real (type
, r
));
2302 return expand_expr (result
, target
, VOIDmode
, EXPAND_NORMAL
);
2304 CASE_FLT_FN (BUILT_IN_FINITE
):
2305 case BUILT_IN_ISFINITE
:
2307 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
2308 tree
const isle_fn
= built_in_decls
[BUILT_IN_ISLESSEQUAL
];
2309 tree
const type
= TREE_TYPE (arg
);
2313 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
2314 real_from_string (&r
, buf
);
2315 result
= build_call_expr (isle_fn
, 2,
2316 fold_build1 (ABS_EXPR
, type
, arg
),
2317 build_real (type
, r
));
2318 return expand_expr (result
, target
, VOIDmode
, EXPAND_NORMAL
);
2320 case BUILT_IN_ISNORMAL
:
2322 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
2323 islessequal(fabs(x),DBL_MAX). */
2324 tree
const isle_fn
= built_in_decls
[BUILT_IN_ISLESSEQUAL
];
2325 tree
const isge_fn
= built_in_decls
[BUILT_IN_ISGREATEREQUAL
];
2326 tree
const type
= TREE_TYPE (arg
);
2327 REAL_VALUE_TYPE rmax
, rmin
;
2330 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
2331 real_from_string (&rmax
, buf
);
2332 sprintf (buf
, "0x1p%d", REAL_MODE_FORMAT (mode
)->emin
- 1);
2333 real_from_string (&rmin
, buf
);
2334 arg
= builtin_save_expr (fold_build1 (ABS_EXPR
, type
, arg
));
2335 result
= build_call_expr (isle_fn
, 2, arg
,
2336 build_real (type
, rmax
));
2337 result
= fold_build2 (BIT_AND_EXPR
, integer_type_node
, result
,
2338 build_call_expr (isge_fn
, 2, arg
,
2339 build_real (type
, rmin
)));
2340 return expand_expr (result
, target
, VOIDmode
, EXPAND_NORMAL
);
2346 target
= expand_call (exp
, target
, target
== const0_rtx
);
2351 /* Expand a call to the builtin sincos math function.
2352 Return NULL_RTX if a normal call should be emitted rather than expanding the
2353 function in-line. EXP is the expression that is a call to the builtin
2357 expand_builtin_sincos (tree exp
)
2359 rtx op0
, op1
, op2
, target1
, target2
;
2360 enum machine_mode mode
;
2361 tree arg
, sinp
, cosp
;
2364 if (!validate_arglist (exp
, REAL_TYPE
,
2365 POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
2368 arg
= CALL_EXPR_ARG (exp
, 0);
2369 sinp
= CALL_EXPR_ARG (exp
, 1);
2370 cosp
= CALL_EXPR_ARG (exp
, 2);
2372 /* Make a suitable register to place result in. */
2373 mode
= TYPE_MODE (TREE_TYPE (arg
));
2375 /* Check if sincos insn is available, otherwise emit the call. */
2376 if (optab_handler (sincos_optab
, mode
)->insn_code
== CODE_FOR_nothing
)
2379 target1
= gen_reg_rtx (mode
);
2380 target2
= gen_reg_rtx (mode
);
2382 op0
= expand_normal (arg
);
2383 op1
= expand_normal (build_fold_indirect_ref (sinp
));
2384 op2
= expand_normal (build_fold_indirect_ref (cosp
));
2386 /* Compute into target1 and target2.
2387 Set TARGET to wherever the result comes back. */
2388 result
= expand_twoval_unop (sincos_optab
, op0
, target2
, target1
, 0);
2389 gcc_assert (result
);
2391 /* Move target1 and target2 to the memory locations indicated
2393 emit_move_insn (op1
, target1
);
2394 emit_move_insn (op2
, target2
);
2399 /* Expand a call to the internal cexpi builtin to the sincos math function.
2400 EXP is the expression that is a call to the builtin function; if convenient,
2401 the result should be placed in TARGET. SUBTARGET may be used as the target
2402 for computing one of EXP's operands. */
2405 expand_builtin_cexpi (tree exp
, rtx target
, rtx subtarget
)
2407 tree fndecl
= get_callee_fndecl (exp
);
2409 enum machine_mode mode
;
2412 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2415 arg
= CALL_EXPR_ARG (exp
, 0);
2416 type
= TREE_TYPE (arg
);
2417 mode
= TYPE_MODE (TREE_TYPE (arg
));
2419 /* Try expanding via a sincos optab, fall back to emitting a libcall
2420 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2421 is only generated from sincos, cexp or if we have either of them. */
2422 if (optab_handler (sincos_optab
, mode
)->insn_code
!= CODE_FOR_nothing
)
2424 op1
= gen_reg_rtx (mode
);
2425 op2
= gen_reg_rtx (mode
);
2427 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2429 /* Compute into op1 and op2. */
2430 expand_twoval_unop (sincos_optab
, op0
, op2
, op1
, 0);
2432 else if (TARGET_HAS_SINCOS
)
2434 tree call
, fn
= NULL_TREE
;
2438 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2439 fn
= built_in_decls
[BUILT_IN_SINCOSF
];
2440 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2441 fn
= built_in_decls
[BUILT_IN_SINCOS
];
2442 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2443 fn
= built_in_decls
[BUILT_IN_SINCOSL
];
2447 op1
= assign_temp (TREE_TYPE (arg
), 0, 1, 1);
2448 op2
= assign_temp (TREE_TYPE (arg
), 0, 1, 1);
2449 op1a
= copy_to_mode_reg (Pmode
, XEXP (op1
, 0));
2450 op2a
= copy_to_mode_reg (Pmode
, XEXP (op2
, 0));
2451 top1
= make_tree (build_pointer_type (TREE_TYPE (arg
)), op1a
);
2452 top2
= make_tree (build_pointer_type (TREE_TYPE (arg
)), op2a
);
2454 /* Make sure not to fold the sincos call again. */
2455 call
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
2456 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn
)),
2457 call
, 3, arg
, top1
, top2
));
2461 tree call
, fn
= NULL_TREE
, narg
;
2462 tree ctype
= build_complex_type (type
);
2464 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2465 fn
= built_in_decls
[BUILT_IN_CEXPF
];
2466 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2467 fn
= built_in_decls
[BUILT_IN_CEXP
];
2468 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2469 fn
= built_in_decls
[BUILT_IN_CEXPL
];
2473 /* If we don't have a decl for cexp create one. This is the
2474 friendliest fallback if the user calls __builtin_cexpi
2475 without full target C99 function support. */
2476 if (fn
== NULL_TREE
)
2479 const char *name
= NULL
;
2481 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2483 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2485 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2488 fntype
= build_function_type_list (ctype
, ctype
, NULL_TREE
);
2489 fn
= build_fn_decl (name
, fntype
);
2492 narg
= fold_build2 (COMPLEX_EXPR
, ctype
,
2493 build_real (type
, dconst0
), arg
);
2495 /* Make sure not to fold the cexp call again. */
2496 call
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
2497 return expand_expr (build_call_nary (ctype
, call
, 1, narg
),
2498 target
, VOIDmode
, EXPAND_NORMAL
);
2501 /* Now build the proper return type. */
2502 return expand_expr (build2 (COMPLEX_EXPR
, build_complex_type (type
),
2503 make_tree (TREE_TYPE (arg
), op2
),
2504 make_tree (TREE_TYPE (arg
), op1
)),
2505 target
, VOIDmode
, EXPAND_NORMAL
);
2508 /* Expand a call to one of the builtin rounding functions gcc defines
2509 as an extension (lfloor and lceil). As these are gcc extensions we
2510 do not need to worry about setting errno to EDOM.
2511 If expanding via optab fails, lower expression to (int)(floor(x)).
2512 EXP is the expression that is a call to the builtin function;
2513 if convenient, the result should be placed in TARGET. */
2516 expand_builtin_int_roundingfn (tree exp
, rtx target
)
2518 convert_optab builtin_optab
;
2519 rtx op0
, insns
, tmp
;
2520 tree fndecl
= get_callee_fndecl (exp
);
2521 enum built_in_function fallback_fn
;
2522 tree fallback_fndecl
;
2523 enum machine_mode mode
;
2526 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2529 arg
= CALL_EXPR_ARG (exp
, 0);
2531 switch (DECL_FUNCTION_CODE (fndecl
))
2533 CASE_FLT_FN (BUILT_IN_LCEIL
):
2534 CASE_FLT_FN (BUILT_IN_LLCEIL
):
2535 builtin_optab
= lceil_optab
;
2536 fallback_fn
= BUILT_IN_CEIL
;
2539 CASE_FLT_FN (BUILT_IN_LFLOOR
):
2540 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
2541 builtin_optab
= lfloor_optab
;
2542 fallback_fn
= BUILT_IN_FLOOR
;
2549 /* Make a suitable register to place result in. */
2550 mode
= TYPE_MODE (TREE_TYPE (exp
));
2552 target
= gen_reg_rtx (mode
);
2554 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2555 need to expand the argument again. This way, we will not perform
2556 side-effects more the once. */
2557 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2559 op0
= expand_expr (arg
, NULL
, VOIDmode
, EXPAND_NORMAL
);
2563 /* Compute into TARGET. */
2564 if (expand_sfix_optab (target
, op0
, builtin_optab
))
2566 /* Output the entire sequence. */
2567 insns
= get_insns ();
2573 /* If we were unable to expand via the builtin, stop the sequence
2574 (without outputting the insns). */
2577 /* Fall back to floating point rounding optab. */
2578 fallback_fndecl
= mathfn_built_in (TREE_TYPE (arg
), fallback_fn
);
2580 /* For non-C99 targets we may end up without a fallback fndecl here
2581 if the user called __builtin_lfloor directly. In this case emit
2582 a call to the floor/ceil variants nevertheless. This should result
2583 in the best user experience for not full C99 targets. */
2584 if (fallback_fndecl
== NULL_TREE
)
2587 const char *name
= NULL
;
2589 switch (DECL_FUNCTION_CODE (fndecl
))
2591 case BUILT_IN_LCEIL
:
2592 case BUILT_IN_LLCEIL
:
2595 case BUILT_IN_LCEILF
:
2596 case BUILT_IN_LLCEILF
:
2599 case BUILT_IN_LCEILL
:
2600 case BUILT_IN_LLCEILL
:
2603 case BUILT_IN_LFLOOR
:
2604 case BUILT_IN_LLFLOOR
:
2607 case BUILT_IN_LFLOORF
:
2608 case BUILT_IN_LLFLOORF
:
2611 case BUILT_IN_LFLOORL
:
2612 case BUILT_IN_LLFLOORL
:
2619 fntype
= build_function_type_list (TREE_TYPE (arg
),
2620 TREE_TYPE (arg
), NULL_TREE
);
2621 fallback_fndecl
= build_fn_decl (name
, fntype
);
2624 exp
= build_call_expr (fallback_fndecl
, 1, arg
);
2626 tmp
= expand_normal (exp
);
2628 /* Truncate the result of floating point optab to integer
2629 via expand_fix (). */
2630 target
= gen_reg_rtx (mode
);
2631 expand_fix (target
, tmp
, 0);
2636 /* Expand a call to one of the builtin math functions doing integer
2638 Return 0 if a normal call should be emitted rather than expanding the
2639 function in-line. EXP is the expression that is a call to the builtin
2640 function; if convenient, the result should be placed in TARGET. */
2643 expand_builtin_int_roundingfn_2 (tree exp
, rtx target
)
2645 convert_optab builtin_optab
;
2647 tree fndecl
= get_callee_fndecl (exp
);
2649 enum machine_mode mode
;
2651 /* There's no easy way to detect the case we need to set EDOM. */
2652 if (flag_errno_math
)
2655 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2658 arg
= CALL_EXPR_ARG (exp
, 0);
2660 switch (DECL_FUNCTION_CODE (fndecl
))
2662 CASE_FLT_FN (BUILT_IN_LRINT
):
2663 CASE_FLT_FN (BUILT_IN_LLRINT
):
2664 builtin_optab
= lrint_optab
; break;
2665 CASE_FLT_FN (BUILT_IN_LROUND
):
2666 CASE_FLT_FN (BUILT_IN_LLROUND
):
2667 builtin_optab
= lround_optab
; break;
2672 /* Make a suitable register to place result in. */
2673 mode
= TYPE_MODE (TREE_TYPE (exp
));
2675 target
= gen_reg_rtx (mode
);
2677 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2678 need to expand the argument again. This way, we will not perform
2679 side-effects more the once. */
2680 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2682 op0
= expand_expr (arg
, NULL
, VOIDmode
, EXPAND_NORMAL
);
2686 if (expand_sfix_optab (target
, op0
, builtin_optab
))
2688 /* Output the entire sequence. */
2689 insns
= get_insns ();
2695 /* If we were unable to expand via the builtin, stop the sequence
2696 (without outputting the insns) and call to the library function
2697 with the stabilized argument list. */
2700 target
= expand_call (exp
, target
, target
== const0_rtx
);
/* To evaluate powi(x,n), the floating point value x raised to the
   constant integer exponent n, we use a hybrid algorithm that
   combines the "window method" with look-up tables.  For an
   introduction to exponentiation algorithms and "addition chains",
   see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
   "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
   3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
   Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998.  */

/* Provide a default value for POWI_MAX_MULTS, the maximum number of
   multiplications to inline before calling the system library's pow
   function.  powi(x,n) requires at worst 2*bits(n)-2 multiplications,
   so this default never requires calling pow, powf or powl.  */

#ifndef POWI_MAX_MULTS
#define POWI_MAX_MULTS  (2*HOST_BITS_PER_WIDE_INT-2)
#endif

/* The size of the "optimal power tree" lookup table.  All
   exponents less than this value are simply looked up in the
   powi_table below.  This threshold is also used to size the
   cache of pseudo registers that hold intermediate results.  */
#define POWI_TABLE_SIZE 256

/* The size, in bits of the window, used in the "window method"
   exponentiation algorithm.  This is equivalent to a radix of
   (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method".  */
#define POWI_WINDOW_SIZE 3

/* The following table is an efficient representation of an
   "optimal power tree".  For each value, i, the corresponding
   value, j, in the table states than an optimal evaluation
   sequence for calculating pow(x,i) can be found by evaluating
   pow(x,j)*pow(x,i-j).  An optimal power tree for the first
   100 integers is given in Knuth's "Seminumerical algorithms".  */

static const unsigned char powi_table[POWI_TABLE_SIZE] =
  {
      0,   1,   1,   2,   2,   3,   3,   4,  /*   0 -   7 */
      4,   6,   5,   6,   6,  10,   7,   9,  /*   8 -  15 */
      8,  16,   9,  16,  10,  12,  11,  13,  /*  16 -  23 */
     12,  17,  13,  18,  14,  24,  15,  26,  /*  24 -  31 */
     16,  17,  17,  19,  18,  33,  19,  26,  /*  32 -  39 */
     20,  25,  21,  40,  22,  27,  23,  44,  /*  40 -  47 */
     24,  32,  25,  34,  26,  29,  27,  44,  /*  48 -  55 */
     28,  31,  29,  34,  30,  60,  31,  36,  /*  56 -  63 */
     32,  64,  33,  34,  34,  46,  35,  37,  /*  64 -  71 */
     36,  65,  37,  50,  38,  48,  39,  69,  /*  72 -  79 */
     40,  49,  41,  43,  42,  51,  43,  58,  /*  80 -  87 */
     44,  64,  45,  47,  46,  59,  47,  76,  /*  88 -  95 */
     48,  65,  49,  66,  50,  67,  51,  66,  /*  96 - 103 */
     52,  70,  53,  74,  54, 104,  55,  74,  /* 104 - 111 */
     56,  64,  57,  69,  58,  78,  59,  68,  /* 112 - 119 */
     60,  61,  61,  80,  62,  75,  63,  68,  /* 120 - 127 */
     64,  65,  65, 128,  66, 129,  67,  90,  /* 128 - 135 */
     68,  73,  69, 131,  70,  94,  71,  88,  /* 136 - 143 */
     72, 128,  73,  98,  74, 132,  75, 121,  /* 144 - 151 */
     76, 102,  77, 124,  78, 132,  79, 106,  /* 152 - 159 */
     80,  97,  81, 160,  82,  99,  83, 134,  /* 160 - 167 */
     84,  86,  85,  95,  86, 160,  87, 100,  /* 168 - 175 */
     88, 113,  89,  98,  90, 107,  91, 122,  /* 176 - 183 */
     92, 111,  93, 102,  94, 126,  95, 150,  /* 184 - 191 */
     96, 128,  97, 130,  98, 133,  99, 195,  /* 192 - 199 */
    100, 128, 101, 123, 102, 164, 103, 138,  /* 200 - 207 */
    104, 145, 105, 146, 106, 109, 107, 149,  /* 208 - 215 */
    108, 200, 109, 146, 110, 170, 111, 157,  /* 216 - 223 */
    112, 128, 113, 130, 114, 182, 115, 132,  /* 224 - 231 */
    116, 200, 117, 132, 118, 158, 119, 206,  /* 232 - 239 */
    120, 240, 121, 162, 122, 147, 123, 152,  /* 240 - 247 */
    124, 166, 125, 214, 126, 138, 127, 153,  /* 248 - 255 */
  };
2778 /* Return the number of multiplications required to calculate
2779 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2780 subroutine of powi_cost. CACHE is an array indicating
2781 which exponents have already been calculated. */
2784 powi_lookup_cost (unsigned HOST_WIDE_INT n
, bool *cache
)
2786 /* If we've already calculated this exponent, then this evaluation
2787 doesn't require any additional multiplications. */
2792 return powi_lookup_cost (n
- powi_table
[n
], cache
)
2793 + powi_lookup_cost (powi_table
[n
], cache
) + 1;
2796 /* Return the number of multiplications required to calculate
2797 powi(x,n) for an arbitrary x, given the exponent N. This
2798 function needs to be kept in sync with expand_powi below. */
2801 powi_cost (HOST_WIDE_INT n
)
2803 bool cache
[POWI_TABLE_SIZE
];
2804 unsigned HOST_WIDE_INT digit
;
2805 unsigned HOST_WIDE_INT val
;
2811 /* Ignore the reciprocal when calculating the cost. */
2812 val
= (n
< 0) ? -n
: n
;
2814 /* Initialize the exponent cache. */
2815 memset (cache
, 0, POWI_TABLE_SIZE
* sizeof (bool));
2820 while (val
>= POWI_TABLE_SIZE
)
2824 digit
= val
& ((1 << POWI_WINDOW_SIZE
) - 1);
2825 result
+= powi_lookup_cost (digit
, cache
)
2826 + POWI_WINDOW_SIZE
+ 1;
2827 val
>>= POWI_WINDOW_SIZE
;
2836 return result
+ powi_lookup_cost (val
, cache
);
2839 /* Recursive subroutine of expand_powi. This function takes the array,
2840 CACHE, of already calculated exponents and an exponent N and returns
2841 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2844 expand_powi_1 (enum machine_mode mode
, unsigned HOST_WIDE_INT n
, rtx
*cache
)
2846 unsigned HOST_WIDE_INT digit
;
2850 if (n
< POWI_TABLE_SIZE
)
2855 target
= gen_reg_rtx (mode
);
2858 op0
= expand_powi_1 (mode
, n
- powi_table
[n
], cache
);
2859 op1
= expand_powi_1 (mode
, powi_table
[n
], cache
);
2863 target
= gen_reg_rtx (mode
);
2864 digit
= n
& ((1 << POWI_WINDOW_SIZE
) - 1);
2865 op0
= expand_powi_1 (mode
, n
- digit
, cache
);
2866 op1
= expand_powi_1 (mode
, digit
, cache
);
2870 target
= gen_reg_rtx (mode
);
2871 op0
= expand_powi_1 (mode
, n
>> 1, cache
);
2875 result
= expand_mult (mode
, op0
, op1
, target
, 0);
2876 if (result
!= target
)
2877 emit_move_insn (target
, result
);
2881 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2882 floating point operand in mode MODE, and N is the exponent. This
2883 function needs to be kept in sync with powi_cost above. */
2886 expand_powi (rtx x
, enum machine_mode mode
, HOST_WIDE_INT n
)
2888 unsigned HOST_WIDE_INT val
;
2889 rtx cache
[POWI_TABLE_SIZE
];
2893 return CONST1_RTX (mode
);
2895 val
= (n
< 0) ? -n
: n
;
2897 memset (cache
, 0, sizeof (cache
));
2900 result
= expand_powi_1 (mode
, (n
< 0) ? -n
: n
, cache
);
2902 /* If the original exponent was negative, reciprocate the result. */
2904 result
= expand_binop (mode
, sdiv_optab
, CONST1_RTX (mode
),
2905 result
, NULL_RTX
, 0, OPTAB_LIB_WIDEN
);
2910 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
2911 a normal call should be emitted rather than expanding the function
2912 in-line. EXP is the expression that is a call to the builtin
2913 function; if convenient, the result should be placed in TARGET. */
2916 expand_builtin_pow (tree exp
, rtx target
, rtx subtarget
)
2920 tree type
= TREE_TYPE (exp
);
2921 REAL_VALUE_TYPE cint
, c
, c2
;
2924 enum machine_mode mode
= TYPE_MODE (type
);
2926 if (! validate_arglist (exp
, REAL_TYPE
, REAL_TYPE
, VOID_TYPE
))
2929 arg0
= CALL_EXPR_ARG (exp
, 0);
2930 arg1
= CALL_EXPR_ARG (exp
, 1);
2932 if (TREE_CODE (arg1
) != REAL_CST
2933 || TREE_OVERFLOW (arg1
))
2934 return expand_builtin_mathfn_2 (exp
, target
, subtarget
);
2936 /* Handle constant exponents. */
2938 /* For integer valued exponents we can expand to an optimal multiplication
2939 sequence using expand_powi. */
2940 c
= TREE_REAL_CST (arg1
);
2941 n
= real_to_integer (&c
);
2942 real_from_integer (&cint
, VOIDmode
, n
, n
< 0 ? -1 : 0, 0);
2943 if (real_identical (&c
, &cint
)
2944 && ((n
>= -1 && n
<= 2)
2945 || (flag_unsafe_math_optimizations
2946 && optimize_insn_for_speed_p ()
2947 && powi_cost (n
) <= POWI_MAX_MULTS
)))
2949 op
= expand_expr (arg0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2952 op
= force_reg (mode
, op
);
2953 op
= expand_powi (op
, mode
, n
);
2958 narg0
= builtin_save_expr (arg0
);
2960 /* If the exponent is not integer valued, check if it is half of an integer.
2961 In this case we can expand to sqrt (x) * x**(n/2). */
2962 fn
= mathfn_built_in (type
, BUILT_IN_SQRT
);
2963 if (fn
!= NULL_TREE
)
2965 real_arithmetic (&c2
, MULT_EXPR
, &c
, &dconst2
);
2966 n
= real_to_integer (&c2
);
2967 real_from_integer (&cint
, VOIDmode
, n
, n
< 0 ? -1 : 0, 0);
2968 if (real_identical (&c2
, &cint
)
2969 && ((flag_unsafe_math_optimizations
2970 && optimize_insn_for_speed_p ()
2971 && powi_cost (n
/2) <= POWI_MAX_MULTS
)
2974 tree call_expr
= build_call_expr (fn
, 1, narg0
);
2975 /* Use expand_expr in case the newly built call expression
2976 was folded to a non-call. */
2977 op
= expand_expr (call_expr
, subtarget
, mode
, EXPAND_NORMAL
);
2980 op2
= expand_expr (narg0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2981 op2
= force_reg (mode
, op2
);
2982 op2
= expand_powi (op2
, mode
, abs (n
/ 2));
2983 op
= expand_simple_binop (mode
, MULT
, op
, op2
, NULL_RTX
,
2984 0, OPTAB_LIB_WIDEN
);
2985 /* If the original exponent was negative, reciprocate the
2988 op
= expand_binop (mode
, sdiv_optab
, CONST1_RTX (mode
),
2989 op
, NULL_RTX
, 0, OPTAB_LIB_WIDEN
);
2995 /* Try if the exponent is a third of an integer. In this case
2996 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
2997 different from pow (x, 1./3.) due to rounding and behavior
2998 with negative x we need to constrain this transformation to
2999 unsafe math and positive x or finite math. */
3000 fn
= mathfn_built_in (type
, BUILT_IN_CBRT
);
3002 && flag_unsafe_math_optimizations
3003 && (tree_expr_nonnegative_p (arg0
)
3004 || !HONOR_NANS (mode
)))
3006 REAL_VALUE_TYPE dconst3
;
3007 real_from_integer (&dconst3
, VOIDmode
, 3, 0, 0);
3008 real_arithmetic (&c2
, MULT_EXPR
, &c
, &dconst3
);
3009 real_round (&c2
, mode
, &c2
);
3010 n
= real_to_integer (&c2
);
3011 real_from_integer (&cint
, VOIDmode
, n
, n
< 0 ? -1 : 0, 0);
3012 real_arithmetic (&c2
, RDIV_EXPR
, &cint
, &dconst3
);
3013 real_convert (&c2
, mode
, &c2
);
3014 if (real_identical (&c2
, &c
)
3015 && ((optimize_insn_for_speed_p ()
3016 && powi_cost (n
/3) <= POWI_MAX_MULTS
)
3019 tree call_expr
= build_call_expr (fn
, 1,narg0
);
3020 op
= expand_builtin (call_expr
, NULL_RTX
, subtarget
, mode
, 0);
3021 if (abs (n
) % 3 == 2)
3022 op
= expand_simple_binop (mode
, MULT
, op
, op
, op
,
3023 0, OPTAB_LIB_WIDEN
);
3026 op2
= expand_expr (narg0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
3027 op2
= force_reg (mode
, op2
);
3028 op2
= expand_powi (op2
, mode
, abs (n
/ 3));
3029 op
= expand_simple_binop (mode
, MULT
, op
, op2
, NULL_RTX
,
3030 0, OPTAB_LIB_WIDEN
);
3031 /* If the original exponent was negative, reciprocate the
3034 op
= expand_binop (mode
, sdiv_optab
, CONST1_RTX (mode
),
3035 op
, NULL_RTX
, 0, OPTAB_LIB_WIDEN
);
3041 /* Fall back to optab expansion. */
3042 return expand_builtin_mathfn_2 (exp
, target
, subtarget
);
3045 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3046 a normal call should be emitted rather than expanding the function
3047 in-line. EXP is the expression that is a call to the builtin
3048 function; if convenient, the result should be placed in TARGET. */
3051 expand_builtin_powi (tree exp
, rtx target
, rtx subtarget
)
3055 enum machine_mode mode
;
3056 enum machine_mode mode2
;
3058 if (! validate_arglist (exp
, REAL_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3061 arg0
= CALL_EXPR_ARG (exp
, 0);
3062 arg1
= CALL_EXPR_ARG (exp
, 1);
3063 mode
= TYPE_MODE (TREE_TYPE (exp
));
3065 /* Handle constant power. */
3067 if (TREE_CODE (arg1
) == INTEGER_CST
3068 && !TREE_OVERFLOW (arg1
))
3070 HOST_WIDE_INT n
= TREE_INT_CST_LOW (arg1
);
3072 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3073 Otherwise, check the number of multiplications required. */
3074 if ((TREE_INT_CST_HIGH (arg1
) == 0
3075 || TREE_INT_CST_HIGH (arg1
) == -1)
3076 && ((n
>= -1 && n
<= 2)
3077 || (optimize_insn_for_speed_p ()
3078 && powi_cost (n
) <= POWI_MAX_MULTS
)))
3080 op0
= expand_expr (arg0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
3081 op0
= force_reg (mode
, op0
);
3082 return expand_powi (op0
, mode
, n
);
3086 /* Emit a libcall to libgcc. */
3088 /* Mode of the 2nd argument must match that of an int. */
3089 mode2
= mode_for_size (INT_TYPE_SIZE
, MODE_INT
, 0);
3091 if (target
== NULL_RTX
)
3092 target
= gen_reg_rtx (mode
);
3094 op0
= expand_expr (arg0
, subtarget
, mode
, EXPAND_NORMAL
);
3095 if (GET_MODE (op0
) != mode
)
3096 op0
= convert_to_mode (mode
, op0
, 0);
3097 op1
= expand_expr (arg1
, NULL_RTX
, mode2
, EXPAND_NORMAL
);
3098 if (GET_MODE (op1
) != mode2
)
3099 op1
= convert_to_mode (mode2
, op1
, 0);
3101 target
= emit_library_call_value (optab_libfunc (powi_optab
, mode
),
3102 target
, LCT_CONST
, mode
, 2,
3103 op0
, mode
, op1
, mode2
);
3108 /* Expand expression EXP which is a call to the strlen builtin. Return
3109 NULL_RTX if we failed the caller should emit a normal call, otherwise
3110 try to get the result in TARGET, if convenient. */
3113 expand_builtin_strlen (tree exp
, rtx target
,
3114 enum machine_mode target_mode
)
3116 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
3122 tree src
= CALL_EXPR_ARG (exp
, 0);
3123 rtx result
, src_reg
, char_rtx
, before_strlen
;
3124 enum machine_mode insn_mode
= target_mode
, char_mode
;
3125 enum insn_code icode
= CODE_FOR_nothing
;
3128 /* If the length can be computed at compile-time, return it. */
3129 len
= c_strlen (src
, 0);
3131 return expand_expr (len
, target
, target_mode
, EXPAND_NORMAL
);
3133 /* If the length can be computed at compile-time and is constant
3134 integer, but there are side-effects in src, evaluate
3135 src for side-effects, then return len.
3136 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3137 can be optimized into: i++; x = 3; */
3138 len
= c_strlen (src
, 1);
3139 if (len
&& TREE_CODE (len
) == INTEGER_CST
)
3141 expand_expr (src
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3142 return expand_expr (len
, target
, target_mode
, EXPAND_NORMAL
);
3145 align
= get_pointer_alignment (src
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
3147 /* If SRC is not a pointer type, don't do this operation inline. */
3151 /* Bail out if we can't compute strlen in the right mode. */
3152 while (insn_mode
!= VOIDmode
)
3154 icode
= optab_handler (strlen_optab
, insn_mode
)->insn_code
;
3155 if (icode
!= CODE_FOR_nothing
)
3158 insn_mode
= GET_MODE_WIDER_MODE (insn_mode
);
3160 if (insn_mode
== VOIDmode
)
3163 /* Make a place to write the result of the instruction. */
3167 && GET_MODE (result
) == insn_mode
3168 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
3169 result
= gen_reg_rtx (insn_mode
);
3171 /* Make a place to hold the source address. We will not expand
3172 the actual source until we are sure that the expansion will
3173 not fail -- there are trees that cannot be expanded twice. */
3174 src_reg
= gen_reg_rtx (Pmode
);
3176 /* Mark the beginning of the strlen sequence so we can emit the
3177 source operand later. */
3178 before_strlen
= get_last_insn ();
3180 char_rtx
= const0_rtx
;
3181 char_mode
= insn_data
[(int) icode
].operand
[2].mode
;
3182 if (! (*insn_data
[(int) icode
].operand
[2].predicate
) (char_rtx
,
3184 char_rtx
= copy_to_mode_reg (char_mode
, char_rtx
);
3186 pat
= GEN_FCN (icode
) (result
, gen_rtx_MEM (BLKmode
, src_reg
),
3187 char_rtx
, GEN_INT (align
));
3192 /* Now that we are assured of success, expand the source. */
3194 pat
= expand_expr (src
, src_reg
, ptr_mode
, EXPAND_NORMAL
);
3196 emit_move_insn (src_reg
, pat
);
3201 emit_insn_after (pat
, before_strlen
);
3203 emit_insn_before (pat
, get_insns ());
3205 /* Return the value in the proper mode for this function. */
3206 if (GET_MODE (result
) == target_mode
)
3208 else if (target
!= 0)
3209 convert_move (target
, result
, 0);
3211 target
= convert_to_mode (target_mode
, result
, 0);
3217 /* Expand a call to the strstr builtin. Return NULL_RTX if we failed the
3218 caller should emit a normal call, otherwise try to get the result
3219 in TARGET, if convenient (and in mode MODE if that's convenient). */
3222 expand_builtin_strstr (tree exp
, rtx target
, enum machine_mode mode
)
3224 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3226 tree type
= TREE_TYPE (exp
);
3227 tree result
= fold_builtin_strstr (CALL_EXPR_ARG (exp
, 0),
3228 CALL_EXPR_ARG (exp
, 1), type
);
3230 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3235 /* Expand a call to the strchr builtin. Return NULL_RTX if we failed the
3236 caller should emit a normal call, otherwise try to get the result
3237 in TARGET, if convenient (and in mode MODE if that's convenient). */
3240 expand_builtin_strchr (tree exp
, rtx target
, enum machine_mode mode
)
3242 if (validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3244 tree type
= TREE_TYPE (exp
);
3245 tree result
= fold_builtin_strchr (CALL_EXPR_ARG (exp
, 0),
3246 CALL_EXPR_ARG (exp
, 1), type
);
3248 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3250 /* FIXME: Should use strchrM optab so that ports can optimize this. */
3255 /* Expand a call to the strrchr builtin. Return NULL_RTX if we failed the
3256 caller should emit a normal call, otherwise try to get the result
3257 in TARGET, if convenient (and in mode MODE if that's convenient). */
3260 expand_builtin_strrchr (tree exp
, rtx target
, enum machine_mode mode
)
3262 if (validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3264 tree type
= TREE_TYPE (exp
);
3265 tree result
= fold_builtin_strrchr (CALL_EXPR_ARG (exp
, 0),
3266 CALL_EXPR_ARG (exp
, 1), type
);
3268 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3273 /* Expand a call to the strpbrk builtin. Return NULL_RTX if we failed the
3274 caller should emit a normal call, otherwise try to get the result
3275 in TARGET, if convenient (and in mode MODE if that's convenient). */
3278 expand_builtin_strpbrk (tree exp
, rtx target
, enum machine_mode mode
)
3280 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3282 tree type
= TREE_TYPE (exp
);
3283 tree result
= fold_builtin_strpbrk (CALL_EXPR_ARG (exp
, 0),
3284 CALL_EXPR_ARG (exp
, 1), type
);
3286 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3291 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3292 bytes from constant string DATA + OFFSET and return it as target
3296 builtin_memcpy_read_str (void *data
, HOST_WIDE_INT offset
,
3297 enum machine_mode mode
)
3299 const char *str
= (const char *) data
;
3301 gcc_assert (offset
>= 0
3302 && ((unsigned HOST_WIDE_INT
) offset
+ GET_MODE_SIZE (mode
)
3303 <= strlen (str
) + 1));
3305 return c_readstr (str
+ offset
, mode
);
3308 /* Expand a call EXP to the memcpy builtin.
3309 Return NULL_RTX if we failed, the caller should emit a normal call,
3310 otherwise try to get the result in TARGET, if convenient (and in
3311 mode MODE if that's convenient). */
3314 expand_builtin_memcpy (tree exp
, rtx target
, enum machine_mode mode
)
3316 tree fndecl
= get_callee_fndecl (exp
);
3318 if (!validate_arglist (exp
,
3319 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3323 tree dest
= CALL_EXPR_ARG (exp
, 0);
3324 tree src
= CALL_EXPR_ARG (exp
, 1);
3325 tree len
= CALL_EXPR_ARG (exp
, 2);
3326 const char *src_str
;
3327 unsigned int src_align
= get_pointer_alignment (src
, BIGGEST_ALIGNMENT
);
3328 unsigned int dest_align
3329 = get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
3330 rtx dest_mem
, src_mem
, dest_addr
, len_rtx
;
3331 tree result
= fold_builtin_memory_op (dest
, src
, len
,
3332 TREE_TYPE (TREE_TYPE (fndecl
)),
3334 HOST_WIDE_INT expected_size
= -1;
3335 unsigned int expected_align
= 0;
3336 tree_ann_common_t ann
;
3340 while (TREE_CODE (result
) == COMPOUND_EXPR
)
3342 expand_expr (TREE_OPERAND (result
, 0), const0_rtx
, VOIDmode
,
3344 result
= TREE_OPERAND (result
, 1);
3346 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3349 /* If DEST is not a pointer type, call the normal function. */
3350 if (dest_align
== 0)
3353 /* If either SRC is not a pointer type, don't do this
3354 operation in-line. */
3358 ann
= tree_common_ann (exp
);
3360 stringop_block_profile (ann
->stmt
, &expected_align
, &expected_size
);
3362 if (expected_align
< dest_align
)
3363 expected_align
= dest_align
;
3364 dest_mem
= get_memory_rtx (dest
, len
);
3365 set_mem_align (dest_mem
, dest_align
);
3366 len_rtx
= expand_normal (len
);
3367 src_str
= c_getstr (src
);
3369 /* If SRC is a string constant and block move would be done
3370 by pieces, we can avoid loading the string from memory
3371 and only stored the computed constants. */
3373 && GET_CODE (len_rtx
) == CONST_INT
3374 && (unsigned HOST_WIDE_INT
) INTVAL (len_rtx
) <= strlen (src_str
) + 1
3375 && can_store_by_pieces (INTVAL (len_rtx
), builtin_memcpy_read_str
,
3376 CONST_CAST (char *, src_str
),
3379 dest_mem
= store_by_pieces (dest_mem
, INTVAL (len_rtx
),
3380 builtin_memcpy_read_str
,
3381 CONST_CAST (char *, src_str
),
3382 dest_align
, false, 0);
3383 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3384 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3388 src_mem
= get_memory_rtx (src
, len
);
3389 set_mem_align (src_mem
, src_align
);
3391 /* Copy word part most expediently. */
3392 dest_addr
= emit_block_move_hints (dest_mem
, src_mem
, len_rtx
,
3393 CALL_EXPR_TAILCALL (exp
)
3394 ? BLOCK_OP_TAILCALL
: BLOCK_OP_NORMAL
,
3395 expected_align
, expected_size
);
3399 dest_addr
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3400 dest_addr
= convert_memory_address (ptr_mode
, dest_addr
);
3406 /* Expand a call EXP to the mempcpy builtin.
3407 Return NULL_RTX if we failed; the caller should emit a normal call,
3408 otherwise try to get the result in TARGET, if convenient (and in
3409 mode MODE if that's convenient). If ENDP is 0 return the
3410 destination pointer, if ENDP is 1 return the end pointer ala
3411 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3415 expand_builtin_mempcpy (tree exp
, rtx target
, enum machine_mode mode
)
3417 if (!validate_arglist (exp
,
3418 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3422 tree dest
= CALL_EXPR_ARG (exp
, 0);
3423 tree src
= CALL_EXPR_ARG (exp
, 1);
3424 tree len
= CALL_EXPR_ARG (exp
, 2);
3425 return expand_builtin_mempcpy_args (dest
, src
, len
,
3427 target
, mode
, /*endp=*/ 1);
3431 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3432 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3433 so that this can also be called without constructing an actual CALL_EXPR.
3434 TYPE is the return type of the call. The other arguments and return value
3435 are the same as for expand_builtin_mempcpy. */
3438 expand_builtin_mempcpy_args (tree dest
, tree src
, tree len
, tree type
,
3439 rtx target
, enum machine_mode mode
, int endp
)
3441 /* If return value is ignored, transform mempcpy into memcpy. */
3442 if (target
== const0_rtx
&& implicit_built_in_decls
[BUILT_IN_MEMCPY
])
3444 tree fn
= implicit_built_in_decls
[BUILT_IN_MEMCPY
];
3445 tree result
= build_call_expr (fn
, 3, dest
, src
, len
);
3447 while (TREE_CODE (result
) == COMPOUND_EXPR
)
3449 expand_expr (TREE_OPERAND (result
, 0), const0_rtx
, VOIDmode
,
3451 result
= TREE_OPERAND (result
, 1);
3453 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3457 const char *src_str
;
3458 unsigned int src_align
= get_pointer_alignment (src
, BIGGEST_ALIGNMENT
);
3459 unsigned int dest_align
3460 = get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
3461 rtx dest_mem
, src_mem
, len_rtx
;
3462 tree result
= fold_builtin_memory_op (dest
, src
, len
, type
, false, endp
);
3466 while (TREE_CODE (result
) == COMPOUND_EXPR
)
3468 expand_expr (TREE_OPERAND (result
, 0), const0_rtx
, VOIDmode
,
3470 result
= TREE_OPERAND (result
, 1);
3472 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3475 /* If either SRC or DEST is not a pointer type, don't do this
3476 operation in-line. */
3477 if (dest_align
== 0 || src_align
== 0)
3480 /* If LEN is not constant, call the normal function. */
3481 if (! host_integerp (len
, 1))
3484 len_rtx
= expand_normal (len
);
3485 src_str
= c_getstr (src
);
3487 /* If SRC is a string constant and block move would be done
3488 by pieces, we can avoid loading the string from memory
3489 and only stored the computed constants. */
3491 && GET_CODE (len_rtx
) == CONST_INT
3492 && (unsigned HOST_WIDE_INT
) INTVAL (len_rtx
) <= strlen (src_str
) + 1
3493 && can_store_by_pieces (INTVAL (len_rtx
), builtin_memcpy_read_str
,
3494 CONST_CAST (char *, src_str
),
3497 dest_mem
= get_memory_rtx (dest
, len
);
3498 set_mem_align (dest_mem
, dest_align
);
3499 dest_mem
= store_by_pieces (dest_mem
, INTVAL (len_rtx
),
3500 builtin_memcpy_read_str
,
3501 CONST_CAST (char *, src_str
),
3502 dest_align
, false, endp
);
3503 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3504 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3508 if (GET_CODE (len_rtx
) == CONST_INT
3509 && can_move_by_pieces (INTVAL (len_rtx
),
3510 MIN (dest_align
, src_align
)))
3512 dest_mem
= get_memory_rtx (dest
, len
);
3513 set_mem_align (dest_mem
, dest_align
);
3514 src_mem
= get_memory_rtx (src
, len
);
3515 set_mem_align (src_mem
, src_align
);
3516 dest_mem
= move_by_pieces (dest_mem
, src_mem
, INTVAL (len_rtx
),
3517 MIN (dest_align
, src_align
), endp
);
3518 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3519 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3527 /* Expand expression EXP, which is a call to the memmove builtin. Return
3528 NULL_RTX if we failed; the caller should emit a normal call. */
3531 expand_builtin_memmove (tree exp
, rtx target
, enum machine_mode mode
, int ignore
)
3533 if (!validate_arglist (exp
,
3534 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3538 tree dest
= CALL_EXPR_ARG (exp
, 0);
3539 tree src
= CALL_EXPR_ARG (exp
, 1);
3540 tree len
= CALL_EXPR_ARG (exp
, 2);
3541 return expand_builtin_memmove_args (dest
, src
, len
, TREE_TYPE (exp
),
3542 target
, mode
, ignore
);
3546 /* Helper function to do the actual work for expand_builtin_memmove. The
3547 arguments to the builtin_memmove call DEST, SRC, and LEN are broken out
3548 so that this can also be called without constructing an actual CALL_EXPR.
3549 TYPE is the return type of the call. The other arguments and return value
3550 are the same as for expand_builtin_memmove. */
3553 expand_builtin_memmove_args (tree dest
, tree src
, tree len
,
3554 tree type
, rtx target
, enum machine_mode mode
,
3557 tree result
= fold_builtin_memory_op (dest
, src
, len
, type
, ignore
, /*endp=*/3);
3561 STRIP_TYPE_NOPS (result
);
3562 while (TREE_CODE (result
) == COMPOUND_EXPR
)
3564 expand_expr (TREE_OPERAND (result
, 0), const0_rtx
, VOIDmode
,
3566 result
= TREE_OPERAND (result
, 1);
3568 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3571 /* Otherwise, call the normal function. */
3575 /* Expand expression EXP, which is a call to the bcopy builtin. Return
3576 NULL_RTX if we failed the caller should emit a normal call. */
3579 expand_builtin_bcopy (tree exp
, int ignore
)
3581 tree type
= TREE_TYPE (exp
);
3582 tree src
, dest
, size
;
3584 if (!validate_arglist (exp
,
3585 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3588 src
= CALL_EXPR_ARG (exp
, 0);
3589 dest
= CALL_EXPR_ARG (exp
, 1);
3590 size
= CALL_EXPR_ARG (exp
, 2);
3592 /* Transform bcopy(ptr x, ptr y, int z) to memmove(ptr y, ptr x, size_t z).
3593 This is done this way so that if it isn't expanded inline, we fall
3594 back to calling bcopy instead of memmove. */
3595 return expand_builtin_memmove_args (dest
, src
,
3596 fold_convert (sizetype
, size
),
3597 type
, const0_rtx
, VOIDmode
,
3602 # define HAVE_movstr 0
3603 # define CODE_FOR_movstr CODE_FOR_nothing
3606 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3607 we failed, the caller should emit a normal call, otherwise try to
3608 get the result in TARGET, if convenient. If ENDP is 0 return the
3609 destination pointer, if ENDP is 1 return the end pointer ala
3610 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3614 expand_movstr (tree dest
, tree src
, rtx target
, int endp
)
3620 const struct insn_data
* data
;
3625 dest_mem
= get_memory_rtx (dest
, NULL
);
3626 src_mem
= get_memory_rtx (src
, NULL
);
3629 target
= force_reg (Pmode
, XEXP (dest_mem
, 0));
3630 dest_mem
= replace_equiv_address (dest_mem
, target
);
3631 end
= gen_reg_rtx (Pmode
);
3635 if (target
== 0 || target
== const0_rtx
)
3637 end
= gen_reg_rtx (Pmode
);
3645 data
= insn_data
+ CODE_FOR_movstr
;
3647 if (data
->operand
[0].mode
!= VOIDmode
)
3648 end
= gen_lowpart (data
->operand
[0].mode
, end
);
3650 insn
= data
->genfun (end
, dest_mem
, src_mem
);
3656 /* movstr is supposed to set end to the address of the NUL
3657 terminator. If the caller requested a mempcpy-like return value,
3659 if (endp
== 1 && target
!= const0_rtx
)
3661 rtx tem
= plus_constant (gen_lowpart (GET_MODE (target
), end
), 1);
3662 emit_move_insn (target
, force_operand (tem
, NULL_RTX
));
3668 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3669 NULL_RTX if we failed the caller should emit a normal call, otherwise
3670 try to get the result in TARGET, if convenient (and in mode MODE if that's
3674 expand_builtin_strcpy (tree fndecl
, tree exp
, rtx target
, enum machine_mode mode
)
3676 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3678 tree dest
= CALL_EXPR_ARG (exp
, 0);
3679 tree src
= CALL_EXPR_ARG (exp
, 1);
3680 return expand_builtin_strcpy_args (fndecl
, dest
, src
, target
, mode
);
3685 /* Helper function to do the actual work for expand_builtin_strcpy. The
3686 arguments to the builtin_strcpy call DEST and SRC are broken out
3687 so that this can also be called without constructing an actual CALL_EXPR.
3688 The other arguments and return value are the same as for
3689 expand_builtin_strcpy. */
3692 expand_builtin_strcpy_args (tree fndecl
, tree dest
, tree src
,
3693 rtx target
, enum machine_mode mode
)
3695 tree result
= fold_builtin_strcpy (fndecl
, dest
, src
, 0);
3697 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3698 return expand_movstr (dest
, src
, target
, /*endp=*/0);
3702 /* Expand a call EXP to the stpcpy builtin.
3703 Return NULL_RTX if we failed the caller should emit a normal call,
3704 otherwise try to get the result in TARGET, if convenient (and in
3705 mode MODE if that's convenient). */
3708 expand_builtin_stpcpy (tree exp
, rtx target
, enum machine_mode mode
)
3712 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3715 dst
= CALL_EXPR_ARG (exp
, 0);
3716 src
= CALL_EXPR_ARG (exp
, 1);
3718 /* If return value is ignored, transform stpcpy into strcpy. */
3719 if (target
== const0_rtx
&& implicit_built_in_decls
[BUILT_IN_STRCPY
])
3721 tree fn
= implicit_built_in_decls
[BUILT_IN_STRCPY
];
3722 tree result
= build_call_expr (fn
, 2, dst
, src
);
3724 STRIP_NOPS (result
);
3725 while (TREE_CODE (result
) == COMPOUND_EXPR
)
3727 expand_expr (TREE_OPERAND (result
, 0), const0_rtx
, VOIDmode
,
3729 result
= TREE_OPERAND (result
, 1);
3731 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3738 /* Ensure we get an actual string whose length can be evaluated at
3739 compile-time, not an expression containing a string. This is
3740 because the latter will potentially produce pessimized code
3741 when used to produce the return value. */
3742 if (! c_getstr (src
) || ! (len
= c_strlen (src
, 0)))
3743 return expand_movstr (dst
, src
, target
, /*endp=*/2);
3745 lenp1
= size_binop (PLUS_EXPR
, len
, ssize_int (1));
3746 ret
= expand_builtin_mempcpy_args (dst
, src
, lenp1
, TREE_TYPE (exp
),
3747 target
, mode
, /*endp=*/2);
3752 if (TREE_CODE (len
) == INTEGER_CST
)
3754 rtx len_rtx
= expand_normal (len
);
3756 if (GET_CODE (len_rtx
) == CONST_INT
)
3758 ret
= expand_builtin_strcpy_args (get_callee_fndecl (exp
),
3759 dst
, src
, target
, mode
);
3765 if (mode
!= VOIDmode
)
3766 target
= gen_reg_rtx (mode
);
3768 target
= gen_reg_rtx (GET_MODE (ret
));
3770 if (GET_MODE (target
) != GET_MODE (ret
))
3771 ret
= gen_lowpart (GET_MODE (target
), ret
);
3773 ret
= plus_constant (ret
, INTVAL (len_rtx
));
3774 ret
= emit_move_insn (target
, force_operand (ret
, NULL_RTX
));
3782 return expand_movstr (dst
, src
, target
, /*endp=*/2);
3786 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3787 bytes from constant string DATA + OFFSET and return it as target
3791 builtin_strncpy_read_str (void *data
, HOST_WIDE_INT offset
,
3792 enum machine_mode mode
)
3794 const char *str
= (const char *) data
;
3796 if ((unsigned HOST_WIDE_INT
) offset
> strlen (str
))
3799 return c_readstr (str
+ offset
, mode
);
3802 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3803 NULL_RTX if we failed the caller should emit a normal call. */
3806 expand_builtin_strncpy (tree exp
, rtx target
, enum machine_mode mode
)
3808 tree fndecl
= get_callee_fndecl (exp
);
3810 if (validate_arglist (exp
,
3811 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3813 tree dest
= CALL_EXPR_ARG (exp
, 0);
3814 tree src
= CALL_EXPR_ARG (exp
, 1);
3815 tree len
= CALL_EXPR_ARG (exp
, 2);
3816 tree slen
= c_strlen (src
, 1);
3817 tree result
= fold_builtin_strncpy (fndecl
, dest
, src
, len
, slen
);
3821 while (TREE_CODE (result
) == COMPOUND_EXPR
)
3823 expand_expr (TREE_OPERAND (result
, 0), const0_rtx
, VOIDmode
,
3825 result
= TREE_OPERAND (result
, 1);
3827 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3830 /* We must be passed a constant len and src parameter. */
3831 if (!host_integerp (len
, 1) || !slen
|| !host_integerp (slen
, 1))
3834 slen
= size_binop (PLUS_EXPR
, slen
, ssize_int (1));
3836 /* We're required to pad with trailing zeros if the requested
3837 len is greater than strlen(s2)+1. In that case try to
3838 use store_by_pieces, if it fails, punt. */
3839 if (tree_int_cst_lt (slen
, len
))
3841 unsigned int dest_align
3842 = get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
3843 const char *p
= c_getstr (src
);
3846 if (!p
|| dest_align
== 0 || !host_integerp (len
, 1)
3847 || !can_store_by_pieces (tree_low_cst (len
, 1),
3848 builtin_strncpy_read_str
,
3849 CONST_CAST (char *, p
),
3853 dest_mem
= get_memory_rtx (dest
, len
);
3854 store_by_pieces (dest_mem
, tree_low_cst (len
, 1),
3855 builtin_strncpy_read_str
,
3856 CONST_CAST (char *, p
), dest_align
, false, 0);
3857 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3858 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3865 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3866 bytes from constant string DATA + OFFSET and return it as target
3870 builtin_memset_read_str (void *data
, HOST_WIDE_INT offset ATTRIBUTE_UNUSED
,
3871 enum machine_mode mode
)
3873 const char *c
= (const char *) data
;
3874 char *p
= XALLOCAVEC (char, GET_MODE_SIZE (mode
));
3876 memset (p
, *c
, GET_MODE_SIZE (mode
));
3878 return c_readstr (p
, mode
);
3881 /* Callback routine for store_by_pieces. Return the RTL of a register
3882 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3883 char value given in the RTL register data. For example, if mode is
3884 4 bytes wide, return the RTL for 0x01010101*data. */
3887 builtin_memset_gen_str (void *data
, HOST_WIDE_INT offset ATTRIBUTE_UNUSED
,
3888 enum machine_mode mode
)
3894 size
= GET_MODE_SIZE (mode
);
3898 p
= XALLOCAVEC (char, size
);
3899 memset (p
, 1, size
);
3900 coeff
= c_readstr (p
, mode
);
3902 target
= convert_to_mode (mode
, (rtx
) data
, 1);
3903 target
= expand_mult (mode
, target
, coeff
, NULL_RTX
, 1);
3904 return force_reg (mode
, target
);
3907 /* Expand expression EXP, which is a call to the memset builtin. Return
3908 NULL_RTX if we failed the caller should emit a normal call, otherwise
3909 try to get the result in TARGET, if convenient (and in mode MODE if that's
3913 expand_builtin_memset (tree exp
, rtx target
, enum machine_mode mode
)
3915 if (!validate_arglist (exp
,
3916 POINTER_TYPE
, INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3920 tree dest
= CALL_EXPR_ARG (exp
, 0);
3921 tree val
= CALL_EXPR_ARG (exp
, 1);
3922 tree len
= CALL_EXPR_ARG (exp
, 2);
3923 return expand_builtin_memset_args (dest
, val
, len
, target
, mode
, exp
);
3927 /* Helper function to do the actual work for expand_builtin_memset. The
3928 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3929 so that this can also be called without constructing an actual CALL_EXPR.
3930 The other arguments and return value are the same as for
3931 expand_builtin_memset. */
3934 expand_builtin_memset_args (tree dest
, tree val
, tree len
,
3935 rtx target
, enum machine_mode mode
, tree orig_exp
)
3938 enum built_in_function fcode
;
3940 unsigned int dest_align
;
3941 rtx dest_mem
, dest_addr
, len_rtx
;
3942 HOST_WIDE_INT expected_size
= -1;
3943 unsigned int expected_align
= 0;
3944 tree_ann_common_t ann
;
3946 dest_align
= get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
3948 /* If DEST is not a pointer type, don't do this operation in-line. */
3949 if (dest_align
== 0)
3952 ann
= tree_common_ann (orig_exp
);
3954 stringop_block_profile (ann
->stmt
, &expected_align
, &expected_size
);
3956 if (expected_align
< dest_align
)
3957 expected_align
= dest_align
;
3959 /* If the LEN parameter is zero, return DEST. */
3960 if (integer_zerop (len
))
3962 /* Evaluate and ignore VAL in case it has side-effects. */
3963 expand_expr (val
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3964 return expand_expr (dest
, target
, mode
, EXPAND_NORMAL
);
3967 /* Stabilize the arguments in case we fail. */
3968 dest
= builtin_save_expr (dest
);
3969 val
= builtin_save_expr (val
);
3970 len
= builtin_save_expr (len
);
3972 len_rtx
= expand_normal (len
);
3973 dest_mem
= get_memory_rtx (dest
, len
);
3975 if (TREE_CODE (val
) != INTEGER_CST
)
3979 val_rtx
= expand_normal (val
);
3980 val_rtx
= convert_to_mode (TYPE_MODE (unsigned_char_type_node
),
3983 /* Assume that we can memset by pieces if we can store
3984 * the coefficients by pieces (in the required modes).
3985 * We can't pass builtin_memset_gen_str as that emits RTL. */
3987 if (host_integerp (len
, 1)
3988 && can_store_by_pieces (tree_low_cst (len
, 1),
3989 builtin_memset_read_str
, &c
, dest_align
,
3992 val_rtx
= force_reg (TYPE_MODE (unsigned_char_type_node
),
3994 store_by_pieces (dest_mem
, tree_low_cst (len
, 1),
3995 builtin_memset_gen_str
, val_rtx
, dest_align
,
3998 else if (!set_storage_via_setmem (dest_mem
, len_rtx
, val_rtx
,
3999 dest_align
, expected_align
,
4003 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
4004 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
4008 if (target_char_cast (val
, &c
))
4013 if (host_integerp (len
, 1)
4014 && can_store_by_pieces (tree_low_cst (len
, 1),
4015 builtin_memset_read_str
, &c
, dest_align
,
4017 store_by_pieces (dest_mem
, tree_low_cst (len
, 1),
4018 builtin_memset_read_str
, &c
, dest_align
, true, 0);
4019 else if (!set_storage_via_setmem (dest_mem
, len_rtx
, GEN_INT (c
),
4020 dest_align
, expected_align
,
4024 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
4025 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
4029 set_mem_align (dest_mem
, dest_align
);
4030 dest_addr
= clear_storage_hints (dest_mem
, len_rtx
,
4031 CALL_EXPR_TAILCALL (orig_exp
)
4032 ? BLOCK_OP_TAILCALL
: BLOCK_OP_NORMAL
,
4033 expected_align
, expected_size
);
4037 dest_addr
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
4038 dest_addr
= convert_memory_address (ptr_mode
, dest_addr
);
4044 fndecl
= get_callee_fndecl (orig_exp
);
4045 fcode
= DECL_FUNCTION_CODE (fndecl
);
4046 if (fcode
== BUILT_IN_MEMSET
)
4047 fn
= build_call_expr (fndecl
, 3, dest
, val
, len
);
4048 else if (fcode
== BUILT_IN_BZERO
)
4049 fn
= build_call_expr (fndecl
, 2, dest
, len
);
4052 if (TREE_CODE (fn
) == CALL_EXPR
)
4053 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (orig_exp
);
4054 return expand_call (fn
, target
, target
== const0_rtx
);
4057 /* Expand expression EXP, which is a call to the bzero builtin. Return
4058 NULL_RTX if we failed the caller should emit a normal call. */
4061 expand_builtin_bzero (tree exp
)
4065 if (!validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
4068 dest
= CALL_EXPR_ARG (exp
, 0);
4069 size
= CALL_EXPR_ARG (exp
, 1);
4071 /* New argument list transforming bzero(ptr x, int y) to
4072 memset(ptr x, int 0, size_t y). This is done this way
4073 so that if it isn't expanded inline, we fallback to
4074 calling bzero instead of memset. */
4076 return expand_builtin_memset_args (dest
, integer_zero_node
,
4077 fold_convert (sizetype
, size
),
4078 const0_rtx
, VOIDmode
, exp
);
4081 /* Expand a call to the memchr builtin. Return NULL_RTX if we failed the
4082 caller should emit a normal call, otherwise try to get the result
4083 in TARGET, if convenient (and in mode MODE if that's convenient). */
4086 expand_builtin_memchr (tree exp
, rtx target
, enum machine_mode mode
)
4088 if (validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
,
4089 INTEGER_TYPE
, VOID_TYPE
))
4091 tree type
= TREE_TYPE (exp
);
4092 tree result
= fold_builtin_memchr (CALL_EXPR_ARG (exp
, 0),
4093 CALL_EXPR_ARG (exp
, 1),
4094 CALL_EXPR_ARG (exp
, 2), type
);
4096 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
4101 /* Expand expression EXP, which is a call to the memcmp built-in function.
4102 Return NULL_RTX if we failed and the
4103 caller should emit a normal call, otherwise try to get the result in
4104 TARGET, if convenient (and in mode MODE, if that's convenient). */
4107 expand_builtin_memcmp (tree exp
, rtx target
, enum machine_mode mode
)
4109 if (!validate_arglist (exp
,
4110 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
4114 tree result
= fold_builtin_memcmp (CALL_EXPR_ARG (exp
, 0),
4115 CALL_EXPR_ARG (exp
, 1),
4116 CALL_EXPR_ARG (exp
, 2));
4118 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
4121 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
4123 rtx arg1_rtx
, arg2_rtx
, arg3_rtx
;
4126 tree arg1
= CALL_EXPR_ARG (exp
, 0);
4127 tree arg2
= CALL_EXPR_ARG (exp
, 1);
4128 tree len
= CALL_EXPR_ARG (exp
, 2);
4131 = get_pointer_alignment (arg1
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
4133 = get_pointer_alignment (arg2
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
4134 enum machine_mode insn_mode
;
4136 #ifdef HAVE_cmpmemsi
4138 insn_mode
= insn_data
[(int) CODE_FOR_cmpmemsi
].operand
[0].mode
;
4141 #ifdef HAVE_cmpstrnsi
4143 insn_mode
= insn_data
[(int) CODE_FOR_cmpstrnsi
].operand
[0].mode
;
4148 /* If we don't have POINTER_TYPE, call the function. */
4149 if (arg1_align
== 0 || arg2_align
== 0)
4152 /* Make a place to write the result of the instruction. */
4155 && REG_P (result
) && GET_MODE (result
) == insn_mode
4156 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
4157 result
= gen_reg_rtx (insn_mode
);
4159 arg1_rtx
= get_memory_rtx (arg1
, len
);
4160 arg2_rtx
= get_memory_rtx (arg2
, len
);
4161 arg3_rtx
= expand_normal (len
);
4163 /* Set MEM_SIZE as appropriate. */
4164 if (GET_CODE (arg3_rtx
) == CONST_INT
)
4166 set_mem_size (arg1_rtx
, arg3_rtx
);
4167 set_mem_size (arg2_rtx
, arg3_rtx
);
4170 #ifdef HAVE_cmpmemsi
4172 insn
= gen_cmpmemsi (result
, arg1_rtx
, arg2_rtx
, arg3_rtx
,
4173 GEN_INT (MIN (arg1_align
, arg2_align
)));
4176 #ifdef HAVE_cmpstrnsi
4178 insn
= gen_cmpstrnsi (result
, arg1_rtx
, arg2_rtx
, arg3_rtx
,
4179 GEN_INT (MIN (arg1_align
, arg2_align
)));
4187 emit_library_call_value (memcmp_libfunc
, result
, LCT_PURE
,
4188 TYPE_MODE (integer_type_node
), 3,
4189 XEXP (arg1_rtx
, 0), Pmode
,
4190 XEXP (arg2_rtx
, 0), Pmode
,
4191 convert_to_mode (TYPE_MODE (sizetype
), arg3_rtx
,
4192 TYPE_UNSIGNED (sizetype
)),
4193 TYPE_MODE (sizetype
));
4195 /* Return the value in the proper mode for this function. */
4196 mode
= TYPE_MODE (TREE_TYPE (exp
));
4197 if (GET_MODE (result
) == mode
)
4199 else if (target
!= 0)
4201 convert_move (target
, result
, 0);
4205 return convert_to_mode (mode
, result
, 0);
4212 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4213 if we failed the caller should emit a normal call, otherwise try to get
4214 the result in TARGET, if convenient. */
4217 expand_builtin_strcmp (tree exp
, rtx target
, enum machine_mode mode
)
4219 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
4223 tree result
= fold_builtin_strcmp (CALL_EXPR_ARG (exp
, 0),
4224 CALL_EXPR_ARG (exp
, 1));
4226 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
4229 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4230 if (cmpstr_optab
[SImode
] != CODE_FOR_nothing
4231 || cmpstrn_optab
[SImode
] != CODE_FOR_nothing
)
4233 rtx arg1_rtx
, arg2_rtx
;
4234 rtx result
, insn
= NULL_RTX
;
4236 tree arg1
= CALL_EXPR_ARG (exp
, 0);
4237 tree arg2
= CALL_EXPR_ARG (exp
, 1);
4240 = get_pointer_alignment (arg1
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
4242 = get_pointer_alignment (arg2
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
4244 /* If we don't have POINTER_TYPE, call the function. */
4245 if (arg1_align
== 0 || arg2_align
== 0)
4248 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4249 arg1
= builtin_save_expr (arg1
);
4250 arg2
= builtin_save_expr (arg2
);
4252 arg1_rtx
= get_memory_rtx (arg1
, NULL
);
4253 arg2_rtx
= get_memory_rtx (arg2
, NULL
);
4255 #ifdef HAVE_cmpstrsi
4256 /* Try to call cmpstrsi. */
4259 enum machine_mode insn_mode
4260 = insn_data
[(int) CODE_FOR_cmpstrsi
].operand
[0].mode
;
4262 /* Make a place to write the result of the instruction. */
4265 && REG_P (result
) && GET_MODE (result
) == insn_mode
4266 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
4267 result
= gen_reg_rtx (insn_mode
);
4269 insn
= gen_cmpstrsi (result
, arg1_rtx
, arg2_rtx
,
4270 GEN_INT (MIN (arg1_align
, arg2_align
)));
4273 #ifdef HAVE_cmpstrnsi
4274 /* Try to determine at least one length and call cmpstrnsi. */
4275 if (!insn
&& HAVE_cmpstrnsi
)
4280 enum machine_mode insn_mode
4281 = insn_data
[(int) CODE_FOR_cmpstrnsi
].operand
[0].mode
;
4282 tree len1
= c_strlen (arg1
, 1);
4283 tree len2
= c_strlen (arg2
, 1);
4286 len1
= size_binop (PLUS_EXPR
, ssize_int (1), len1
);
4288 len2
= size_binop (PLUS_EXPR
, ssize_int (1), len2
);
4290 /* If we don't have a constant length for the first, use the length
4291 of the second, if we know it. We don't require a constant for
4292 this case; some cost analysis could be done if both are available
4293 but neither is constant. For now, assume they're equally cheap,
4294 unless one has side effects. If both strings have constant lengths,
4301 else if (TREE_SIDE_EFFECTS (len1
))
4303 else if (TREE_SIDE_EFFECTS (len2
))
4305 else if (TREE_CODE (len1
) != INTEGER_CST
)
4307 else if (TREE_CODE (len2
) != INTEGER_CST
)
4309 else if (tree_int_cst_lt (len1
, len2
))
4314 /* If both arguments have side effects, we cannot optimize. */
4315 if (!len
|| TREE_SIDE_EFFECTS (len
))
4318 arg3_rtx
= expand_normal (len
);
4320 /* Make a place to write the result of the instruction. */
4323 && REG_P (result
) && GET_MODE (result
) == insn_mode
4324 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
4325 result
= gen_reg_rtx (insn_mode
);
4327 insn
= gen_cmpstrnsi (result
, arg1_rtx
, arg2_rtx
, arg3_rtx
,
4328 GEN_INT (MIN (arg1_align
, arg2_align
)));
4336 /* Return the value in the proper mode for this function. */
4337 mode
= TYPE_MODE (TREE_TYPE (exp
));
4338 if (GET_MODE (result
) == mode
)
4341 return convert_to_mode (mode
, result
, 0);
4342 convert_move (target
, result
, 0);
4346 /* Expand the library call ourselves using a stabilized argument
4347 list to avoid re-evaluating the function's arguments twice. */
4348 #ifdef HAVE_cmpstrnsi
4351 fndecl
= get_callee_fndecl (exp
);
4352 fn
= build_call_expr (fndecl
, 2, arg1
, arg2
);
4353 if (TREE_CODE (fn
) == CALL_EXPR
)
4354 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
4355 return expand_call (fn
, target
, target
== const0_rtx
);
4361 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4362 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4363 the result in TARGET, if convenient. */
4366 expand_builtin_strncmp (tree exp
, rtx target
, enum machine_mode mode
)
4368 if (!validate_arglist (exp
,
4369 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
4373 tree result
= fold_builtin_strncmp (CALL_EXPR_ARG (exp
, 0),
4374 CALL_EXPR_ARG (exp
, 1),
4375 CALL_EXPR_ARG (exp
, 2));
4377 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
4380 /* If c_strlen can determine an expression for one of the string
4381 lengths, and it doesn't have side effects, then emit cmpstrnsi
4382 using length MIN(strlen(string)+1, arg3). */
4383 #ifdef HAVE_cmpstrnsi
4386 tree len
, len1
, len2
;
4387 rtx arg1_rtx
, arg2_rtx
, arg3_rtx
;
4390 tree arg1
= CALL_EXPR_ARG (exp
, 0);
4391 tree arg2
= CALL_EXPR_ARG (exp
, 1);
4392 tree arg3
= CALL_EXPR_ARG (exp
, 2);
4395 = get_pointer_alignment (arg1
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
4397 = get_pointer_alignment (arg2
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
4398 enum machine_mode insn_mode
4399 = insn_data
[(int) CODE_FOR_cmpstrnsi
].operand
[0].mode
;
4401 len1
= c_strlen (arg1
, 1);
4402 len2
= c_strlen (arg2
, 1);
4405 len1
= size_binop (PLUS_EXPR
, ssize_int (1), len1
);
4407 len2
= size_binop (PLUS_EXPR
, ssize_int (1), len2
);
4409 /* If we don't have a constant length for the first, use the length
4410 of the second, if we know it. We don't require a constant for
4411 this case; some cost analysis could be done if both are available
4412 but neither is constant. For now, assume they're equally cheap,
4413 unless one has side effects. If both strings have constant lengths,
4420 else if (TREE_SIDE_EFFECTS (len1
))
4422 else if (TREE_SIDE_EFFECTS (len2
))
4424 else if (TREE_CODE (len1
) != INTEGER_CST
)
4426 else if (TREE_CODE (len2
) != INTEGER_CST
)
4428 else if (tree_int_cst_lt (len1
, len2
))
4433 /* If both arguments have side effects, we cannot optimize. */
4434 if (!len
|| TREE_SIDE_EFFECTS (len
))
4437 /* The actual new length parameter is MIN(len,arg3). */
4438 len
= fold_build2 (MIN_EXPR
, TREE_TYPE (len
), len
,
4439 fold_convert (TREE_TYPE (len
), arg3
));
4441 /* If we don't have POINTER_TYPE, call the function. */
4442 if (arg1_align
== 0 || arg2_align
== 0)
4445 /* Make a place to write the result of the instruction. */
4448 && REG_P (result
) && GET_MODE (result
) == insn_mode
4449 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
4450 result
= gen_reg_rtx (insn_mode
);
4452 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4453 arg1
= builtin_save_expr (arg1
);
4454 arg2
= builtin_save_expr (arg2
);
4455 len
= builtin_save_expr (len
);
4457 arg1_rtx
= get_memory_rtx (arg1
, len
);
4458 arg2_rtx
= get_memory_rtx (arg2
, len
);
4459 arg3_rtx
= expand_normal (len
);
4460 insn
= gen_cmpstrnsi (result
, arg1_rtx
, arg2_rtx
, arg3_rtx
,
4461 GEN_INT (MIN (arg1_align
, arg2_align
)));
4466 /* Return the value in the proper mode for this function. */
4467 mode
= TYPE_MODE (TREE_TYPE (exp
));
4468 if (GET_MODE (result
) == mode
)
4471 return convert_to_mode (mode
, result
, 0);
4472 convert_move (target
, result
, 0);
4476 /* Expand the library call ourselves using a stabilized argument
4477 list to avoid re-evaluating the function's arguments twice. */
4478 fndecl
= get_callee_fndecl (exp
);
4479 fn
= build_call_expr (fndecl
, 3, arg1
, arg2
, len
);
4480 if (TREE_CODE (fn
) == CALL_EXPR
)
4481 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
4482 return expand_call (fn
, target
, target
== const0_rtx
);
4488 /* Expand expression EXP, which is a call to the strcat builtin.
4489 Return NULL_RTX if we failed the caller should emit a normal call,
4490 otherwise try to get the result in TARGET, if convenient. */
4493 expand_builtin_strcat (tree fndecl
, tree exp
, rtx target
, enum machine_mode mode
)
4495 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
4499 tree dst
= CALL_EXPR_ARG (exp
, 0);
4500 tree src
= CALL_EXPR_ARG (exp
, 1);
4501 const char *p
= c_getstr (src
);
4503 /* If the string length is zero, return the dst parameter. */
4504 if (p
&& *p
== '\0')
4505 return expand_expr (dst
, target
, mode
, EXPAND_NORMAL
);
4507 if (optimize_insn_for_speed_p ())
4509 /* See if we can store by pieces into (dst + strlen(dst)). */
4510 tree newsrc
, newdst
,
4511 strlen_fn
= implicit_built_in_decls
[BUILT_IN_STRLEN
];
4514 /* Stabilize the argument list. */
4515 newsrc
= builtin_save_expr (src
);
4516 dst
= builtin_save_expr (dst
);
4520 /* Create strlen (dst). */
4521 newdst
= build_call_expr (strlen_fn
, 1, dst
);
4522 /* Create (dst p+ strlen (dst)). */
4524 newdst
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (dst
), dst
, newdst
);
4525 newdst
= builtin_save_expr (newdst
);
4527 if (!expand_builtin_strcpy_args (fndecl
, newdst
, newsrc
, target
, mode
))
4529 end_sequence (); /* Stop sequence. */
4533 /* Output the entire sequence. */
4534 insns
= get_insns ();
4538 return expand_expr (dst
, target
, mode
, EXPAND_NORMAL
);
4545 /* Expand expression EXP, which is a call to the strncat builtin.
4546 Return NULL_RTX if we failed the caller should emit a normal call,
4547 otherwise try to get the result in TARGET, if convenient. */
4550 expand_builtin_strncat (tree exp
, rtx target
, enum machine_mode mode
)
4552 if (validate_arglist (exp
,
4553 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
4555 tree result
= fold_builtin_strncat (CALL_EXPR_ARG (exp
, 0),
4556 CALL_EXPR_ARG (exp
, 1),
4557 CALL_EXPR_ARG (exp
, 2));
4559 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
4564 /* Expand expression EXP, which is a call to the strspn builtin.
4565 Return NULL_RTX if we failed the caller should emit a normal call,
4566 otherwise try to get the result in TARGET, if convenient. */
4569 expand_builtin_strspn (tree exp
, rtx target
, enum machine_mode mode
)
4571 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
4573 tree result
= fold_builtin_strspn (CALL_EXPR_ARG (exp
, 0),
4574 CALL_EXPR_ARG (exp
, 1));
4576 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
4581 /* Expand expression EXP, which is a call to the strcspn builtin.
4582 Return NULL_RTX if we failed the caller should emit a normal call,
4583 otherwise try to get the result in TARGET, if convenient. */
4586 expand_builtin_strcspn (tree exp
, rtx target
, enum machine_mode mode
)
4588 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
4590 tree result
= fold_builtin_strcspn (CALL_EXPR_ARG (exp
, 0),
4591 CALL_EXPR_ARG (exp
, 1));
4593 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
4598 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4599 if that's convenient. */
4602 expand_builtin_saveregs (void)
4606 /* Don't do __builtin_saveregs more than once in a function.
4607 Save the result of the first call and reuse it. */
4608 if (saveregs_value
!= 0)
4609 return saveregs_value
;
4611 /* When this function is called, it means that registers must be
4612 saved on entry to this function. So we migrate the call to the
4613 first insn of this function. */
4617 /* Do whatever the machine needs done in this case. */
4618 val
= targetm
.calls
.expand_builtin_saveregs ();
4623 saveregs_value
= val
;
4625 /* Put the insns after the NOTE that starts the function. If this
4626 is inside a start_sequence, make the outer-level insn chain current, so
4627 the code is placed at the start of the function. */
4628 push_topmost_sequence ();
4629 emit_insn_after (seq
, entry_of_function ());
4630 pop_topmost_sequence ();
4635 /* __builtin_args_info (N) returns word N of the arg space info
4636 for the current function. The number and meanings of words
4637 is controlled by the definition of CUMULATIVE_ARGS. */
4640 expand_builtin_args_info (tree exp
)
4642 int nwords
= sizeof (CUMULATIVE_ARGS
) / sizeof (int);
4643 int *word_ptr
= (int *) &crtl
->args
.info
;
4645 gcc_assert (sizeof (CUMULATIVE_ARGS
) % sizeof (int) == 0);
4647 if (call_expr_nargs (exp
) != 0)
4649 if (!host_integerp (CALL_EXPR_ARG (exp
, 0), 0))
4650 error ("argument of %<__builtin_args_info%> must be constant");
4653 HOST_WIDE_INT wordnum
= tree_low_cst (CALL_EXPR_ARG (exp
, 0), 0);
4655 if (wordnum
< 0 || wordnum
>= nwords
)
4656 error ("argument of %<__builtin_args_info%> out of range");
4658 return GEN_INT (word_ptr
[wordnum
]);
4662 error ("missing argument in %<__builtin_args_info%>");
4667 /* Expand a call to __builtin_next_arg. */
4670 expand_builtin_next_arg (void)
4672 /* Checking arguments is already done in fold_builtin_next_arg
4673 that must be called before this function. */
4674 return expand_binop (ptr_mode
, add_optab
,
4675 crtl
->args
.internal_arg_pointer
,
4676 crtl
->args
.arg_offset_rtx
,
4677 NULL_RTX
, 0, OPTAB_LIB_WIDEN
);
4680 /* Make it easier for the backends by protecting the valist argument
4681 from multiple evaluations. */
4684 stabilize_va_list (tree valist
, int needs_lvalue
)
4686 tree vatype
= targetm
.canonical_va_list_type (TREE_TYPE (valist
));
4688 gcc_assert (vatype
!= NULL_TREE
);
4690 if (TREE_CODE (vatype
) == ARRAY_TYPE
)
4692 if (TREE_SIDE_EFFECTS (valist
))
4693 valist
= save_expr (valist
);
4695 /* For this case, the backends will be expecting a pointer to
4696 vatype, but it's possible we've actually been given an array
4697 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4699 if (TREE_CODE (TREE_TYPE (valist
)) == ARRAY_TYPE
)
4701 tree p1
= build_pointer_type (TREE_TYPE (vatype
));
4702 valist
= build_fold_addr_expr_with_type (valist
, p1
);
4711 if (! TREE_SIDE_EFFECTS (valist
))
4714 pt
= build_pointer_type (vatype
);
4715 valist
= fold_build1 (ADDR_EXPR
, pt
, valist
);
4716 TREE_SIDE_EFFECTS (valist
) = 1;
4719 if (TREE_SIDE_EFFECTS (valist
))
4720 valist
= save_expr (valist
);
4721 valist
= build_fold_indirect_ref (valist
);
4727 /* The "standard" definition of va_list is void*. */
4730 std_build_builtin_va_list (void)
4732 return ptr_type_node
;
4735 /* The "standard" abi va_list is va_list_type_node. */
4738 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED
)
4740 return va_list_type_node
;
4743 /* The "standard" type of va_list is va_list_type_node. */
4746 std_canonical_va_list_type (tree type
)
4750 if (INDIRECT_REF_P (type
))
4751 type
= TREE_TYPE (type
);
4752 else if (POINTER_TYPE_P (type
) && POINTER_TYPE_P (TREE_TYPE(type
)))
4753 type
= TREE_TYPE (type
);
4754 wtype
= va_list_type_node
;
4756 /* Treat structure va_list types. */
4757 if (TREE_CODE (wtype
) == RECORD_TYPE
&& POINTER_TYPE_P (htype
))
4758 htype
= TREE_TYPE (htype
);
4759 else if (TREE_CODE (wtype
) == ARRAY_TYPE
)
4761 /* If va_list is an array type, the argument may have decayed
4762 to a pointer type, e.g. by being passed to another function.
4763 In that case, unwrap both types so that we can compare the
4764 underlying records. */
4765 if (TREE_CODE (htype
) == ARRAY_TYPE
4766 || POINTER_TYPE_P (htype
))
4768 wtype
= TREE_TYPE (wtype
);
4769 htype
= TREE_TYPE (htype
);
4772 if (TYPE_MAIN_VARIANT (wtype
) == TYPE_MAIN_VARIANT (htype
))
4773 return va_list_type_node
;
4778 /* The "standard" implementation of va_start: just assign `nextarg' to
4782 std_expand_builtin_va_start (tree valist
, rtx nextarg
)
4784 rtx va_r
= expand_expr (valist
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
4785 convert_move (va_r
, nextarg
, 0);
4788 /* Expand EXP, a call to __builtin_va_start. */
4791 expand_builtin_va_start (tree exp
)
4796 if (call_expr_nargs (exp
) < 2)
4798 error ("too few arguments to function %<va_start%>");
4802 if (fold_builtin_next_arg (exp
, true))
4805 nextarg
= expand_builtin_next_arg ();
4806 valist
= stabilize_va_list (CALL_EXPR_ARG (exp
, 0), 1);
4808 if (targetm
.expand_builtin_va_start
)
4809 targetm
.expand_builtin_va_start (valist
, nextarg
);
4811 std_expand_builtin_va_start (valist
, nextarg
);
4816 /* The "standard" implementation of va_arg: read the value from the
4817 current (padded) address and increment by the (padded) size. */
4820 std_gimplify_va_arg_expr (tree valist
, tree type
, gimple_seq
*pre_p
,
4823 tree addr
, t
, type_size
, rounded_size
, valist_tmp
;
4824 unsigned HOST_WIDE_INT align
, boundary
;
4827 #ifdef ARGS_GROW_DOWNWARD
4828 /* All of the alignment and movement below is for args-grow-up machines.
4829 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4830 implement their own specialized gimplify_va_arg_expr routines. */
4834 indirect
= pass_by_reference (NULL
, TYPE_MODE (type
), type
, false);
4836 type
= build_pointer_type (type
);
4838 align
= PARM_BOUNDARY
/ BITS_PER_UNIT
;
4839 boundary
= FUNCTION_ARG_BOUNDARY (TYPE_MODE (type
), type
);
4841 /* When we align parameter on stack for caller, if the parameter
4842 alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
4843 aligned at MAX_SUPPORTED_STACK_ALIGNMENT. We will match callee
4844 here with caller. */
4845 if (boundary
> MAX_SUPPORTED_STACK_ALIGNMENT
)
4846 boundary
= MAX_SUPPORTED_STACK_ALIGNMENT
;
4848 boundary
/= BITS_PER_UNIT
;
4850 /* Hoist the valist value into a temporary for the moment. */
4851 valist_tmp
= get_initialized_tmp_var (valist
, pre_p
, NULL
);
4853 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4854 requires greater alignment, we must perform dynamic alignment. */
4855 if (boundary
> align
4856 && !integer_zerop (TYPE_SIZE (type
)))
4858 t
= build2 (MODIFY_EXPR
, TREE_TYPE (valist
), valist_tmp
,
4859 fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (valist
),
4860 valist_tmp
, size_int (boundary
- 1)));
4861 gimplify_and_add (t
, pre_p
);
4863 t
= fold_convert (sizetype
, valist_tmp
);
4864 t
= build2 (MODIFY_EXPR
, TREE_TYPE (valist
), valist_tmp
,
4865 fold_convert (TREE_TYPE (valist
),
4866 fold_build2 (BIT_AND_EXPR
, sizetype
, t
,
4867 size_int (-boundary
))));
4868 gimplify_and_add (t
, pre_p
);
4873 /* If the actual alignment is less than the alignment of the type,
4874 adjust the type accordingly so that we don't assume strict alignment
4875 when dereferencing the pointer. */
4876 boundary
*= BITS_PER_UNIT
;
4877 if (boundary
< TYPE_ALIGN (type
))
4879 type
= build_variant_type_copy (type
);
4880 TYPE_ALIGN (type
) = boundary
;
4883 /* Compute the rounded size of the type. */
4884 type_size
= size_in_bytes (type
);
4885 rounded_size
= round_up (type_size
, align
);
4887 /* Reduce rounded_size so it's sharable with the postqueue. */
4888 gimplify_expr (&rounded_size
, pre_p
, post_p
, is_gimple_val
, fb_rvalue
);
4892 if (PAD_VARARGS_DOWN
&& !integer_zerop (rounded_size
))
4894 /* Small args are padded downward. */
4895 t
= fold_build2 (GT_EXPR
, sizetype
, rounded_size
, size_int (align
));
4896 t
= fold_build3 (COND_EXPR
, sizetype
, t
, size_zero_node
,
4897 size_binop (MINUS_EXPR
, rounded_size
, type_size
));
4898 addr
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (addr
), addr
, t
);
4901 /* Compute new value for AP. */
4902 t
= build2 (POINTER_PLUS_EXPR
, TREE_TYPE (valist
), valist_tmp
, rounded_size
);
4903 t
= build2 (MODIFY_EXPR
, TREE_TYPE (valist
), valist
, t
);
4904 gimplify_and_add (t
, pre_p
);
4906 addr
= fold_convert (build_pointer_type (type
), addr
);
4909 addr
= build_va_arg_indirect_ref (addr
);
4911 return build_va_arg_indirect_ref (addr
);
4914 /* Build an indirect-ref expression over the given TREE, which represents a
4915 piece of a va_arg() expansion. */
4917 build_va_arg_indirect_ref (tree addr
)
4919 addr
= build_fold_indirect_ref (addr
);
4921 if (flag_mudflap
) /* Don't instrument va_arg INDIRECT_REF. */
4927 /* Return a dummy expression of type TYPE in order to keep going after an
4931 dummy_object (tree type
)
4933 tree t
= build_int_cst (build_pointer_type (type
), 0);
4934 return build1 (INDIRECT_REF
, type
, t
);
4937 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4938 builtin function, but a very special sort of operator. */
4940 enum gimplify_status
4941 gimplify_va_arg_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
4943 tree promoted_type
, have_va_type
;
4944 tree valist
= TREE_OPERAND (*expr_p
, 0);
4945 tree type
= TREE_TYPE (*expr_p
);
4948 /* Verify that valist is of the proper type. */
4949 have_va_type
= TREE_TYPE (valist
);
4950 if (have_va_type
== error_mark_node
)
4952 have_va_type
= targetm
.canonical_va_list_type (have_va_type
);
4954 if (have_va_type
== NULL_TREE
)
4956 error ("first argument to %<va_arg%> not of type %<va_list%>");
4960 /* Generate a diagnostic for requesting data of a type that cannot
4961 be passed through `...' due to type promotion at the call site. */
4962 if ((promoted_type
= lang_hooks
.types
.type_promotes_to (type
))
4965 static bool gave_help
;
4968 /* Unfortunately, this is merely undefined, rather than a constraint
4969 violation, so we cannot make this an error. If this call is never
4970 executed, the program is still strictly conforming. */
4971 warned
= warning (0, "%qT is promoted to %qT when passed through %<...%>",
4972 type
, promoted_type
);
4973 if (!gave_help
&& warned
)
4976 inform (input_location
, "(so you should pass %qT not %qT to %<va_arg%>)",
4977 promoted_type
, type
);
4980 /* We can, however, treat "undefined" any way we please.
4981 Call abort to encourage the user to fix the program. */
4983 inform (input_location
, "if this code is reached, the program will abort");
4984 t
= build_call_expr (implicit_built_in_decls
[BUILT_IN_TRAP
], 0);
4985 gimplify_and_add (t
, pre_p
);
4987 /* This is dead code, but go ahead and finish so that the
4988 mode of the result comes out right. */
4989 *expr_p
= dummy_object (type
);
4994 /* Make it easier for the backends by protecting the valist argument
4995 from multiple evaluations. */
4996 if (TREE_CODE (have_va_type
) == ARRAY_TYPE
)
4998 /* For this case, the backends will be expecting a pointer to
4999 TREE_TYPE (abi), but it's possible we've
5000 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
5002 if (TREE_CODE (TREE_TYPE (valist
)) == ARRAY_TYPE
)
5004 tree p1
= build_pointer_type (TREE_TYPE (have_va_type
));
5005 valist
= build_fold_addr_expr_with_type (valist
, p1
);
5008 gimplify_expr (&valist
, pre_p
, post_p
, is_gimple_val
, fb_rvalue
);
5011 gimplify_expr (&valist
, pre_p
, post_p
, is_gimple_min_lval
, fb_lvalue
);
5013 if (!targetm
.gimplify_va_arg_expr
)
5014 /* FIXME: Once most targets are converted we should merely
5015 assert this is non-null. */
5018 *expr_p
= targetm
.gimplify_va_arg_expr (valist
, type
, pre_p
, post_p
);
5023 /* Expand EXP, a call to __builtin_va_end. */
5026 expand_builtin_va_end (tree exp
)
5028 tree valist
= CALL_EXPR_ARG (exp
, 0);
5030 /* Evaluate for side effects, if needed. I hate macros that don't
5032 if (TREE_SIDE_EFFECTS (valist
))
5033 expand_expr (valist
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
5038 /* Expand EXP, a call to __builtin_va_copy. We do this as a
5039 builtin rather than just as an assignment in stdarg.h because of the
5040 nastiness of array-type va_list types. */
5043 expand_builtin_va_copy (tree exp
)
5047 dst
= CALL_EXPR_ARG (exp
, 0);
5048 src
= CALL_EXPR_ARG (exp
, 1);
5050 dst
= stabilize_va_list (dst
, 1);
5051 src
= stabilize_va_list (src
, 0);
5053 gcc_assert (cfun
!= NULL
&& cfun
->decl
!= NULL_TREE
);
5055 if (TREE_CODE (targetm
.fn_abi_va_list (cfun
->decl
)) != ARRAY_TYPE
)
5057 t
= build2 (MODIFY_EXPR
, targetm
.fn_abi_va_list (cfun
->decl
), dst
, src
);
5058 TREE_SIDE_EFFECTS (t
) = 1;
5059 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
5063 rtx dstb
, srcb
, size
;
5065 /* Evaluate to pointers. */
5066 dstb
= expand_expr (dst
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
5067 srcb
= expand_expr (src
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
5068 size
= expand_expr (TYPE_SIZE_UNIT (targetm
.fn_abi_va_list (cfun
->decl
)),
5069 NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
5071 dstb
= convert_memory_address (Pmode
, dstb
);
5072 srcb
= convert_memory_address (Pmode
, srcb
);
5074 /* "Dereference" to BLKmode memories. */
5075 dstb
= gen_rtx_MEM (BLKmode
, dstb
);
5076 set_mem_alias_set (dstb
, get_alias_set (TREE_TYPE (TREE_TYPE (dst
))));
5077 set_mem_align (dstb
, TYPE_ALIGN (targetm
.fn_abi_va_list (cfun
->decl
)));
5078 srcb
= gen_rtx_MEM (BLKmode
, srcb
);
5079 set_mem_alias_set (srcb
, get_alias_set (TREE_TYPE (TREE_TYPE (src
))));
5080 set_mem_align (srcb
, TYPE_ALIGN (targetm
.fn_abi_va_list (cfun
->decl
)));
5083 emit_block_move (dstb
, srcb
, size
, BLOCK_OP_NORMAL
);
5089 /* Expand a call to one of the builtin functions __builtin_frame_address or
5090 __builtin_return_address. */
5093 expand_builtin_frame_address (tree fndecl
, tree exp
)
5095 /* The argument must be a nonnegative integer constant.
5096 It counts the number of frames to scan up the stack.
5097 The value is the return address saved in that frame. */
5098 if (call_expr_nargs (exp
) == 0)
5099 /* Warning about missing arg was already issued. */
5101 else if (! host_integerp (CALL_EXPR_ARG (exp
, 0), 1))
5103 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
5104 error ("invalid argument to %<__builtin_frame_address%>");
5106 error ("invalid argument to %<__builtin_return_address%>");
5112 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl
),
5113 tree_low_cst (CALL_EXPR_ARG (exp
, 0), 1));
5115 /* Some ports cannot access arbitrary stack frames. */
5118 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
5119 warning (0, "unsupported argument to %<__builtin_frame_address%>");
5121 warning (0, "unsupported argument to %<__builtin_return_address%>");
5125 /* For __builtin_frame_address, return what we've got. */
5126 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
5130 && ! CONSTANT_P (tem
))
5131 tem
= copy_to_mode_reg (Pmode
, tem
);
5136 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
5137 we failed and the caller should emit a normal call, otherwise try to get
5138 the result in TARGET, if convenient. */
5141 expand_builtin_alloca (tree exp
, rtx target
)
5146 /* In -fmudflap-instrumented code, alloca() and __builtin_alloca()
5147 should always expand to function calls. These can be intercepted
5152 if (!validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
))
5155 /* Compute the argument. */
5156 op0
= expand_normal (CALL_EXPR_ARG (exp
, 0));
5158 /* Allocate the desired space. */
5159 result
= allocate_dynamic_stack_space (op0
, target
, BITS_PER_UNIT
);
5160 result
= convert_memory_address (ptr_mode
, result
);
5165 /* Expand a call to a bswap builtin with argument ARG0. MODE
5166 is the mode to expand with. */
5169 expand_builtin_bswap (tree exp
, rtx target
, rtx subtarget
)
5171 enum machine_mode mode
;
5175 if (!validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
))
5178 arg
= CALL_EXPR_ARG (exp
, 0);
5179 mode
= TYPE_MODE (TREE_TYPE (arg
));
5180 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
5182 target
= expand_unop (mode
, bswap_optab
, op0
, target
, 1);
5184 gcc_assert (target
);
5186 return convert_to_mode (mode
, target
, 0);
5189 /* Expand a call to a unary builtin in EXP.
5190 Return NULL_RTX if a normal call should be emitted rather than expanding the
5191 function in-line. If convenient, the result should be placed in TARGET.
5192 SUBTARGET may be used as the target for computing one of EXP's operands. */
5195 expand_builtin_unop (enum machine_mode target_mode
, tree exp
, rtx target
,
5196 rtx subtarget
, optab op_optab
)
5200 if (!validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
))
5203 /* Compute the argument. */
5204 op0
= expand_expr (CALL_EXPR_ARG (exp
, 0), subtarget
,
5205 VOIDmode
, EXPAND_NORMAL
);
5206 /* Compute op, into TARGET if possible.
5207 Set TARGET to wherever the result comes back. */
5208 target
= expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 0))),
5209 op_optab
, op0
, target
, 1);
5210 gcc_assert (target
);
5212 return convert_to_mode (target_mode
, target
, 0);
5215 /* If the string passed to fputs is a constant and is one character
5216 long, we attempt to transform this call into __builtin_fputc(). */
5219 expand_builtin_fputs (tree exp
, rtx target
, bool unlocked
)
5221 /* Verify the arguments in the original call. */
5222 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
5224 tree result
= fold_builtin_fputs (CALL_EXPR_ARG (exp
, 0),
5225 CALL_EXPR_ARG (exp
, 1),
5226 (target
== const0_rtx
),
5227 unlocked
, NULL_TREE
);
5229 return expand_expr (result
, target
, VOIDmode
, EXPAND_NORMAL
);
5234 /* Expand a call to __builtin_expect. We just return our argument
5235 as the builtin_expect semantic should've been already executed by
5236 tree branch prediction pass. */
5239 expand_builtin_expect (tree exp
, rtx target
)
5243 if (call_expr_nargs (exp
) < 2)
5245 arg
= CALL_EXPR_ARG (exp
, 0);
5246 c
= CALL_EXPR_ARG (exp
, 1);
5248 target
= expand_expr (arg
, target
, VOIDmode
, EXPAND_NORMAL
);
5249 /* When guessing was done, the hints should be already stripped away. */
5250 gcc_assert (!flag_guess_branch_prob
5251 || optimize
== 0 || errorcount
|| sorrycount
);
5256 expand_builtin_trap (void)
5260 emit_insn (gen_trap ());
5263 emit_library_call (abort_libfunc
, LCT_NORETURN
, VOIDmode
, 0);
5267 /* Expand EXP, a call to fabs, fabsf or fabsl.
5268 Return NULL_RTX if a normal call should be emitted rather than expanding
5269 the function inline. If convenient, the result should be placed
5270 in TARGET. SUBTARGET may be used as the target for computing
5274 expand_builtin_fabs (tree exp
, rtx target
, rtx subtarget
)
5276 enum machine_mode mode
;
5280 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
5283 arg
= CALL_EXPR_ARG (exp
, 0);
5284 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
5285 mode
= TYPE_MODE (TREE_TYPE (arg
));
5286 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
5287 return expand_abs (mode
, op0
, target
, 0, safe_from_p (target
, arg
, 1));
5290 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5291 Return NULL is a normal call should be emitted rather than expanding the
5292 function inline. If convenient, the result should be placed in TARGET.
5293 SUBTARGET may be used as the target for computing the operand. */
5296 expand_builtin_copysign (tree exp
, rtx target
, rtx subtarget
)
5301 if (!validate_arglist (exp
, REAL_TYPE
, REAL_TYPE
, VOID_TYPE
))
5304 arg
= CALL_EXPR_ARG (exp
, 0);
5305 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
5307 arg
= CALL_EXPR_ARG (exp
, 1);
5308 op1
= expand_normal (arg
);
5310 return expand_copysign (op0
, op1
, target
);
5313 /* Create a new constant string literal and return a char* pointer to it.
5314 The STRING_CST value is the LEN characters at STR. */
5316 build_string_literal (int len
, const char *str
)
5318 tree t
, elem
, index
, type
;
5320 t
= build_string (len
, str
);
5321 elem
= build_type_variant (char_type_node
, 1, 0);
5322 index
= build_index_type (size_int (len
- 1));
5323 type
= build_array_type (elem
, index
);
5324 TREE_TYPE (t
) = type
;
5325 TREE_CONSTANT (t
) = 1;
5326 TREE_READONLY (t
) = 1;
5327 TREE_STATIC (t
) = 1;
5329 type
= build_pointer_type (elem
);
5330 t
= build1 (ADDR_EXPR
, type
,
5331 build4 (ARRAY_REF
, elem
,
5332 t
, integer_zero_node
, NULL_TREE
, NULL_TREE
));
5336 /* Expand EXP, a call to printf or printf_unlocked.
5337 Return NULL_RTX if a normal call should be emitted rather than transforming
5338 the function inline. If convenient, the result should be placed in
5339 TARGET with mode MODE. UNLOCKED indicates this is a printf_unlocked
5342 expand_builtin_printf (tree exp
, rtx target
, enum machine_mode mode
,
5345 /* If we're using an unlocked function, assume the other unlocked
5346 functions exist explicitly. */
5347 tree
const fn_putchar
= unlocked
? built_in_decls
[BUILT_IN_PUTCHAR_UNLOCKED
]
5348 : implicit_built_in_decls
[BUILT_IN_PUTCHAR
];
5349 tree
const fn_puts
= unlocked
? built_in_decls
[BUILT_IN_PUTS_UNLOCKED
]
5350 : implicit_built_in_decls
[BUILT_IN_PUTS
];
5351 const char *fmt_str
;
5354 int nargs
= call_expr_nargs (exp
);
5356 /* If the return value is used, don't do the transformation. */
5357 if (target
!= const0_rtx
)
5360 /* Verify the required arguments in the original call. */
5363 fmt
= CALL_EXPR_ARG (exp
, 0);
5364 if (! POINTER_TYPE_P (TREE_TYPE (fmt
)))
5367 /* Check whether the format is a literal string constant. */
5368 fmt_str
= c_getstr (fmt
);
5369 if (fmt_str
== NULL
)
5372 if (!init_target_chars ())
5375 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
5376 if (strcmp (fmt_str
, target_percent_s_newline
) == 0)
5379 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp
, 1))))
5382 fn
= build_call_expr (fn_puts
, 1, CALL_EXPR_ARG (exp
, 1));
5384 /* If the format specifier was "%c", call __builtin_putchar(arg). */
5385 else if (strcmp (fmt_str
, target_percent_c
) == 0)
5388 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 1))) != INTEGER_TYPE
)
5391 fn
= build_call_expr (fn_putchar
, 1, CALL_EXPR_ARG (exp
, 1));
5395 /* We can't handle anything else with % args or %% ... yet. */
5396 if (strchr (fmt_str
, target_percent
))
5402 /* If the format specifier was "", printf does nothing. */
5403 if (fmt_str
[0] == '\0')
5405 /* If the format specifier has length of 1, call putchar. */
5406 if (fmt_str
[1] == '\0')
5408 /* Given printf("c"), (where c is any one character,)
5409 convert "c"[0] to an int and pass that to the replacement
5411 arg
= build_int_cst (NULL_TREE
, fmt_str
[0]);
5413 fn
= build_call_expr (fn_putchar
, 1, arg
);
5417 /* If the format specifier was "string\n", call puts("string"). */
5418 size_t len
= strlen (fmt_str
);
5419 if ((unsigned char)fmt_str
[len
- 1] == target_newline
)
5421 /* Create a NUL-terminated string that's one char shorter
5422 than the original, stripping off the trailing '\n'. */
5423 char *newstr
= XALLOCAVEC (char, len
);
5424 memcpy (newstr
, fmt_str
, len
- 1);
5425 newstr
[len
- 1] = 0;
5426 arg
= build_string_literal (len
, newstr
);
5428 fn
= build_call_expr (fn_puts
, 1, arg
);
5431 /* We'd like to arrange to call fputs(string,stdout) here,
5432 but we need stdout and don't have a way to get it yet. */
5439 if (TREE_CODE (fn
) == CALL_EXPR
)
5440 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
5441 return expand_expr (fn
, target
, mode
, EXPAND_NORMAL
);
5444 /* Expand EXP, a call to fprintf or fprintf_unlocked.
5445 Return NULL_RTX if a normal call should be emitted rather than transforming
5446 the function inline. If convenient, the result should be placed in
5447 TARGET with mode MODE. UNLOCKED indicates this is a fprintf_unlocked
5450 expand_builtin_fprintf (tree exp
, rtx target
, enum machine_mode mode
,
5453 /* If we're using an unlocked function, assume the other unlocked
5454 functions exist explicitly. */
5455 tree
const fn_fputc
= unlocked
? built_in_decls
[BUILT_IN_FPUTC_UNLOCKED
]
5456 : implicit_built_in_decls
[BUILT_IN_FPUTC
];
5457 tree
const fn_fputs
= unlocked
? built_in_decls
[BUILT_IN_FPUTS_UNLOCKED
]
5458 : implicit_built_in_decls
[BUILT_IN_FPUTS
];
5459 const char *fmt_str
;
5462 int nargs
= call_expr_nargs (exp
);
5464 /* If the return value is used, don't do the transformation. */
5465 if (target
!= const0_rtx
)
5468 /* Verify the required arguments in the original call. */
5471 fp
= CALL_EXPR_ARG (exp
, 0);
5472 if (! POINTER_TYPE_P (TREE_TYPE (fp
)))
5474 fmt
= CALL_EXPR_ARG (exp
, 1);
5475 if (! POINTER_TYPE_P (TREE_TYPE (fmt
)))
5478 /* Check whether the format is a literal string constant. */
5479 fmt_str
= c_getstr (fmt
);
5480 if (fmt_str
== NULL
)
5483 if (!init_target_chars ())
5486 /* If the format specifier was "%s", call __builtin_fputs(arg,fp). */
5487 if (strcmp (fmt_str
, target_percent_s
) == 0)
5490 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp
, 2))))
5492 arg
= CALL_EXPR_ARG (exp
, 2);
5494 fn
= build_call_expr (fn_fputs
, 2, arg
, fp
);
5496 /* If the format specifier was "%c", call __builtin_fputc(arg,fp). */
5497 else if (strcmp (fmt_str
, target_percent_c
) == 0)
5500 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 2))) != INTEGER_TYPE
)
5502 arg
= CALL_EXPR_ARG (exp
, 2);
5504 fn
= build_call_expr (fn_fputc
, 2, arg
, fp
);
5508 /* We can't handle anything else with % args or %% ... yet. */
5509 if (strchr (fmt_str
, target_percent
))
5515 /* If the format specifier was "", fprintf does nothing. */
5516 if (fmt_str
[0] == '\0')
5518 /* Evaluate and ignore FILE* argument for side-effects. */
5519 expand_expr (fp
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
5523 /* When "string" doesn't contain %, replace all cases of
5524 fprintf(stream,string) with fputs(string,stream). The fputs
5525 builtin will take care of special cases like length == 1. */
5527 fn
= build_call_expr (fn_fputs
, 2, fmt
, fp
);
5532 if (TREE_CODE (fn
) == CALL_EXPR
)
5533 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
5534 return expand_expr (fn
, target
, mode
, EXPAND_NORMAL
);
5537 /* Expand a call EXP to sprintf. Return NULL_RTX if
5538 a normal call should be emitted rather than expanding the function
5539 inline. If convenient, the result should be placed in TARGET with
5543 expand_builtin_sprintf (tree exp
, rtx target
, enum machine_mode mode
)
5546 const char *fmt_str
;
5547 int nargs
= call_expr_nargs (exp
);
5549 /* Verify the required arguments in the original call. */
5552 dest
= CALL_EXPR_ARG (exp
, 0);
5553 if (! POINTER_TYPE_P (TREE_TYPE (dest
)))
5555 fmt
= CALL_EXPR_ARG (exp
, 0);
5556 if (! POINTER_TYPE_P (TREE_TYPE (fmt
)))
5559 /* Check whether the format is a literal string constant. */
5560 fmt_str
= c_getstr (fmt
);
5561 if (fmt_str
== NULL
)
5564 if (!init_target_chars ())
5567 /* If the format doesn't contain % args or %%, use strcpy. */
5568 if (strchr (fmt_str
, target_percent
) == 0)
5570 tree fn
= implicit_built_in_decls
[BUILT_IN_STRCPY
];
5573 if ((nargs
> 2) || ! fn
)
5575 expand_expr (build_call_expr (fn
, 2, dest
, fmt
),
5576 const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
5577 if (target
== const0_rtx
)
5579 exp
= build_int_cst (NULL_TREE
, strlen (fmt_str
));
5580 return expand_expr (exp
, target
, mode
, EXPAND_NORMAL
);
5582 /* If the format is "%s", use strcpy if the result isn't used. */
5583 else if (strcmp (fmt_str
, target_percent_s
) == 0)
5586 fn
= implicit_built_in_decls
[BUILT_IN_STRCPY
];
5592 arg
= CALL_EXPR_ARG (exp
, 2);
5593 if (! POINTER_TYPE_P (TREE_TYPE (arg
)))
5596 if (target
!= const0_rtx
)
5598 len
= c_strlen (arg
, 1);
5599 if (! len
|| TREE_CODE (len
) != INTEGER_CST
)
5605 expand_expr (build_call_expr (fn
, 2, dest
, arg
),
5606 const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
5608 if (target
== const0_rtx
)
5610 return expand_expr (len
, target
, mode
, EXPAND_NORMAL
);
5616 /* Expand a call to either the entry or exit function profiler. */
5619 expand_builtin_profile_func (bool exitp
)
5621 rtx this_rtx
, which
;
5623 this_rtx
= DECL_RTL (current_function_decl
);
5624 gcc_assert (MEM_P (this_rtx
));
5625 this_rtx
= XEXP (this_rtx
, 0);
5628 which
= profile_function_exit_libfunc
;
5630 which
= profile_function_entry_libfunc
;
5632 emit_library_call (which
, LCT_NORMAL
, VOIDmode
, 2, this_rtx
, Pmode
,
5633 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS
,
5640 /* Expand a call to __builtin___clear_cache. */
5643 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED
)
5645 #ifndef HAVE_clear_cache
5646 #ifdef CLEAR_INSN_CACHE
5647 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5648 does something. Just do the default expansion to a call to
5652 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5653 does nothing. There is no need to call it. Do nothing. */
5655 #endif /* CLEAR_INSN_CACHE */
5657 /* We have a "clear_cache" insn, and it will handle everything. */
5659 rtx begin_rtx
, end_rtx
;
5660 enum insn_code icode
;
5662 /* We must not expand to a library call. If we did, any
5663 fallback library function in libgcc that might contain a call to
5664 __builtin___clear_cache() would recurse infinitely. */
5665 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
5667 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5671 if (HAVE_clear_cache
)
5673 icode
= CODE_FOR_clear_cache
;
5675 begin
= CALL_EXPR_ARG (exp
, 0);
5676 begin_rtx
= expand_expr (begin
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
5677 begin_rtx
= convert_memory_address (Pmode
, begin_rtx
);
5678 if (!insn_data
[icode
].operand
[0].predicate (begin_rtx
, Pmode
))
5679 begin_rtx
= copy_to_mode_reg (Pmode
, begin_rtx
);
5681 end
= CALL_EXPR_ARG (exp
, 1);
5682 end_rtx
= expand_expr (end
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
5683 end_rtx
= convert_memory_address (Pmode
, end_rtx
);
5684 if (!insn_data
[icode
].operand
[1].predicate (end_rtx
, Pmode
))
5685 end_rtx
= copy_to_mode_reg (Pmode
, end_rtx
);
5687 emit_insn (gen_clear_cache (begin_rtx
, end_rtx
));
5690 #endif /* HAVE_clear_cache */
5693 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5696 round_trampoline_addr (rtx tramp
)
5698 rtx temp
, addend
, mask
;
5700 /* If we don't need too much alignment, we'll have been guaranteed
5701 proper alignment by get_trampoline_type. */
5702 if (TRAMPOLINE_ALIGNMENT
<= STACK_BOUNDARY
)
5705 /* Round address up to desired boundary. */
5706 temp
= gen_reg_rtx (Pmode
);
5707 addend
= GEN_INT (TRAMPOLINE_ALIGNMENT
/ BITS_PER_UNIT
- 1);
5708 mask
= GEN_INT (-TRAMPOLINE_ALIGNMENT
/ BITS_PER_UNIT
);
5710 temp
= expand_simple_binop (Pmode
, PLUS
, tramp
, addend
,
5711 temp
, 0, OPTAB_LIB_WIDEN
);
5712 tramp
= expand_simple_binop (Pmode
, AND
, temp
, mask
,
5713 temp
, 0, OPTAB_LIB_WIDEN
);
5719 expand_builtin_init_trampoline (tree exp
)
5721 tree t_tramp
, t_func
, t_chain
;
5722 rtx r_tramp
, r_func
, r_chain
;
5723 #ifdef TRAMPOLINE_TEMPLATE
5727 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
,
5728 POINTER_TYPE
, VOID_TYPE
))
5731 t_tramp
= CALL_EXPR_ARG (exp
, 0);
5732 t_func
= CALL_EXPR_ARG (exp
, 1);
5733 t_chain
= CALL_EXPR_ARG (exp
, 2);
5735 r_tramp
= expand_normal (t_tramp
);
5736 r_func
= expand_normal (t_func
);
5737 r_chain
= expand_normal (t_chain
);
5739 /* Generate insns to initialize the trampoline. */
5740 r_tramp
= round_trampoline_addr (r_tramp
);
5741 #ifdef TRAMPOLINE_TEMPLATE
5742 blktramp
= gen_rtx_MEM (BLKmode
, r_tramp
);
5743 set_mem_align (blktramp
, TRAMPOLINE_ALIGNMENT
);
5744 emit_block_move (blktramp
, assemble_trampoline_template (),
5745 GEN_INT (TRAMPOLINE_SIZE
), BLOCK_OP_NORMAL
);
5747 trampolines_created
= 1;
5748 INITIALIZE_TRAMPOLINE (r_tramp
, r_func
, r_chain
);
5754 expand_builtin_adjust_trampoline (tree exp
)
5758 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
5761 tramp
= expand_normal (CALL_EXPR_ARG (exp
, 0));
5762 tramp
= round_trampoline_addr (tramp
);
5763 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5764 TRAMPOLINE_ADJUST_ADDRESS (tramp
);
5770 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5771 function. The function first checks whether the back end provides
5772 an insn to implement signbit for the respective mode. If not, it
5773 checks whether the floating point format of the value is such that
5774 the sign bit can be extracted. If that is not the case, the
5775 function returns NULL_RTX to indicate that a normal call should be
5776 emitted rather than expanding the function in-line. EXP is the
5777 expression that is a call to the builtin function; if convenient,
5778 the result should be placed in TARGET. */
5780 expand_builtin_signbit (tree exp
, rtx target
)
5782 const struct real_format
*fmt
;
5783 enum machine_mode fmode
, imode
, rmode
;
5784 HOST_WIDE_INT hi
, lo
;
5787 enum insn_code icode
;
5790 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
5793 arg
= CALL_EXPR_ARG (exp
, 0);
5794 fmode
= TYPE_MODE (TREE_TYPE (arg
));
5795 rmode
= TYPE_MODE (TREE_TYPE (exp
));
5796 fmt
= REAL_MODE_FORMAT (fmode
);
5798 arg
= builtin_save_expr (arg
);
5800 /* Expand the argument yielding a RTX expression. */
5801 temp
= expand_normal (arg
);
5803 /* Check if the back end provides an insn that handles signbit for the
5805 icode
= signbit_optab
->handlers
[(int) fmode
].insn_code
;
5806 if (icode
!= CODE_FOR_nothing
)
5808 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
5809 emit_unop_insn (icode
, target
, temp
, UNKNOWN
);
5813 /* For floating point formats without a sign bit, implement signbit
5815 bitpos
= fmt
->signbit_ro
;
5818 /* But we can't do this if the format supports signed zero. */
5819 if (fmt
->has_signed_zero
&& HONOR_SIGNED_ZEROS (fmode
))
5822 arg
= fold_build2 (LT_EXPR
, TREE_TYPE (exp
), arg
,
5823 build_real (TREE_TYPE (arg
), dconst0
));
5824 return expand_expr (arg
, target
, VOIDmode
, EXPAND_NORMAL
);
5827 if (GET_MODE_SIZE (fmode
) <= UNITS_PER_WORD
)
5829 imode
= int_mode_for_mode (fmode
);
5830 if (imode
== BLKmode
)
5832 temp
= gen_lowpart (imode
, temp
);
5837 /* Handle targets with different FP word orders. */
5838 if (FLOAT_WORDS_BIG_ENDIAN
)
5839 word
= (GET_MODE_BITSIZE (fmode
) - bitpos
) / BITS_PER_WORD
;
5841 word
= bitpos
/ BITS_PER_WORD
;
5842 temp
= operand_subword_force (temp
, word
, fmode
);
5843 bitpos
= bitpos
% BITS_PER_WORD
;
5846 /* Force the intermediate word_mode (or narrower) result into a
5847 register. This avoids attempting to create paradoxical SUBREGs
5848 of floating point modes below. */
5849 temp
= force_reg (imode
, temp
);
5851 /* If the bitpos is within the "result mode" lowpart, the operation
5852 can be implement with a single bitwise AND. Otherwise, we need
5853 a right shift and an AND. */
5855 if (bitpos
< GET_MODE_BITSIZE (rmode
))
5857 if (bitpos
< HOST_BITS_PER_WIDE_INT
)
5860 lo
= (HOST_WIDE_INT
) 1 << bitpos
;
5864 hi
= (HOST_WIDE_INT
) 1 << (bitpos
- HOST_BITS_PER_WIDE_INT
);
5868 if (GET_MODE_SIZE (imode
) > GET_MODE_SIZE (rmode
))
5869 temp
= gen_lowpart (rmode
, temp
);
5870 temp
= expand_binop (rmode
, and_optab
, temp
,
5871 immed_double_const (lo
, hi
, rmode
),
5872 NULL_RTX
, 1, OPTAB_LIB_WIDEN
);
5876 /* Perform a logical right shift to place the signbit in the least
5877 significant bit, then truncate the result to the desired mode
5878 and mask just this bit. */
5879 temp
= expand_shift (RSHIFT_EXPR
, imode
, temp
,
5880 build_int_cst (NULL_TREE
, bitpos
), NULL_RTX
, 1);
5881 temp
= gen_lowpart (rmode
, temp
);
5882 temp
= expand_binop (rmode
, and_optab
, temp
, const1_rtx
,
5883 NULL_RTX
, 1, OPTAB_LIB_WIDEN
);
5889 /* Expand fork or exec calls. TARGET is the desired target of the
5890 call. EXP is the call. FN is the
5891 identificator of the actual function. IGNORE is nonzero if the
5892 value is to be ignored. */
5895 expand_builtin_fork_or_exec (tree fn
, tree exp
, rtx target
, int ignore
)
5900 /* If we are not profiling, just call the function. */
5901 if (!profile_arc_flag
)
5904 /* Otherwise call the wrapper. This should be equivalent for the rest of
5905 compiler, so the code does not diverge, and the wrapper may run the
5906 code necessary for keeping the profiling sane. */
5908 switch (DECL_FUNCTION_CODE (fn
))
5911 id
= get_identifier ("__gcov_fork");
5914 case BUILT_IN_EXECL
:
5915 id
= get_identifier ("__gcov_execl");
5918 case BUILT_IN_EXECV
:
5919 id
= get_identifier ("__gcov_execv");
5922 case BUILT_IN_EXECLP
:
5923 id
= get_identifier ("__gcov_execlp");
5926 case BUILT_IN_EXECLE
:
5927 id
= get_identifier ("__gcov_execle");
5930 case BUILT_IN_EXECVP
:
5931 id
= get_identifier ("__gcov_execvp");
5934 case BUILT_IN_EXECVE
:
5935 id
= get_identifier ("__gcov_execve");
5942 decl
= build_decl (FUNCTION_DECL
, id
, TREE_TYPE (fn
));
5943 DECL_EXTERNAL (decl
) = 1;
5944 TREE_PUBLIC (decl
) = 1;
5945 DECL_ARTIFICIAL (decl
) = 1;
5946 TREE_NOTHROW (decl
) = 1;
5947 DECL_VISIBILITY (decl
) = VISIBILITY_DEFAULT
;
5948 DECL_VISIBILITY_SPECIFIED (decl
) = 1;
5949 call
= rewrite_call_expr (exp
, 0, decl
, 0);
5950 return expand_call (call
, target
, ignore
);
5955 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5956 the pointer in these functions is void*, the tree optimizers may remove
5957 casts. The mode computed in expand_builtin isn't reliable either, due
5958 to __sync_bool_compare_and_swap.
5960 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5961 group of builtins. This gives us log2 of the mode size. */
5963 static inline enum machine_mode
5964 get_builtin_sync_mode (int fcode_diff
)
5966 /* The size is not negotiable, so ask not to get BLKmode in return
5967 if the target indicates that a smaller size would be better. */
5968 return mode_for_size (BITS_PER_UNIT
<< fcode_diff
, MODE_INT
, 0);
5971 /* Expand the memory expression LOC and return the appropriate memory operand
5972 for the builtin_sync operations. */
5975 get_builtin_sync_mem (tree loc
, enum machine_mode mode
)
5979 addr
= expand_expr (loc
, NULL_RTX
, Pmode
, EXPAND_SUM
);
5981 /* Note that we explicitly do not want any alias information for this
5982 memory, so that we kill all other live memories. Otherwise we don't
5983 satisfy the full barrier semantics of the intrinsic. */
5984 mem
= validize_mem (gen_rtx_MEM (mode
, addr
));
5986 set_mem_align (mem
, get_pointer_alignment (loc
, BIGGEST_ALIGNMENT
));
5987 set_mem_alias_set (mem
, ALIAS_SET_MEMORY_BARRIER
);
5988 MEM_VOLATILE_P (mem
) = 1;
5993 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5994 EXP is the CALL_EXPR. CODE is the rtx code
5995 that corresponds to the arithmetic or logical operation from the name;
5996 an exception here is that NOT actually means NAND. TARGET is an optional
5997 place for us to store the results; AFTER is true if this is the
5998 fetch_and_xxx form. IGNORE is true if we don't actually care about
5999 the result of the operation at all. */
6002 expand_builtin_sync_operation (enum machine_mode mode
, tree exp
,
6003 enum rtx_code code
, bool after
,
6004 rtx target
, bool ignore
)
6007 enum machine_mode old_mode
;
6009 if (code
== NOT
&& warn_sync_nand
)
6011 tree fndecl
= get_callee_fndecl (exp
);
6012 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
6014 static bool warned_f_a_n
, warned_n_a_f
;
6018 case BUILT_IN_FETCH_AND_NAND_1
:
6019 case BUILT_IN_FETCH_AND_NAND_2
:
6020 case BUILT_IN_FETCH_AND_NAND_4
:
6021 case BUILT_IN_FETCH_AND_NAND_8
:
6022 case BUILT_IN_FETCH_AND_NAND_16
:
6027 fndecl
= implicit_built_in_decls
[BUILT_IN_FETCH_AND_NAND_N
];
6028 inform (input_location
,
6029 "%qD changed semantics in GCC 4.4", fndecl
);
6030 warned_f_a_n
= true;
6033 case BUILT_IN_NAND_AND_FETCH_1
:
6034 case BUILT_IN_NAND_AND_FETCH_2
:
6035 case BUILT_IN_NAND_AND_FETCH_4
:
6036 case BUILT_IN_NAND_AND_FETCH_8
:
6037 case BUILT_IN_NAND_AND_FETCH_16
:
6042 fndecl
= implicit_built_in_decls
[BUILT_IN_NAND_AND_FETCH_N
];
6043 inform (input_location
,
6044 "%qD changed semantics in GCC 4.4", fndecl
);
6045 warned_n_a_f
= true;
6053 /* Expand the operands. */
6054 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
6056 val
= expand_expr (CALL_EXPR_ARG (exp
, 1), NULL_RTX
, mode
, EXPAND_NORMAL
);
6057 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6058 of CONST_INTs, where we know the old_mode only from the call argument. */
6059 old_mode
= GET_MODE (val
);
6060 if (old_mode
== VOIDmode
)
6061 old_mode
= TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 1)));
6062 val
= convert_modes (mode
, old_mode
, val
, 1);
6065 return expand_sync_operation (mem
, val
, code
);
6067 return expand_sync_fetch_operation (mem
, val
, code
, after
, target
);
6070 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
6071 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
6072 true if this is the boolean form. TARGET is a place for us to store the
6073 results; this is NOT optional if IS_BOOL is true. */
6076 expand_builtin_compare_and_swap (enum machine_mode mode
, tree exp
,
6077 bool is_bool
, rtx target
)
6079 rtx old_val
, new_val
, mem
;
6080 enum machine_mode old_mode
;
6082 /* Expand the operands. */
6083 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
6086 old_val
= expand_expr (CALL_EXPR_ARG (exp
, 1), NULL_RTX
,
6087 mode
, EXPAND_NORMAL
);
6088 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6089 of CONST_INTs, where we know the old_mode only from the call argument. */
6090 old_mode
= GET_MODE (old_val
);
6091 if (old_mode
== VOIDmode
)
6092 old_mode
= TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 1)));
6093 old_val
= convert_modes (mode
, old_mode
, old_val
, 1);
6095 new_val
= expand_expr (CALL_EXPR_ARG (exp
, 2), NULL_RTX
,
6096 mode
, EXPAND_NORMAL
);
6097 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6098 of CONST_INTs, where we know the old_mode only from the call argument. */
6099 old_mode
= GET_MODE (new_val
);
6100 if (old_mode
== VOIDmode
)
6101 old_mode
= TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 2)));
6102 new_val
= convert_modes (mode
, old_mode
, new_val
, 1);
6105 return expand_bool_compare_and_swap (mem
, old_val
, new_val
, target
);
6107 return expand_val_compare_and_swap (mem
, old_val
, new_val
, target
);
6110 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6111 general form is actually an atomic exchange, and some targets only
6112 support a reduced form with the second argument being a constant 1.
6113 EXP is the CALL_EXPR; TARGET is an optional place for us to store
6117 expand_builtin_lock_test_and_set (enum machine_mode mode
, tree exp
,
6121 enum machine_mode old_mode
;
6123 /* Expand the operands. */
6124 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
6125 val
= expand_expr (CALL_EXPR_ARG (exp
, 1), NULL_RTX
, mode
, EXPAND_NORMAL
);
6126 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6127 of CONST_INTs, where we know the old_mode only from the call argument. */
6128 old_mode
= GET_MODE (val
);
6129 if (old_mode
== VOIDmode
)
6130 old_mode
= TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 1)));
6131 val
= convert_modes (mode
, old_mode
, val
, 1);
6133 return expand_sync_lock_test_and_set (mem
, val
, target
);
6136 /* Expand the __sync_synchronize intrinsic. */
6139 expand_builtin_synchronize (void)
6143 #ifdef HAVE_memory_barrier
6144 if (HAVE_memory_barrier
)
6146 emit_insn (gen_memory_barrier ());
6151 if (synchronize_libfunc
!= NULL_RTX
)
6153 emit_library_call (synchronize_libfunc
, LCT_NORMAL
, VOIDmode
, 0);
6157 /* If no explicit memory barrier instruction is available, create an
6158 empty asm stmt with a memory clobber. */
6159 x
= build4 (ASM_EXPR
, void_type_node
, build_string (0, ""), NULL
, NULL
,
6160 tree_cons (NULL
, build_string (6, "memory"), NULL
));
6161 ASM_VOLATILE_P (x
) = 1;
6162 expand_asm_expr (x
);
6165 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6168 expand_builtin_lock_release (enum machine_mode mode
, tree exp
)
6170 enum insn_code icode
;
6172 rtx val
= const0_rtx
;
6174 /* Expand the operands. */
6175 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
6177 /* If there is an explicit operation in the md file, use it. */
6178 icode
= sync_lock_release
[mode
];
6179 if (icode
!= CODE_FOR_nothing
)
6181 if (!insn_data
[icode
].operand
[1].predicate (val
, mode
))
6182 val
= force_reg (mode
, val
);
6184 insn
= GEN_FCN (icode
) (mem
, val
);
6192 /* Otherwise we can implement this operation by emitting a barrier
6193 followed by a store of zero. */
6194 expand_builtin_synchronize ();
6195 emit_move_insn (mem
, val
);
6198 /* Expand an expression EXP that calls a built-in function,
6199 with result going to TARGET if that's convenient
6200 (and in mode MODE if that's convenient).
6201 SUBTARGET may be used as the target for computing one of EXP's operands.
6202 IGNORE is nonzero if the value is to be ignored. */
6205 expand_builtin (tree exp
, rtx target
, rtx subtarget
, enum machine_mode mode
,
6208 tree fndecl
= get_callee_fndecl (exp
);
6209 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
6210 enum machine_mode target_mode
= TYPE_MODE (TREE_TYPE (exp
));
6212 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
6213 return targetm
.expand_builtin (exp
, target
, subtarget
, mode
, ignore
);
6215 /* When not optimizing, generate calls to library functions for a certain
6218 && !called_as_built_in (fndecl
)
6219 && DECL_ASSEMBLER_NAME_SET_P (fndecl
)
6220 && fcode
!= BUILT_IN_ALLOCA
6221 && fcode
!= BUILT_IN_FREE
)
6222 return expand_call (exp
, target
, ignore
);
6224 /* The built-in function expanders test for target == const0_rtx
6225 to determine whether the function's result will be ignored. */
6227 target
= const0_rtx
;
6229 /* If the result of a pure or const built-in function is ignored, and
6230 none of its arguments are volatile, we can avoid expanding the
6231 built-in call and just evaluate the arguments for side-effects. */
6232 if (target
== const0_rtx
6233 && (DECL_PURE_P (fndecl
) || TREE_READONLY (fndecl
)))
6235 bool volatilep
= false;
6237 call_expr_arg_iterator iter
;
6239 FOR_EACH_CALL_EXPR_ARG (arg
, iter
, exp
)
6240 if (TREE_THIS_VOLATILE (arg
))
6248 FOR_EACH_CALL_EXPR_ARG (arg
, iter
, exp
)
6249 expand_expr (arg
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
6256 CASE_FLT_FN (BUILT_IN_FABS
):
6257 target
= expand_builtin_fabs (exp
, target
, subtarget
);
6262 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
6263 target
= expand_builtin_copysign (exp
, target
, subtarget
);
6268 /* Just do a normal library call if we were unable to fold
6270 CASE_FLT_FN (BUILT_IN_CABS
):
6273 CASE_FLT_FN (BUILT_IN_EXP
):
6274 CASE_FLT_FN (BUILT_IN_EXP10
):
6275 CASE_FLT_FN (BUILT_IN_POW10
):
6276 CASE_FLT_FN (BUILT_IN_EXP2
):
6277 CASE_FLT_FN (BUILT_IN_EXPM1
):
6278 CASE_FLT_FN (BUILT_IN_LOGB
):
6279 CASE_FLT_FN (BUILT_IN_LOG
):
6280 CASE_FLT_FN (BUILT_IN_LOG10
):
6281 CASE_FLT_FN (BUILT_IN_LOG2
):
6282 CASE_FLT_FN (BUILT_IN_LOG1P
):
6283 CASE_FLT_FN (BUILT_IN_TAN
):
6284 CASE_FLT_FN (BUILT_IN_ASIN
):
6285 CASE_FLT_FN (BUILT_IN_ACOS
):
6286 CASE_FLT_FN (BUILT_IN_ATAN
):
6287 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6288 because of possible accuracy problems. */
6289 if (! flag_unsafe_math_optimizations
)
6291 CASE_FLT_FN (BUILT_IN_SQRT
):
6292 CASE_FLT_FN (BUILT_IN_FLOOR
):
6293 CASE_FLT_FN (BUILT_IN_CEIL
):
6294 CASE_FLT_FN (BUILT_IN_TRUNC
):
6295 CASE_FLT_FN (BUILT_IN_ROUND
):
6296 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
6297 CASE_FLT_FN (BUILT_IN_RINT
):
6298 target
= expand_builtin_mathfn (exp
, target
, subtarget
);
6303 CASE_FLT_FN (BUILT_IN_ILOGB
):
6304 if (! flag_unsafe_math_optimizations
)
6306 CASE_FLT_FN (BUILT_IN_ISINF
):
6307 CASE_FLT_FN (BUILT_IN_FINITE
):
6308 case BUILT_IN_ISFINITE
:
6309 case BUILT_IN_ISNORMAL
:
6310 target
= expand_builtin_interclass_mathfn (exp
, target
, subtarget
);
6315 CASE_FLT_FN (BUILT_IN_LCEIL
):
6316 CASE_FLT_FN (BUILT_IN_LLCEIL
):
6317 CASE_FLT_FN (BUILT_IN_LFLOOR
):
6318 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
6319 target
= expand_builtin_int_roundingfn (exp
, target
);
6324 CASE_FLT_FN (BUILT_IN_LRINT
):
6325 CASE_FLT_FN (BUILT_IN_LLRINT
):
6326 CASE_FLT_FN (BUILT_IN_LROUND
):
6327 CASE_FLT_FN (BUILT_IN_LLROUND
):
6328 target
= expand_builtin_int_roundingfn_2 (exp
, target
);
6333 CASE_FLT_FN (BUILT_IN_POW
):
6334 target
= expand_builtin_pow (exp
, target
, subtarget
);
6339 CASE_FLT_FN (BUILT_IN_POWI
):
6340 target
= expand_builtin_powi (exp
, target
, subtarget
);
6345 CASE_FLT_FN (BUILT_IN_ATAN2
):
6346 CASE_FLT_FN (BUILT_IN_LDEXP
):
6347 CASE_FLT_FN (BUILT_IN_SCALB
):
6348 CASE_FLT_FN (BUILT_IN_SCALBN
):
6349 CASE_FLT_FN (BUILT_IN_SCALBLN
):
6350 if (! flag_unsafe_math_optimizations
)
6353 CASE_FLT_FN (BUILT_IN_FMOD
):
6354 CASE_FLT_FN (BUILT_IN_REMAINDER
):
6355 CASE_FLT_FN (BUILT_IN_DREM
):
6356 target
= expand_builtin_mathfn_2 (exp
, target
, subtarget
);
6361 CASE_FLT_FN (BUILT_IN_CEXPI
):
6362 target
= expand_builtin_cexpi (exp
, target
, subtarget
);
6363 gcc_assert (target
);
6366 CASE_FLT_FN (BUILT_IN_SIN
):
6367 CASE_FLT_FN (BUILT_IN_COS
):
6368 if (! flag_unsafe_math_optimizations
)
6370 target
= expand_builtin_mathfn_3 (exp
, target
, subtarget
);
6375 CASE_FLT_FN (BUILT_IN_SINCOS
):
6376 if (! flag_unsafe_math_optimizations
)
6378 target
= expand_builtin_sincos (exp
);
6383 case BUILT_IN_APPLY_ARGS
:
6384 return expand_builtin_apply_args ();
6386 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6387 FUNCTION with a copy of the parameters described by
6388 ARGUMENTS, and ARGSIZE. It returns a block of memory
6389 allocated on the stack into which is stored all the registers
6390 that might possibly be used for returning the result of a
6391 function. ARGUMENTS is the value returned by
6392 __builtin_apply_args. ARGSIZE is the number of bytes of
6393 arguments that must be copied. ??? How should this value be
6394 computed? We'll also need a safe worst case value for varargs
6396 case BUILT_IN_APPLY
:
6397 if (!validate_arglist (exp
, POINTER_TYPE
,
6398 POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
)
6399 && !validate_arglist (exp
, REFERENCE_TYPE
,
6400 POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
6406 ops
[0] = expand_normal (CALL_EXPR_ARG (exp
, 0));
6407 ops
[1] = expand_normal (CALL_EXPR_ARG (exp
, 1));
6408 ops
[2] = expand_normal (CALL_EXPR_ARG (exp
, 2));
6410 return expand_builtin_apply (ops
[0], ops
[1], ops
[2]);
6413 /* __builtin_return (RESULT) causes the function to return the
6414 value described by RESULT. RESULT is address of the block of
6415 memory returned by __builtin_apply. */
6416 case BUILT_IN_RETURN
:
6417 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6418 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp
, 0)));
6421 case BUILT_IN_SAVEREGS
:
6422 return expand_builtin_saveregs ();
6424 case BUILT_IN_ARGS_INFO
:
6425 return expand_builtin_args_info (exp
);
6427 case BUILT_IN_VA_ARG_PACK
:
6428 /* All valid uses of __builtin_va_arg_pack () are removed during
6430 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp
);
6433 case BUILT_IN_VA_ARG_PACK_LEN
:
6434 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6436 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp
);
6439 /* Return the address of the first anonymous stack arg. */
6440 case BUILT_IN_NEXT_ARG
:
6441 if (fold_builtin_next_arg (exp
, false))
6443 return expand_builtin_next_arg ();
6445 case BUILT_IN_CLEAR_CACHE
:
6446 target
= expand_builtin___clear_cache (exp
);
6451 case BUILT_IN_CLASSIFY_TYPE
:
6452 return expand_builtin_classify_type (exp
);
6454 case BUILT_IN_CONSTANT_P
:
6457 case BUILT_IN_FRAME_ADDRESS
:
6458 case BUILT_IN_RETURN_ADDRESS
:
6459 return expand_builtin_frame_address (fndecl
, exp
);
6461 /* Returns the address of the area where the structure is returned.
6463 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS
:
6464 if (call_expr_nargs (exp
) != 0
6465 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl
)))
6466 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl
))))
6469 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl
)), 0);
6471 case BUILT_IN_ALLOCA
:
6472 target
= expand_builtin_alloca (exp
, target
);
6477 case BUILT_IN_STACK_SAVE
:
6478 return expand_stack_save ();
6480 case BUILT_IN_STACK_RESTORE
:
6481 expand_stack_restore (CALL_EXPR_ARG (exp
, 0));
6484 case BUILT_IN_BSWAP32
:
6485 case BUILT_IN_BSWAP64
:
6486 target
= expand_builtin_bswap (exp
, target
, subtarget
);
6492 CASE_INT_FN (BUILT_IN_FFS
):
6493 case BUILT_IN_FFSIMAX
:
6494 target
= expand_builtin_unop (target_mode
, exp
, target
,
6495 subtarget
, ffs_optab
);
6500 CASE_INT_FN (BUILT_IN_CLZ
):
6501 case BUILT_IN_CLZIMAX
:
6502 target
= expand_builtin_unop (target_mode
, exp
, target
,
6503 subtarget
, clz_optab
);
6508 CASE_INT_FN (BUILT_IN_CTZ
):
6509 case BUILT_IN_CTZIMAX
:
6510 target
= expand_builtin_unop (target_mode
, exp
, target
,
6511 subtarget
, ctz_optab
);
6516 CASE_INT_FN (BUILT_IN_POPCOUNT
):
6517 case BUILT_IN_POPCOUNTIMAX
:
6518 target
= expand_builtin_unop (target_mode
, exp
, target
,
6519 subtarget
, popcount_optab
);
6524 CASE_INT_FN (BUILT_IN_PARITY
):
6525 case BUILT_IN_PARITYIMAX
:
6526 target
= expand_builtin_unop (target_mode
, exp
, target
,
6527 subtarget
, parity_optab
);
6532 case BUILT_IN_STRLEN
:
6533 target
= expand_builtin_strlen (exp
, target
, target_mode
);
6538 case BUILT_IN_STRCPY
:
6539 target
= expand_builtin_strcpy (fndecl
, exp
, target
, mode
);
6544 case BUILT_IN_STRNCPY
:
6545 target
= expand_builtin_strncpy (exp
, target
, mode
);
6550 case BUILT_IN_STPCPY
:
6551 target
= expand_builtin_stpcpy (exp
, target
, mode
);
6556 case BUILT_IN_STRCAT
:
6557 target
= expand_builtin_strcat (fndecl
, exp
, target
, mode
);
6562 case BUILT_IN_STRNCAT
:
6563 target
= expand_builtin_strncat (exp
, target
, mode
);
6568 case BUILT_IN_STRSPN
:
6569 target
= expand_builtin_strspn (exp
, target
, mode
);
6574 case BUILT_IN_STRCSPN
:
6575 target
= expand_builtin_strcspn (exp
, target
, mode
);
6580 case BUILT_IN_STRSTR
:
6581 target
= expand_builtin_strstr (exp
, target
, mode
);
6586 case BUILT_IN_STRPBRK
:
6587 target
= expand_builtin_strpbrk (exp
, target
, mode
);
6592 case BUILT_IN_INDEX
:
6593 case BUILT_IN_STRCHR
:
6594 target
= expand_builtin_strchr (exp
, target
, mode
);
6599 case BUILT_IN_RINDEX
:
6600 case BUILT_IN_STRRCHR
:
6601 target
= expand_builtin_strrchr (exp
, target
, mode
);
6606 case BUILT_IN_MEMCPY
:
6607 target
= expand_builtin_memcpy (exp
, target
, mode
);
6612 case BUILT_IN_MEMPCPY
:
6613 target
= expand_builtin_mempcpy (exp
, target
, mode
);
6618 case BUILT_IN_MEMMOVE
:
6619 target
= expand_builtin_memmove (exp
, target
, mode
, ignore
);
6624 case BUILT_IN_BCOPY
:
6625 target
= expand_builtin_bcopy (exp
, ignore
);
6630 case BUILT_IN_MEMSET
:
6631 target
= expand_builtin_memset (exp
, target
, mode
);
6636 case BUILT_IN_BZERO
:
6637 target
= expand_builtin_bzero (exp
);
6642 case BUILT_IN_STRCMP
:
6643 target
= expand_builtin_strcmp (exp
, target
, mode
);
6648 case BUILT_IN_STRNCMP
:
6649 target
= expand_builtin_strncmp (exp
, target
, mode
);
6654 case BUILT_IN_MEMCHR
:
6655 target
= expand_builtin_memchr (exp
, target
, mode
);
6661 case BUILT_IN_MEMCMP
:
6662 target
= expand_builtin_memcmp (exp
, target
, mode
);
6667 case BUILT_IN_SETJMP
:
6668 /* This should have been lowered to the builtins below. */
6671 case BUILT_IN_SETJMP_SETUP
:
6672 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6673 and the receiver label. */
6674 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
6676 rtx buf_addr
= expand_expr (CALL_EXPR_ARG (exp
, 0), subtarget
,
6677 VOIDmode
, EXPAND_NORMAL
);
6678 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 1), 0);
6679 rtx label_r
= label_rtx (label
);
6681 /* This is copied from the handling of non-local gotos. */
6682 expand_builtin_setjmp_setup (buf_addr
, label_r
);
6683 nonlocal_goto_handler_labels
6684 = gen_rtx_EXPR_LIST (VOIDmode
, label_r
,
6685 nonlocal_goto_handler_labels
);
6686 /* ??? Do not let expand_label treat us as such since we would
6687 not want to be both on the list of non-local labels and on
6688 the list of forced labels. */
6689 FORCED_LABEL (label
) = 0;
6694 case BUILT_IN_SETJMP_DISPATCHER
:
6695 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6696 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6698 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 0), 0);
6699 rtx label_r
= label_rtx (label
);
6701 /* Remove the dispatcher label from the list of non-local labels
6702 since the receiver labels have been added to it above. */
6703 remove_node_from_expr_list (label_r
, &nonlocal_goto_handler_labels
);
6708 case BUILT_IN_SETJMP_RECEIVER
:
6709 /* __builtin_setjmp_receiver is passed the receiver label. */
6710 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6712 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 0), 0);
6713 rtx label_r
= label_rtx (label
);
6715 expand_builtin_setjmp_receiver (label_r
);
6720 /* __builtin_longjmp is passed a pointer to an array of five words.
6721 It's similar to the C library longjmp function but works with
6722 __builtin_setjmp above. */
6723 case BUILT_IN_LONGJMP
:
6724 if (validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
6726 rtx buf_addr
= expand_expr (CALL_EXPR_ARG (exp
, 0), subtarget
,
6727 VOIDmode
, EXPAND_NORMAL
);
6728 rtx value
= expand_normal (CALL_EXPR_ARG (exp
, 1));
6730 if (value
!= const1_rtx
)
6732 error ("%<__builtin_longjmp%> second argument must be 1");
6736 expand_builtin_longjmp (buf_addr
, value
);
6741 case BUILT_IN_NONLOCAL_GOTO
:
6742 target
= expand_builtin_nonlocal_goto (exp
);
6747 /* This updates the setjmp buffer that is its argument with the value
6748 of the current stack pointer. */
6749 case BUILT_IN_UPDATE_SETJMP_BUF
:
6750 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6753 = expand_normal (CALL_EXPR_ARG (exp
, 0));
6755 expand_builtin_update_setjmp_buf (buf_addr
);
6761 expand_builtin_trap ();
6764 case BUILT_IN_PRINTF
:
6765 target
= expand_builtin_printf (exp
, target
, mode
, false);
6770 case BUILT_IN_PRINTF_UNLOCKED
:
6771 target
= expand_builtin_printf (exp
, target
, mode
, true);
6776 case BUILT_IN_FPUTS
:
6777 target
= expand_builtin_fputs (exp
, target
, false);
6781 case BUILT_IN_FPUTS_UNLOCKED
:
6782 target
= expand_builtin_fputs (exp
, target
, true);
6787 case BUILT_IN_FPRINTF
:
6788 target
= expand_builtin_fprintf (exp
, target
, mode
, false);
6793 case BUILT_IN_FPRINTF_UNLOCKED
:
6794 target
= expand_builtin_fprintf (exp
, target
, mode
, true);
6799 case BUILT_IN_SPRINTF
:
6800 target
= expand_builtin_sprintf (exp
, target
, mode
);
6805 CASE_FLT_FN (BUILT_IN_SIGNBIT
):
6806 case BUILT_IN_SIGNBITD32
:
6807 case BUILT_IN_SIGNBITD64
:
6808 case BUILT_IN_SIGNBITD128
:
6809 target
= expand_builtin_signbit (exp
, target
);
6814 /* Various hooks for the DWARF 2 __throw routine. */
6815 case BUILT_IN_UNWIND_INIT
:
6816 expand_builtin_unwind_init ();
6818 case BUILT_IN_DWARF_CFA
:
6819 return virtual_cfa_rtx
;
6820 #ifdef DWARF2_UNWIND_INFO
6821 case BUILT_IN_DWARF_SP_COLUMN
:
6822 return expand_builtin_dwarf_sp_column ();
6823 case BUILT_IN_INIT_DWARF_REG_SIZES
:
6824 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp
, 0));
6827 case BUILT_IN_FROB_RETURN_ADDR
:
6828 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp
, 0));
6829 case BUILT_IN_EXTRACT_RETURN_ADDR
:
6830 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp
, 0));
6831 case BUILT_IN_EH_RETURN
:
6832 expand_builtin_eh_return (CALL_EXPR_ARG (exp
, 0),
6833 CALL_EXPR_ARG (exp
, 1));
6835 #ifdef EH_RETURN_DATA_REGNO
6836 case BUILT_IN_EH_RETURN_DATA_REGNO
:
6837 return expand_builtin_eh_return_data_regno (exp
);
6839 case BUILT_IN_EXTEND_POINTER
:
6840 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp
, 0));
6842 case BUILT_IN_VA_START
:
6843 return expand_builtin_va_start (exp
);
6844 case BUILT_IN_VA_END
:
6845 return expand_builtin_va_end (exp
);
6846 case BUILT_IN_VA_COPY
:
6847 return expand_builtin_va_copy (exp
);
6848 case BUILT_IN_EXPECT
:
6849 return expand_builtin_expect (exp
, target
);
6850 case BUILT_IN_PREFETCH
:
6851 expand_builtin_prefetch (exp
);
6854 case BUILT_IN_PROFILE_FUNC_ENTER
:
6855 return expand_builtin_profile_func (false);
6856 case BUILT_IN_PROFILE_FUNC_EXIT
:
6857 return expand_builtin_profile_func (true);
6859 case BUILT_IN_INIT_TRAMPOLINE
:
6860 return expand_builtin_init_trampoline (exp
);
6861 case BUILT_IN_ADJUST_TRAMPOLINE
:
6862 return expand_builtin_adjust_trampoline (exp
);
6865 case BUILT_IN_EXECL
:
6866 case BUILT_IN_EXECV
:
6867 case BUILT_IN_EXECLP
:
6868 case BUILT_IN_EXECLE
:
6869 case BUILT_IN_EXECVP
:
6870 case BUILT_IN_EXECVE
:
6871 target
= expand_builtin_fork_or_exec (fndecl
, exp
, target
, ignore
);
6876 case BUILT_IN_FETCH_AND_ADD_1
:
6877 case BUILT_IN_FETCH_AND_ADD_2
:
6878 case BUILT_IN_FETCH_AND_ADD_4
:
6879 case BUILT_IN_FETCH_AND_ADD_8
:
6880 case BUILT_IN_FETCH_AND_ADD_16
:
6881 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_FETCH_AND_ADD_1
);
6882 target
= expand_builtin_sync_operation (mode
, exp
, PLUS
,
6883 false, target
, ignore
);
6888 case BUILT_IN_FETCH_AND_SUB_1
:
6889 case BUILT_IN_FETCH_AND_SUB_2
:
6890 case BUILT_IN_FETCH_AND_SUB_4
:
6891 case BUILT_IN_FETCH_AND_SUB_8
:
6892 case BUILT_IN_FETCH_AND_SUB_16
:
6893 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_FETCH_AND_SUB_1
);
6894 target
= expand_builtin_sync_operation (mode
, exp
, MINUS
,
6895 false, target
, ignore
);
6900 case BUILT_IN_FETCH_AND_OR_1
:
6901 case BUILT_IN_FETCH_AND_OR_2
:
6902 case BUILT_IN_FETCH_AND_OR_4
:
6903 case BUILT_IN_FETCH_AND_OR_8
:
6904 case BUILT_IN_FETCH_AND_OR_16
:
6905 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_FETCH_AND_OR_1
);
6906 target
= expand_builtin_sync_operation (mode
, exp
, IOR
,
6907 false, target
, ignore
);
6912 case BUILT_IN_FETCH_AND_AND_1
:
6913 case BUILT_IN_FETCH_AND_AND_2
:
6914 case BUILT_IN_FETCH_AND_AND_4
:
6915 case BUILT_IN_FETCH_AND_AND_8
:
6916 case BUILT_IN_FETCH_AND_AND_16
:
6917 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_FETCH_AND_AND_1
);
6918 target
= expand_builtin_sync_operation (mode
, exp
, AND
,
6919 false, target
, ignore
);
6924 case BUILT_IN_FETCH_AND_XOR_1
:
6925 case BUILT_IN_FETCH_AND_XOR_2
:
6926 case BUILT_IN_FETCH_AND_XOR_4
:
6927 case BUILT_IN_FETCH_AND_XOR_8
:
6928 case BUILT_IN_FETCH_AND_XOR_16
:
6929 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_FETCH_AND_XOR_1
);
6930 target
= expand_builtin_sync_operation (mode
, exp
, XOR
,
6931 false, target
, ignore
);
6936 case BUILT_IN_FETCH_AND_NAND_1
:
6937 case BUILT_IN_FETCH_AND_NAND_2
:
6938 case BUILT_IN_FETCH_AND_NAND_4
:
6939 case BUILT_IN_FETCH_AND_NAND_8
:
6940 case BUILT_IN_FETCH_AND_NAND_16
:
6941 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_FETCH_AND_NAND_1
);
6942 target
= expand_builtin_sync_operation (mode
, exp
, NOT
,
6943 false, target
, ignore
);
6948 case BUILT_IN_ADD_AND_FETCH_1
:
6949 case BUILT_IN_ADD_AND_FETCH_2
:
6950 case BUILT_IN_ADD_AND_FETCH_4
:
6951 case BUILT_IN_ADD_AND_FETCH_8
:
6952 case BUILT_IN_ADD_AND_FETCH_16
:
6953 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ADD_AND_FETCH_1
);
6954 target
= expand_builtin_sync_operation (mode
, exp
, PLUS
,
6955 true, target
, ignore
);
6960 case BUILT_IN_SUB_AND_FETCH_1
:
6961 case BUILT_IN_SUB_AND_FETCH_2
:
6962 case BUILT_IN_SUB_AND_FETCH_4
:
6963 case BUILT_IN_SUB_AND_FETCH_8
:
6964 case BUILT_IN_SUB_AND_FETCH_16
:
6965 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SUB_AND_FETCH_1
);
6966 target
= expand_builtin_sync_operation (mode
, exp
, MINUS
,
6967 true, target
, ignore
);
6972 case BUILT_IN_OR_AND_FETCH_1
:
6973 case BUILT_IN_OR_AND_FETCH_2
:
6974 case BUILT_IN_OR_AND_FETCH_4
:
6975 case BUILT_IN_OR_AND_FETCH_8
:
6976 case BUILT_IN_OR_AND_FETCH_16
:
6977 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_OR_AND_FETCH_1
);
6978 target
= expand_builtin_sync_operation (mode
, exp
, IOR
,
6979 true, target
, ignore
);
6984 case BUILT_IN_AND_AND_FETCH_1
:
6985 case BUILT_IN_AND_AND_FETCH_2
:
6986 case BUILT_IN_AND_AND_FETCH_4
:
6987 case BUILT_IN_AND_AND_FETCH_8
:
6988 case BUILT_IN_AND_AND_FETCH_16
:
6989 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_AND_AND_FETCH_1
);
6990 target
= expand_builtin_sync_operation (mode
, exp
, AND
,
6991 true, target
, ignore
);
6996 case BUILT_IN_XOR_AND_FETCH_1
:
6997 case BUILT_IN_XOR_AND_FETCH_2
:
6998 case BUILT_IN_XOR_AND_FETCH_4
:
6999 case BUILT_IN_XOR_AND_FETCH_8
:
7000 case BUILT_IN_XOR_AND_FETCH_16
:
7001 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_XOR_AND_FETCH_1
);
7002 target
= expand_builtin_sync_operation (mode
, exp
, XOR
,
7003 true, target
, ignore
);
7008 case BUILT_IN_NAND_AND_FETCH_1
:
7009 case BUILT_IN_NAND_AND_FETCH_2
:
7010 case BUILT_IN_NAND_AND_FETCH_4
:
7011 case BUILT_IN_NAND_AND_FETCH_8
:
7012 case BUILT_IN_NAND_AND_FETCH_16
:
7013 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_NAND_AND_FETCH_1
);
7014 target
= expand_builtin_sync_operation (mode
, exp
, NOT
,
7015 true, target
, ignore
);
7020 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1
:
7021 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2
:
7022 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4
:
7023 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8
:
7024 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16
:
7025 if (mode
== VOIDmode
)
7026 mode
= TYPE_MODE (boolean_type_node
);
7027 if (!target
|| !register_operand (target
, mode
))
7028 target
= gen_reg_rtx (mode
);
7030 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_BOOL_COMPARE_AND_SWAP_1
);
7031 target
= expand_builtin_compare_and_swap (mode
, exp
, true, target
);
7036 case BUILT_IN_VAL_COMPARE_AND_SWAP_1
:
7037 case BUILT_IN_VAL_COMPARE_AND_SWAP_2
:
7038 case BUILT_IN_VAL_COMPARE_AND_SWAP_4
:
7039 case BUILT_IN_VAL_COMPARE_AND_SWAP_8
:
7040 case BUILT_IN_VAL_COMPARE_AND_SWAP_16
:
7041 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_VAL_COMPARE_AND_SWAP_1
);
7042 target
= expand_builtin_compare_and_swap (mode
, exp
, false, target
);
7047 case BUILT_IN_LOCK_TEST_AND_SET_1
:
7048 case BUILT_IN_LOCK_TEST_AND_SET_2
:
7049 case BUILT_IN_LOCK_TEST_AND_SET_4
:
7050 case BUILT_IN_LOCK_TEST_AND_SET_8
:
7051 case BUILT_IN_LOCK_TEST_AND_SET_16
:
7052 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_LOCK_TEST_AND_SET_1
);
7053 target
= expand_builtin_lock_test_and_set (mode
, exp
, target
);
7058 case BUILT_IN_LOCK_RELEASE_1
:
7059 case BUILT_IN_LOCK_RELEASE_2
:
7060 case BUILT_IN_LOCK_RELEASE_4
:
7061 case BUILT_IN_LOCK_RELEASE_8
:
7062 case BUILT_IN_LOCK_RELEASE_16
:
7063 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_LOCK_RELEASE_1
);
7064 expand_builtin_lock_release (mode
, exp
);
7067 case BUILT_IN_SYNCHRONIZE
:
7068 expand_builtin_synchronize ();
7071 case BUILT_IN_OBJECT_SIZE
:
7072 return expand_builtin_object_size (exp
);
7074 case BUILT_IN_MEMCPY_CHK
:
7075 case BUILT_IN_MEMPCPY_CHK
:
7076 case BUILT_IN_MEMMOVE_CHK
:
7077 case BUILT_IN_MEMSET_CHK
:
7078 target
= expand_builtin_memory_chk (exp
, target
, mode
, fcode
);
7083 case BUILT_IN_STRCPY_CHK
:
7084 case BUILT_IN_STPCPY_CHK
:
7085 case BUILT_IN_STRNCPY_CHK
:
7086 case BUILT_IN_STRCAT_CHK
:
7087 case BUILT_IN_STRNCAT_CHK
:
7088 case BUILT_IN_SNPRINTF_CHK
:
7089 case BUILT_IN_VSNPRINTF_CHK
:
7090 maybe_emit_chk_warning (exp
, fcode
);
7093 case BUILT_IN_SPRINTF_CHK
:
7094 case BUILT_IN_VSPRINTF_CHK
:
7095 maybe_emit_sprintf_chk_warning (exp
, fcode
);
7099 maybe_emit_free_warning (exp
);
7102 default: /* just do library call, if unknown builtin */
7106 /* The switch statement above can drop through to cause the function
7107 to be called normally. */
7108 return expand_call (exp
, target
, ignore
);
7111 /* Determine whether a tree node represents a call to a built-in
7112 function. If the tree T is a call to a built-in function with
7113 the right number of arguments of the appropriate types, return
7114 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7115 Otherwise the return value is END_BUILTINS. */
7117 enum built_in_function
7118 builtin_mathfn_code (const_tree t
)
7120 const_tree fndecl
, arg
, parmlist
;
7121 const_tree argtype
, parmtype
;
7122 const_call_expr_arg_iterator iter
;
7124 if (TREE_CODE (t
) != CALL_EXPR
7125 || TREE_CODE (CALL_EXPR_FN (t
)) != ADDR_EXPR
)
7126 return END_BUILTINS
;
7128 fndecl
= get_callee_fndecl (t
);
7129 if (fndecl
== NULL_TREE
7130 || TREE_CODE (fndecl
) != FUNCTION_DECL
7131 || ! DECL_BUILT_IN (fndecl
)
7132 || DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
7133 return END_BUILTINS
;
7135 parmlist
= TYPE_ARG_TYPES (TREE_TYPE (fndecl
));
7136 init_const_call_expr_arg_iterator (t
, &iter
);
7137 for (; parmlist
; parmlist
= TREE_CHAIN (parmlist
))
7139 /* If a function doesn't take a variable number of arguments,
7140 the last element in the list will have type `void'. */
7141 parmtype
= TREE_VALUE (parmlist
);
7142 if (VOID_TYPE_P (parmtype
))
7144 if (more_const_call_expr_args_p (&iter
))
7145 return END_BUILTINS
;
7146 return DECL_FUNCTION_CODE (fndecl
);
7149 if (! more_const_call_expr_args_p (&iter
))
7150 return END_BUILTINS
;
7152 arg
= next_const_call_expr_arg (&iter
);
7153 argtype
= TREE_TYPE (arg
);
7155 if (SCALAR_FLOAT_TYPE_P (parmtype
))
7157 if (! SCALAR_FLOAT_TYPE_P (argtype
))
7158 return END_BUILTINS
;
7160 else if (COMPLEX_FLOAT_TYPE_P (parmtype
))
7162 if (! COMPLEX_FLOAT_TYPE_P (argtype
))
7163 return END_BUILTINS
;
7165 else if (POINTER_TYPE_P (parmtype
))
7167 if (! POINTER_TYPE_P (argtype
))
7168 return END_BUILTINS
;
7170 else if (INTEGRAL_TYPE_P (parmtype
))
7172 if (! INTEGRAL_TYPE_P (argtype
))
7173 return END_BUILTINS
;
7176 return END_BUILTINS
;
7179 /* Variable-length argument list. */
7180 return DECL_FUNCTION_CODE (fndecl
);
7183 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7184 evaluate to a constant. */
7187 fold_builtin_constant_p (tree arg
)
7189 /* We return 1 for a numeric type that's known to be a constant
7190 value at compile-time or for an aggregate type that's a
7191 literal constant. */
7194 /* If we know this is a constant, emit the constant of one. */
7195 if (CONSTANT_CLASS_P (arg
)
7196 || (TREE_CODE (arg
) == CONSTRUCTOR
7197 && TREE_CONSTANT (arg
)))
7198 return integer_one_node
;
7199 if (TREE_CODE (arg
) == ADDR_EXPR
)
7201 tree op
= TREE_OPERAND (arg
, 0);
7202 if (TREE_CODE (op
) == STRING_CST
7203 || (TREE_CODE (op
) == ARRAY_REF
7204 && integer_zerop (TREE_OPERAND (op
, 1))
7205 && TREE_CODE (TREE_OPERAND (op
, 0)) == STRING_CST
))
7206 return integer_one_node
;
7209 /* If this expression has side effects, show we don't know it to be a
7210 constant. Likewise if it's a pointer or aggregate type since in
7211 those case we only want literals, since those are only optimized
7212 when generating RTL, not later.
7213 And finally, if we are compiling an initializer, not code, we
7214 need to return a definite result now; there's not going to be any
7215 more optimization done. */
7216 if (TREE_SIDE_EFFECTS (arg
)
7217 || AGGREGATE_TYPE_P (TREE_TYPE (arg
))
7218 || POINTER_TYPE_P (TREE_TYPE (arg
))
7220 || folding_initializer
)
7221 return integer_zero_node
;
7226 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7227 return it as a truthvalue. */
7230 build_builtin_expect_predicate (tree pred
, tree expected
)
7232 tree fn
, arg_types
, pred_type
, expected_type
, call_expr
, ret_type
;
7234 fn
= built_in_decls
[BUILT_IN_EXPECT
];
7235 arg_types
= TYPE_ARG_TYPES (TREE_TYPE (fn
));
7236 ret_type
= TREE_TYPE (TREE_TYPE (fn
));
7237 pred_type
= TREE_VALUE (arg_types
);
7238 expected_type
= TREE_VALUE (TREE_CHAIN (arg_types
));
7240 pred
= fold_convert (pred_type
, pred
);
7241 expected
= fold_convert (expected_type
, expected
);
7242 call_expr
= build_call_expr (fn
, 2, pred
, expected
);
7244 return build2 (NE_EXPR
, TREE_TYPE (pred
), call_expr
,
7245 build_int_cst (ret_type
, 0));
7248 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7249 NULL_TREE if no simplification is possible. */
7252 fold_builtin_expect (tree arg0
, tree arg1
)
7255 enum tree_code code
;
7257 /* If this is a builtin_expect within a builtin_expect keep the
7258 inner one. See through a comparison against a constant. It
7259 might have been added to create a thruthvalue. */
7261 if (COMPARISON_CLASS_P (inner
)
7262 && TREE_CODE (TREE_OPERAND (inner
, 1)) == INTEGER_CST
)
7263 inner
= TREE_OPERAND (inner
, 0);
7265 if (TREE_CODE (inner
) == CALL_EXPR
7266 && (fndecl
= get_callee_fndecl (inner
))
7267 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
7268 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_EXPECT
)
7271 /* Distribute the expected value over short-circuiting operators.
7272 See through the cast from truthvalue_type_node to long. */
7274 while (TREE_CODE (inner
) == NOP_EXPR
7275 && INTEGRAL_TYPE_P (TREE_TYPE (inner
))
7276 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner
, 0))))
7277 inner
= TREE_OPERAND (inner
, 0);
7279 code
= TREE_CODE (inner
);
7280 if (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
)
7282 tree op0
= TREE_OPERAND (inner
, 0);
7283 tree op1
= TREE_OPERAND (inner
, 1);
7285 op0
= build_builtin_expect_predicate (op0
, arg1
);
7286 op1
= build_builtin_expect_predicate (op1
, arg1
);
7287 inner
= build2 (code
, TREE_TYPE (inner
), op0
, op1
);
7289 return fold_convert (TREE_TYPE (arg0
), inner
);
7292 /* If the argument isn't invariant then there's nothing else we can do. */
7293 if (!TREE_CONSTANT (arg0
))
7296 /* If we expect that a comparison against the argument will fold to
7297 a constant return the constant. In practice, this means a true
7298 constant or the address of a non-weak symbol. */
7301 if (TREE_CODE (inner
) == ADDR_EXPR
)
7305 inner
= TREE_OPERAND (inner
, 0);
7307 while (TREE_CODE (inner
) == COMPONENT_REF
7308 || TREE_CODE (inner
) == ARRAY_REF
);
7309 if (DECL_P (inner
) && DECL_WEAK (inner
))
7313 /* Otherwise, ARG0 already has the proper type for the return value. */
7317 /* Fold a call to __builtin_classify_type with argument ARG. */
7320 fold_builtin_classify_type (tree arg
)
7323 return build_int_cst (NULL_TREE
, no_type_class
);
7325 return build_int_cst (NULL_TREE
, type_to_class (TREE_TYPE (arg
)));
7328 /* Fold a call to __builtin_strlen with argument ARG. */
7331 fold_builtin_strlen (tree arg
)
7333 if (!validate_arg (arg
, POINTER_TYPE
))
7337 tree len
= c_strlen (arg
, 0);
7341 /* Convert from the internal "sizetype" type to "size_t". */
7343 len
= fold_convert (size_type_node
, len
);
7351 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7354 fold_builtin_inf (tree type
, int warn
)
7356 REAL_VALUE_TYPE real
;
7358 /* __builtin_inff is intended to be usable to define INFINITY on all
7359 targets. If an infinity is not available, INFINITY expands "to a
7360 positive constant of type float that overflows at translation
7361 time", footnote "In this case, using INFINITY will violate the
7362 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7363 Thus we pedwarn to ensure this constraint violation is
7365 if (!MODE_HAS_INFINITIES (TYPE_MODE (type
)) && warn
)
7366 pedwarn (input_location
, 0, "target format does not support infinity");
7369 return build_real (type
, real
);
7372 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7375 fold_builtin_nan (tree arg
, tree type
, int quiet
)
7377 REAL_VALUE_TYPE real
;
7380 if (!validate_arg (arg
, POINTER_TYPE
))
7382 str
= c_getstr (arg
);
7386 if (!real_nan (&real
, str
, quiet
, TYPE_MODE (type
)))
7389 return build_real (type
, real
);
7392 /* Return true if the floating point expression T has an integer value.
7393 We also allow +Inf, -Inf and NaN to be considered integer values. */
7396 integer_valued_real_p (tree t
)
7398 switch (TREE_CODE (t
))
7405 return integer_valued_real_p (TREE_OPERAND (t
, 0));
7410 return integer_valued_real_p (TREE_OPERAND (t
, 1));
7417 return integer_valued_real_p (TREE_OPERAND (t
, 0))
7418 && integer_valued_real_p (TREE_OPERAND (t
, 1));
7421 return integer_valued_real_p (TREE_OPERAND (t
, 1))
7422 && integer_valued_real_p (TREE_OPERAND (t
, 2));
7425 return real_isinteger (TREE_REAL_CST_PTR (t
), TYPE_MODE (TREE_TYPE (t
)));
7429 tree type
= TREE_TYPE (TREE_OPERAND (t
, 0));
7430 if (TREE_CODE (type
) == INTEGER_TYPE
)
7432 if (TREE_CODE (type
) == REAL_TYPE
)
7433 return integer_valued_real_p (TREE_OPERAND (t
, 0));
7438 switch (builtin_mathfn_code (t
))
7440 CASE_FLT_FN (BUILT_IN_CEIL
):
7441 CASE_FLT_FN (BUILT_IN_FLOOR
):
7442 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
7443 CASE_FLT_FN (BUILT_IN_RINT
):
7444 CASE_FLT_FN (BUILT_IN_ROUND
):
7445 CASE_FLT_FN (BUILT_IN_TRUNC
):
7448 CASE_FLT_FN (BUILT_IN_FMIN
):
7449 CASE_FLT_FN (BUILT_IN_FMAX
):
7450 return integer_valued_real_p (CALL_EXPR_ARG (t
, 0))
7451 && integer_valued_real_p (CALL_EXPR_ARG (t
, 1));
7464 /* FNDECL is assumed to be a builtin where truncation can be propagated
7465 across (for instance floor((double)f) == (double)floorf (f).
7466 Do the transformation for a call with argument ARG. */
7469 fold_trunc_transparent_mathfn (tree fndecl
, tree arg
)
7471 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
7473 if (!validate_arg (arg
, REAL_TYPE
))
7476 /* Integer rounding functions are idempotent. */
7477 if (fcode
== builtin_mathfn_code (arg
))
7480 /* If argument is already integer valued, and we don't need to worry
7481 about setting errno, there's no need to perform rounding. */
7482 if (! flag_errno_math
&& integer_valued_real_p (arg
))
7487 tree arg0
= strip_float_extensions (arg
);
7488 tree ftype
= TREE_TYPE (TREE_TYPE (fndecl
));
7489 tree newtype
= TREE_TYPE (arg0
);
7492 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (ftype
)
7493 && (decl
= mathfn_built_in (newtype
, fcode
)))
7494 return fold_convert (ftype
,
7495 build_call_expr (decl
, 1,
7496 fold_convert (newtype
, arg0
)));
7501 /* FNDECL is assumed to be builtin which can narrow the FP type of
7502 the argument, for instance lround((double)f) -> lroundf (f).
7503 Do the transformation for a call with argument ARG. */
7506 fold_fixed_mathfn (tree fndecl
, tree arg
)
7508 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
7510 if (!validate_arg (arg
, REAL_TYPE
))
7513 /* If argument is already integer valued, and we don't need to worry
7514 about setting errno, there's no need to perform rounding. */
7515 if (! flag_errno_math
&& integer_valued_real_p (arg
))
7516 return fold_build1 (FIX_TRUNC_EXPR
, TREE_TYPE (TREE_TYPE (fndecl
)), arg
);
7520 tree ftype
= TREE_TYPE (arg
);
7521 tree arg0
= strip_float_extensions (arg
);
7522 tree newtype
= TREE_TYPE (arg0
);
7525 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (ftype
)
7526 && (decl
= mathfn_built_in (newtype
, fcode
)))
7527 return build_call_expr (decl
, 1, fold_convert (newtype
, arg0
));
7530 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7531 sizeof (long long) == sizeof (long). */
7532 if (TYPE_PRECISION (long_long_integer_type_node
)
7533 == TYPE_PRECISION (long_integer_type_node
))
7535 tree newfn
= NULL_TREE
;
7538 CASE_FLT_FN (BUILT_IN_LLCEIL
):
7539 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LCEIL
);
7542 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
7543 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LFLOOR
);
7546 CASE_FLT_FN (BUILT_IN_LLROUND
):
7547 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LROUND
);
7550 CASE_FLT_FN (BUILT_IN_LLRINT
):
7551 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LRINT
);
7560 tree newcall
= build_call_expr(newfn
, 1, arg
);
7561 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl
)), newcall
);
7568 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7569 return type. Return NULL_TREE if no simplification can be made. */
7572 fold_builtin_cabs (tree arg
, tree type
, tree fndecl
)
7576 if (TREE_CODE (TREE_TYPE (arg
)) != COMPLEX_TYPE
7577 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) != REAL_TYPE
)
7580 /* Calculate the result when the argument is a constant. */
7581 if (TREE_CODE (arg
) == COMPLEX_CST
7582 && (res
= do_mpfr_arg2 (TREE_REALPART (arg
), TREE_IMAGPART (arg
),
7586 if (TREE_CODE (arg
) == COMPLEX_EXPR
)
7588 tree real
= TREE_OPERAND (arg
, 0);
7589 tree imag
= TREE_OPERAND (arg
, 1);
7591 /* If either part is zero, cabs is fabs of the other. */
7592 if (real_zerop (real
))
7593 return fold_build1 (ABS_EXPR
, type
, imag
);
7594 if (real_zerop (imag
))
7595 return fold_build1 (ABS_EXPR
, type
, real
);
7597 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7598 if (flag_unsafe_math_optimizations
7599 && operand_equal_p (real
, imag
, OEP_PURE_SAME
))
7601 const REAL_VALUE_TYPE sqrt2_trunc
7602 = real_value_truncate (TYPE_MODE (type
), dconst_sqrt2 ());
7604 return fold_build2 (MULT_EXPR
, type
,
7605 fold_build1 (ABS_EXPR
, type
, real
),
7606 build_real (type
, sqrt2_trunc
));
7610 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7611 if (TREE_CODE (arg
) == NEGATE_EXPR
7612 || TREE_CODE (arg
) == CONJ_EXPR
)
7613 return build_call_expr (fndecl
, 1, TREE_OPERAND (arg
, 0));
7615 /* Don't do this when optimizing for size. */
7616 if (flag_unsafe_math_optimizations
7617 && optimize
&& optimize_function_for_speed_p (cfun
))
7619 tree sqrtfn
= mathfn_built_in (type
, BUILT_IN_SQRT
);
7621 if (sqrtfn
!= NULL_TREE
)
7623 tree rpart
, ipart
, result
;
7625 arg
= builtin_save_expr (arg
);
7627 rpart
= fold_build1 (REALPART_EXPR
, type
, arg
);
7628 ipart
= fold_build1 (IMAGPART_EXPR
, type
, arg
);
7630 rpart
= builtin_save_expr (rpart
);
7631 ipart
= builtin_save_expr (ipart
);
7633 result
= fold_build2 (PLUS_EXPR
, type
,
7634 fold_build2 (MULT_EXPR
, type
,
7636 fold_build2 (MULT_EXPR
, type
,
7639 return build_call_expr (sqrtfn
, 1, result
);
7646 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7647 Return NULL_TREE if no simplification can be made. */
7650 fold_builtin_sqrt (tree arg
, tree type
)
7653 enum built_in_function fcode
;
7656 if (!validate_arg (arg
, REAL_TYPE
))
7659 /* Calculate the result when the argument is a constant. */
7660 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_sqrt
, &dconst0
, NULL
, true)))
7663 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7664 fcode
= builtin_mathfn_code (arg
);
7665 if (flag_unsafe_math_optimizations
&& BUILTIN_EXPONENT_P (fcode
))
7667 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg
), 0);
7668 arg
= fold_build2 (MULT_EXPR
, type
,
7669 CALL_EXPR_ARG (arg
, 0),
7670 build_real (type
, dconsthalf
));
7671 return build_call_expr (expfn
, 1, arg
);
7674 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7675 if (flag_unsafe_math_optimizations
&& BUILTIN_ROOT_P (fcode
))
7677 tree powfn
= mathfn_built_in (type
, BUILT_IN_POW
);
7681 tree arg0
= CALL_EXPR_ARG (arg
, 0);
7683 /* The inner root was either sqrt or cbrt. */
7684 /* This was a conditional expression but it triggered a bug
7685 in the Solaris 8 compiler. */
7686 REAL_VALUE_TYPE dconstroot
;
7687 if (BUILTIN_SQRT_P (fcode
))
7688 dconstroot
= dconsthalf
;
7690 dconstroot
= dconst_third ();
7692 /* Adjust for the outer root. */
7693 SET_REAL_EXP (&dconstroot
, REAL_EXP (&dconstroot
) - 1);
7694 dconstroot
= real_value_truncate (TYPE_MODE (type
), dconstroot
);
7695 tree_root
= build_real (type
, dconstroot
);
7696 return build_call_expr (powfn
, 2, arg0
, tree_root
);
7700 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7701 if (flag_unsafe_math_optimizations
7702 && (fcode
== BUILT_IN_POW
7703 || fcode
== BUILT_IN_POWF
7704 || fcode
== BUILT_IN_POWL
))
7706 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg
), 0);
7707 tree arg0
= CALL_EXPR_ARG (arg
, 0);
7708 tree arg1
= CALL_EXPR_ARG (arg
, 1);
7710 if (!tree_expr_nonnegative_p (arg0
))
7711 arg0
= build1 (ABS_EXPR
, type
, arg0
);
7712 narg1
= fold_build2 (MULT_EXPR
, type
, arg1
,
7713 build_real (type
, dconsthalf
));
7714 return build_call_expr (powfn
, 2, arg0
, narg1
);
7720 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7721 Return NULL_TREE if no simplification can be made. */
7724 fold_builtin_cbrt (tree arg
, tree type
)
7726 const enum built_in_function fcode
= builtin_mathfn_code (arg
);
7729 if (!validate_arg (arg
, REAL_TYPE
))
7732 /* Calculate the result when the argument is a constant. */
7733 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_cbrt
, NULL
, NULL
, 0)))
7736 if (flag_unsafe_math_optimizations
)
7738 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7739 if (BUILTIN_EXPONENT_P (fcode
))
7741 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg
), 0);
7742 const REAL_VALUE_TYPE third_trunc
=
7743 real_value_truncate (TYPE_MODE (type
), dconst_third ());
7744 arg
= fold_build2 (MULT_EXPR
, type
,
7745 CALL_EXPR_ARG (arg
, 0),
7746 build_real (type
, third_trunc
));
7747 return build_call_expr (expfn
, 1, arg
);
7750 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7751 if (BUILTIN_SQRT_P (fcode
))
7753 tree powfn
= mathfn_built_in (type
, BUILT_IN_POW
);
7757 tree arg0
= CALL_EXPR_ARG (arg
, 0);
7759 REAL_VALUE_TYPE dconstroot
= dconst_third ();
7761 SET_REAL_EXP (&dconstroot
, REAL_EXP (&dconstroot
) - 1);
7762 dconstroot
= real_value_truncate (TYPE_MODE (type
), dconstroot
);
7763 tree_root
= build_real (type
, dconstroot
);
7764 return build_call_expr (powfn
, 2, arg0
, tree_root
);
7768 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7769 if (BUILTIN_CBRT_P (fcode
))
7771 tree arg0
= CALL_EXPR_ARG (arg
, 0);
7772 if (tree_expr_nonnegative_p (arg0
))
7774 tree powfn
= mathfn_built_in (type
, BUILT_IN_POW
);
7779 REAL_VALUE_TYPE dconstroot
;
7781 real_arithmetic (&dconstroot
, MULT_EXPR
,
7782 dconst_third_ptr (), dconst_third_ptr ());
7783 dconstroot
= real_value_truncate (TYPE_MODE (type
), dconstroot
);
7784 tree_root
= build_real (type
, dconstroot
);
7785 return build_call_expr (powfn
, 2, arg0
, tree_root
);
7790 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7791 if (fcode
== BUILT_IN_POW
7792 || fcode
== BUILT_IN_POWF
7793 || fcode
== BUILT_IN_POWL
)
7795 tree arg00
= CALL_EXPR_ARG (arg
, 0);
7796 tree arg01
= CALL_EXPR_ARG (arg
, 1);
7797 if (tree_expr_nonnegative_p (arg00
))
7799 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg
), 0);
7800 const REAL_VALUE_TYPE dconstroot
7801 = real_value_truncate (TYPE_MODE (type
), dconst_third ());
7802 tree narg01
= fold_build2 (MULT_EXPR
, type
, arg01
,
7803 build_real (type
, dconstroot
));
7804 return build_call_expr (powfn
, 2, arg00
, narg01
);
7811 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7812 TYPE is the type of the return value. Return NULL_TREE if no
7813 simplification can be made. */
7816 fold_builtin_cos (tree arg
, tree type
, tree fndecl
)
7820 if (!validate_arg (arg
, REAL_TYPE
))
7823 /* Calculate the result when the argument is a constant. */
7824 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_cos
, NULL
, NULL
, 0)))
7827 /* Optimize cos(-x) into cos (x). */
7828 if ((narg
= fold_strip_sign_ops (arg
)))
7829 return build_call_expr (fndecl
, 1, narg
);
7834 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7835 Return NULL_TREE if no simplification can be made. */
7838 fold_builtin_cosh (tree arg
, tree type
, tree fndecl
)
7840 if (validate_arg (arg
, REAL_TYPE
))
7844 /* Calculate the result when the argument is a constant. */
7845 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_cosh
, NULL
, NULL
, 0)))
7848 /* Optimize cosh(-x) into cosh (x). */
7849 if ((narg
= fold_strip_sign_ops (arg
)))
7850 return build_call_expr (fndecl
, 1, narg
);
7856 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7857 Return NULL_TREE if no simplification can be made. */
7860 fold_builtin_tan (tree arg
, tree type
)
7862 enum built_in_function fcode
;
7865 if (!validate_arg (arg
, REAL_TYPE
))
7868 /* Calculate the result when the argument is a constant. */
7869 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_tan
, NULL
, NULL
, 0)))
7872 /* Optimize tan(atan(x)) = x. */
7873 fcode
= builtin_mathfn_code (arg
);
7874 if (flag_unsafe_math_optimizations
7875 && (fcode
== BUILT_IN_ATAN
7876 || fcode
== BUILT_IN_ATANF
7877 || fcode
== BUILT_IN_ATANL
))
7878 return CALL_EXPR_ARG (arg
, 0);
7883 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7884 NULL_TREE if no simplification can be made. */
7887 fold_builtin_sincos (tree arg0
, tree arg1
, tree arg2
)
7892 if (!validate_arg (arg0
, REAL_TYPE
)
7893 || !validate_arg (arg1
, POINTER_TYPE
)
7894 || !validate_arg (arg2
, POINTER_TYPE
))
7897 type
= TREE_TYPE (arg0
);
7899 /* Calculate the result when the argument is a constant. */
7900 if ((res
= do_mpfr_sincos (arg0
, arg1
, arg2
)))
7903 /* Canonicalize sincos to cexpi. */
7904 if (!TARGET_C99_FUNCTIONS
)
7906 fn
= mathfn_built_in (type
, BUILT_IN_CEXPI
);
7910 call
= build_call_expr (fn
, 1, arg0
);
7911 call
= builtin_save_expr (call
);
7913 return build2 (COMPOUND_EXPR
, type
,
7914 build2 (MODIFY_EXPR
, void_type_node
,
7915 build_fold_indirect_ref (arg1
),
7916 build1 (IMAGPART_EXPR
, type
, call
)),
7917 build2 (MODIFY_EXPR
, void_type_node
,
7918 build_fold_indirect_ref (arg2
),
7919 build1 (REALPART_EXPR
, type
, call
)));
7922 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7923 NULL_TREE if no simplification can be made. */
7926 fold_builtin_cexp (tree arg0
, tree type
)
7929 tree realp
, imagp
, ifn
;
7931 if (!validate_arg (arg0
, COMPLEX_TYPE
))
7934 rtype
= TREE_TYPE (TREE_TYPE (arg0
));
7936 /* In case we can figure out the real part of arg0 and it is constant zero
7938 if (!TARGET_C99_FUNCTIONS
)
7940 ifn
= mathfn_built_in (rtype
, BUILT_IN_CEXPI
);
7944 if ((realp
= fold_unary (REALPART_EXPR
, rtype
, arg0
))
7945 && real_zerop (realp
))
7947 tree narg
= fold_build1 (IMAGPART_EXPR
, rtype
, arg0
);
7948 return build_call_expr (ifn
, 1, narg
);
7951 /* In case we can easily decompose real and imaginary parts split cexp
7952 to exp (r) * cexpi (i). */
7953 if (flag_unsafe_math_optimizations
7956 tree rfn
, rcall
, icall
;
7958 rfn
= mathfn_built_in (rtype
, BUILT_IN_EXP
);
7962 imagp
= fold_unary (IMAGPART_EXPR
, rtype
, arg0
);
7966 icall
= build_call_expr (ifn
, 1, imagp
);
7967 icall
= builtin_save_expr (icall
);
7968 rcall
= build_call_expr (rfn
, 1, realp
);
7969 rcall
= builtin_save_expr (rcall
);
7970 return fold_build2 (COMPLEX_EXPR
, type
,
7971 fold_build2 (MULT_EXPR
, rtype
,
7973 fold_build1 (REALPART_EXPR
, rtype
, icall
)),
7974 fold_build2 (MULT_EXPR
, rtype
,
7976 fold_build1 (IMAGPART_EXPR
, rtype
, icall
)));
7982 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7983 Return NULL_TREE if no simplification can be made. */
7986 fold_builtin_trunc (tree fndecl
, tree arg
)
7988 if (!validate_arg (arg
, REAL_TYPE
))
7991 /* Optimize trunc of constant value. */
7992 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7994 REAL_VALUE_TYPE r
, x
;
7995 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7997 x
= TREE_REAL_CST (arg
);
7998 real_trunc (&r
, TYPE_MODE (type
), &x
);
7999 return build_real (type
, r
);
8002 return fold_trunc_transparent_mathfn (fndecl
, arg
);
8005 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
8006 Return NULL_TREE if no simplification can be made. */
8009 fold_builtin_floor (tree fndecl
, tree arg
)
8011 if (!validate_arg (arg
, REAL_TYPE
))
8014 /* Optimize floor of constant value. */
8015 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
8019 x
= TREE_REAL_CST (arg
);
8020 if (! REAL_VALUE_ISNAN (x
) || ! flag_errno_math
)
8022 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8025 real_floor (&r
, TYPE_MODE (type
), &x
);
8026 return build_real (type
, r
);
8030 /* Fold floor (x) where x is nonnegative to trunc (x). */
8031 if (tree_expr_nonnegative_p (arg
))
8033 tree truncfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_TRUNC
);
8035 return build_call_expr (truncfn
, 1, arg
);
8038 return fold_trunc_transparent_mathfn (fndecl
, arg
);
8041 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
8042 Return NULL_TREE if no simplification can be made. */
8045 fold_builtin_ceil (tree fndecl
, tree arg
)
8047 if (!validate_arg (arg
, REAL_TYPE
))
8050 /* Optimize ceil of constant value. */
8051 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
8055 x
= TREE_REAL_CST (arg
);
8056 if (! REAL_VALUE_ISNAN (x
) || ! flag_errno_math
)
8058 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8061 real_ceil (&r
, TYPE_MODE (type
), &x
);
8062 return build_real (type
, r
);
8066 return fold_trunc_transparent_mathfn (fndecl
, arg
);
8069 /* Fold function call to builtin round, roundf or roundl with argument ARG.
8070 Return NULL_TREE if no simplification can be made. */
8073 fold_builtin_round (tree fndecl
, tree arg
)
8075 if (!validate_arg (arg
, REAL_TYPE
))
8078 /* Optimize round of constant value. */
8079 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
8083 x
= TREE_REAL_CST (arg
);
8084 if (! REAL_VALUE_ISNAN (x
) || ! flag_errno_math
)
8086 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8089 real_round (&r
, TYPE_MODE (type
), &x
);
8090 return build_real (type
, r
);
8094 return fold_trunc_transparent_mathfn (fndecl
, arg
);
8097 /* Fold function call to builtin lround, lroundf or lroundl (or the
8098 corresponding long long versions) and other rounding functions. ARG
8099 is the argument to the call. Return NULL_TREE if no simplification
8103 fold_builtin_int_roundingfn (tree fndecl
, tree arg
)
8105 if (!validate_arg (arg
, REAL_TYPE
))
8108 /* Optimize lround of constant value. */
8109 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
8111 const REAL_VALUE_TYPE x
= TREE_REAL_CST (arg
);
8113 if (real_isfinite (&x
))
8115 tree itype
= TREE_TYPE (TREE_TYPE (fndecl
));
8116 tree ftype
= TREE_TYPE (arg
);
8117 unsigned HOST_WIDE_INT lo2
;
8118 HOST_WIDE_INT hi
, lo
;
8121 switch (DECL_FUNCTION_CODE (fndecl
))
8123 CASE_FLT_FN (BUILT_IN_LFLOOR
):
8124 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
8125 real_floor (&r
, TYPE_MODE (ftype
), &x
);
8128 CASE_FLT_FN (BUILT_IN_LCEIL
):
8129 CASE_FLT_FN (BUILT_IN_LLCEIL
):
8130 real_ceil (&r
, TYPE_MODE (ftype
), &x
);
8133 CASE_FLT_FN (BUILT_IN_LROUND
):
8134 CASE_FLT_FN (BUILT_IN_LLROUND
):
8135 real_round (&r
, TYPE_MODE (ftype
), &x
);
8142 REAL_VALUE_TO_INT (&lo
, &hi
, r
);
8143 if (!fit_double_type (lo
, hi
, &lo2
, &hi
, itype
))
8144 return build_int_cst_wide (itype
, lo2
, hi
);
8148 switch (DECL_FUNCTION_CODE (fndecl
))
8150 CASE_FLT_FN (BUILT_IN_LFLOOR
):
8151 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
8152 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8153 if (tree_expr_nonnegative_p (arg
))
8154 return fold_build1 (FIX_TRUNC_EXPR
, TREE_TYPE (TREE_TYPE (fndecl
)),
8160 return fold_fixed_mathfn (fndecl
, arg
);
8163 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8164 and their long and long long variants (i.e. ffsl and ffsll). ARG is
8165 the argument to the call. Return NULL_TREE if no simplification can
8169 fold_builtin_bitop (tree fndecl
, tree arg
)
8171 if (!validate_arg (arg
, INTEGER_TYPE
))
8174 /* Optimize for constant argument. */
8175 if (TREE_CODE (arg
) == INTEGER_CST
&& !TREE_OVERFLOW (arg
))
8177 HOST_WIDE_INT hi
, width
, result
;
8178 unsigned HOST_WIDE_INT lo
;
8181 type
= TREE_TYPE (arg
);
8182 width
= TYPE_PRECISION (type
);
8183 lo
= TREE_INT_CST_LOW (arg
);
8185 /* Clear all the bits that are beyond the type's precision. */
8186 if (width
> HOST_BITS_PER_WIDE_INT
)
8188 hi
= TREE_INT_CST_HIGH (arg
);
8189 if (width
< 2 * HOST_BITS_PER_WIDE_INT
)
8190 hi
&= ~((HOST_WIDE_INT
) (-1) >> (width
- HOST_BITS_PER_WIDE_INT
));
8195 if (width
< HOST_BITS_PER_WIDE_INT
)
8196 lo
&= ~((unsigned HOST_WIDE_INT
) (-1) << width
);
8199 switch (DECL_FUNCTION_CODE (fndecl
))
8201 CASE_INT_FN (BUILT_IN_FFS
):
8203 result
= exact_log2 (lo
& -lo
) + 1;
8205 result
= HOST_BITS_PER_WIDE_INT
+ exact_log2 (hi
& -hi
) + 1;
8210 CASE_INT_FN (BUILT_IN_CLZ
):
8212 result
= width
- floor_log2 (hi
) - 1 - HOST_BITS_PER_WIDE_INT
;
8214 result
= width
- floor_log2 (lo
) - 1;
8215 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type
), result
))
8219 CASE_INT_FN (BUILT_IN_CTZ
):
8221 result
= exact_log2 (lo
& -lo
);
8223 result
= HOST_BITS_PER_WIDE_INT
+ exact_log2 (hi
& -hi
);
8224 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type
), result
))
8228 CASE_INT_FN (BUILT_IN_POPCOUNT
):
8231 result
++, lo
&= lo
- 1;
8233 result
++, hi
&= hi
- 1;
8236 CASE_INT_FN (BUILT_IN_PARITY
):
8239 result
++, lo
&= lo
- 1;
8241 result
++, hi
&= hi
- 1;
8249 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl
)), result
);
8255 /* Fold function call to builtin_bswap and the long and long long
8256 variants. Return NULL_TREE if no simplification can be made. */
8258 fold_builtin_bswap (tree fndecl
, tree arg
)
8260 if (! validate_arg (arg
, INTEGER_TYPE
))
8263 /* Optimize constant value. */
8264 if (TREE_CODE (arg
) == INTEGER_CST
&& !TREE_OVERFLOW (arg
))
8266 HOST_WIDE_INT hi
, width
, r_hi
= 0;
8267 unsigned HOST_WIDE_INT lo
, r_lo
= 0;
8270 type
= TREE_TYPE (arg
);
8271 width
= TYPE_PRECISION (type
);
8272 lo
= TREE_INT_CST_LOW (arg
);
8273 hi
= TREE_INT_CST_HIGH (arg
);
8275 switch (DECL_FUNCTION_CODE (fndecl
))
8277 case BUILT_IN_BSWAP32
:
8278 case BUILT_IN_BSWAP64
:
8282 for (s
= 0; s
< width
; s
+= 8)
8284 int d
= width
- s
- 8;
8285 unsigned HOST_WIDE_INT byte
;
8287 if (s
< HOST_BITS_PER_WIDE_INT
)
8288 byte
= (lo
>> s
) & 0xff;
8290 byte
= (hi
>> (s
- HOST_BITS_PER_WIDE_INT
)) & 0xff;
8292 if (d
< HOST_BITS_PER_WIDE_INT
)
8295 r_hi
|= byte
<< (d
- HOST_BITS_PER_WIDE_INT
);
8305 if (width
< HOST_BITS_PER_WIDE_INT
)
8306 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl
)), r_lo
);
8308 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl
)), r_lo
, r_hi
);
8314 /* Return true if EXPR is the real constant contained in VALUE. */
8317 real_dconstp (tree expr
, const REAL_VALUE_TYPE
*value
)
8321 return ((TREE_CODE (expr
) == REAL_CST
8322 && !TREE_OVERFLOW (expr
)
8323 && REAL_VALUES_EQUAL (TREE_REAL_CST (expr
), *value
))
8324 || (TREE_CODE (expr
) == COMPLEX_CST
8325 && real_dconstp (TREE_REALPART (expr
), value
)
8326 && real_zerop (TREE_IMAGPART (expr
))));
8329 /* A subroutine of fold_builtin to fold the various logarithmic
8330 functions. Return NULL_TREE if no simplification can me made.
8331 FUNC is the corresponding MPFR logarithm function. */
8334 fold_builtin_logarithm (tree fndecl
, tree arg
,
8335 int (*func
)(mpfr_ptr
, mpfr_srcptr
, mp_rnd_t
))
8337 if (validate_arg (arg
, REAL_TYPE
))
8339 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8341 const enum built_in_function fcode
= builtin_mathfn_code (arg
);
8343 /* Optimize log(e) = 1.0. We're never passed an exact 'e',
8344 instead we'll look for 'e' truncated to MODE. So only do
8345 this if flag_unsafe_math_optimizations is set. */
8346 if (flag_unsafe_math_optimizations
&& func
== mpfr_log
)
8348 const REAL_VALUE_TYPE e_truncated
=
8349 real_value_truncate (TYPE_MODE (type
), dconst_e ());
8350 if (real_dconstp (arg
, &e_truncated
))
8351 return build_real (type
, dconst1
);
8354 /* Calculate the result when the argument is a constant. */
8355 if ((res
= do_mpfr_arg1 (arg
, type
, func
, &dconst0
, NULL
, false)))
8358 /* Special case, optimize logN(expN(x)) = x. */
8359 if (flag_unsafe_math_optimizations
8360 && ((func
== mpfr_log
8361 && (fcode
== BUILT_IN_EXP
8362 || fcode
== BUILT_IN_EXPF
8363 || fcode
== BUILT_IN_EXPL
))
8364 || (func
== mpfr_log2
8365 && (fcode
== BUILT_IN_EXP2
8366 || fcode
== BUILT_IN_EXP2F
8367 || fcode
== BUILT_IN_EXP2L
))
8368 || (func
== mpfr_log10
&& (BUILTIN_EXP10_P (fcode
)))))
8369 return fold_convert (type
, CALL_EXPR_ARG (arg
, 0));
8371 /* Optimize logN(func()) for various exponential functions. We
8372 want to determine the value "x" and the power "exponent" in
8373 order to transform logN(x**exponent) into exponent*logN(x). */
8374 if (flag_unsafe_math_optimizations
)
8376 tree exponent
= 0, x
= 0;
8380 CASE_FLT_FN (BUILT_IN_EXP
):
8381 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
8382 x
= build_real (type
, real_value_truncate (TYPE_MODE (type
),
8384 exponent
= CALL_EXPR_ARG (arg
, 0);
8386 CASE_FLT_FN (BUILT_IN_EXP2
):
8387 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
8388 x
= build_real (type
, dconst2
);
8389 exponent
= CALL_EXPR_ARG (arg
, 0);
8391 CASE_FLT_FN (BUILT_IN_EXP10
):
8392 CASE_FLT_FN (BUILT_IN_POW10
):
8393 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
8395 REAL_VALUE_TYPE dconst10
;
8396 real_from_integer (&dconst10
, VOIDmode
, 10, 0, 0);
8397 x
= build_real (type
, dconst10
);
8399 exponent
= CALL_EXPR_ARG (arg
, 0);
8401 CASE_FLT_FN (BUILT_IN_SQRT
):
8402 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
8403 x
= CALL_EXPR_ARG (arg
, 0);
8404 exponent
= build_real (type
, dconsthalf
);
8406 CASE_FLT_FN (BUILT_IN_CBRT
):
8407 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
8408 x
= CALL_EXPR_ARG (arg
, 0);
8409 exponent
= build_real (type
, real_value_truncate (TYPE_MODE (type
),
8412 CASE_FLT_FN (BUILT_IN_POW
):
8413 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
8414 x
= CALL_EXPR_ARG (arg
, 0);
8415 exponent
= CALL_EXPR_ARG (arg
, 1);
8421 /* Now perform the optimization. */
8424 tree logfn
= build_call_expr (fndecl
, 1, x
);
8425 return fold_build2 (MULT_EXPR
, type
, exponent
, logfn
);
8433 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8434 NULL_TREE if no simplification can be made. */
8437 fold_builtin_hypot (tree fndecl
, tree arg0
, tree arg1
, tree type
)
8439 tree res
, narg0
, narg1
;
8441 if (!validate_arg (arg0
, REAL_TYPE
)
8442 || !validate_arg (arg1
, REAL_TYPE
))
8445 /* Calculate the result when the argument is a constant. */
8446 if ((res
= do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_hypot
)))
8449 /* If either argument to hypot has a negate or abs, strip that off.
8450 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8451 narg0
= fold_strip_sign_ops (arg0
);
8452 narg1
= fold_strip_sign_ops (arg1
);
8455 return build_call_expr (fndecl
, 2, narg0
? narg0
: arg0
,
8456 narg1
? narg1
: arg1
);
8459 /* If either argument is zero, hypot is fabs of the other. */
8460 if (real_zerop (arg0
))
8461 return fold_build1 (ABS_EXPR
, type
, arg1
);
8462 else if (real_zerop (arg1
))
8463 return fold_build1 (ABS_EXPR
, type
, arg0
);
8465 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8466 if (flag_unsafe_math_optimizations
8467 && operand_equal_p (arg0
, arg1
, OEP_PURE_SAME
))
8469 const REAL_VALUE_TYPE sqrt2_trunc
8470 = real_value_truncate (TYPE_MODE (type
), dconst_sqrt2 ());
8471 return fold_build2 (MULT_EXPR
, type
,
8472 fold_build1 (ABS_EXPR
, type
, arg0
),
8473 build_real (type
, sqrt2_trunc
));
8480 /* Fold a builtin function call to pow, powf, or powl. Return
8481 NULL_TREE if no simplification can be made. */
8483 fold_builtin_pow (tree fndecl
, tree arg0
, tree arg1
, tree type
)
8487 if (!validate_arg (arg0
, REAL_TYPE
)
8488 || !validate_arg (arg1
, REAL_TYPE
))
8491 /* Calculate the result when the argument is a constant. */
8492 if ((res
= do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_pow
)))
8495 /* Optimize pow(1.0,y) = 1.0. */
8496 if (real_onep (arg0
))
8497 return omit_one_operand (type
, build_real (type
, dconst1
), arg1
);
8499 if (TREE_CODE (arg1
) == REAL_CST
8500 && !TREE_OVERFLOW (arg1
))
8502 REAL_VALUE_TYPE cint
;
8506 c
= TREE_REAL_CST (arg1
);
8508 /* Optimize pow(x,0.0) = 1.0. */
8509 if (REAL_VALUES_EQUAL (c
, dconst0
))
8510 return omit_one_operand (type
, build_real (type
, dconst1
),
8513 /* Optimize pow(x,1.0) = x. */
8514 if (REAL_VALUES_EQUAL (c
, dconst1
))
8517 /* Optimize pow(x,-1.0) = 1.0/x. */
8518 if (REAL_VALUES_EQUAL (c
, dconstm1
))
8519 return fold_build2 (RDIV_EXPR
, type
,
8520 build_real (type
, dconst1
), arg0
);
8522 /* Optimize pow(x,0.5) = sqrt(x). */
8523 if (flag_unsafe_math_optimizations
8524 && REAL_VALUES_EQUAL (c
, dconsthalf
))
8526 tree sqrtfn
= mathfn_built_in (type
, BUILT_IN_SQRT
);
8528 if (sqrtfn
!= NULL_TREE
)
8529 return build_call_expr (sqrtfn
, 1, arg0
);
8532 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8533 if (flag_unsafe_math_optimizations
)
8535 const REAL_VALUE_TYPE dconstroot
8536 = real_value_truncate (TYPE_MODE (type
), dconst_third ());
8538 if (REAL_VALUES_EQUAL (c
, dconstroot
))
8540 tree cbrtfn
= mathfn_built_in (type
, BUILT_IN_CBRT
);
8541 if (cbrtfn
!= NULL_TREE
)
8542 return build_call_expr (cbrtfn
, 1, arg0
);
8546 /* Check for an integer exponent. */
8547 n
= real_to_integer (&c
);
8548 real_from_integer (&cint
, VOIDmode
, n
, n
< 0 ? -1 : 0, 0);
8549 if (real_identical (&c
, &cint
))
8551 /* Attempt to evaluate pow at compile-time, unless this should
8552 raise an exception. */
8553 if (TREE_CODE (arg0
) == REAL_CST
8554 && !TREE_OVERFLOW (arg0
)
8556 || (!flag_trapping_math
&& !flag_errno_math
)
8557 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0
), dconst0
)))
8562 x
= TREE_REAL_CST (arg0
);
8563 inexact
= real_powi (&x
, TYPE_MODE (type
), &x
, n
);
8564 if (flag_unsafe_math_optimizations
|| !inexact
)
8565 return build_real (type
, x
);
8568 /* Strip sign ops from even integer powers. */
8569 if ((n
& 1) == 0 && flag_unsafe_math_optimizations
)
8571 tree narg0
= fold_strip_sign_ops (arg0
);
8573 return build_call_expr (fndecl
, 2, narg0
, arg1
);
8578 if (flag_unsafe_math_optimizations
)
8580 const enum built_in_function fcode
= builtin_mathfn_code (arg0
);
8582 /* Optimize pow(expN(x),y) = expN(x*y). */
8583 if (BUILTIN_EXPONENT_P (fcode
))
8585 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
8586 tree arg
= CALL_EXPR_ARG (arg0
, 0);
8587 arg
= fold_build2 (MULT_EXPR
, type
, arg
, arg1
);
8588 return build_call_expr (expfn
, 1, arg
);
8591 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8592 if (BUILTIN_SQRT_P (fcode
))
8594 tree narg0
= CALL_EXPR_ARG (arg0
, 0);
8595 tree narg1
= fold_build2 (MULT_EXPR
, type
, arg1
,
8596 build_real (type
, dconsthalf
));
8597 return build_call_expr (fndecl
, 2, narg0
, narg1
);
8600 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8601 if (BUILTIN_CBRT_P (fcode
))
8603 tree arg
= CALL_EXPR_ARG (arg0
, 0);
8604 if (tree_expr_nonnegative_p (arg
))
8606 const REAL_VALUE_TYPE dconstroot
8607 = real_value_truncate (TYPE_MODE (type
), dconst_third ());
8608 tree narg1
= fold_build2 (MULT_EXPR
, type
, arg1
,
8609 build_real (type
, dconstroot
));
8610 return build_call_expr (fndecl
, 2, arg
, narg1
);
8614 /* Optimize pow(pow(x,y),z) = pow(x,y*z). */
8615 if (fcode
== BUILT_IN_POW
8616 || fcode
== BUILT_IN_POWF
8617 || fcode
== BUILT_IN_POWL
)
8619 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
8620 tree arg01
= CALL_EXPR_ARG (arg0
, 1);
8621 tree narg1
= fold_build2 (MULT_EXPR
, type
, arg01
, arg1
);
8622 return build_call_expr (fndecl
, 2, arg00
, narg1
);
8629 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8630 Return NULL_TREE if no simplification can be made. */
8632 fold_builtin_powi (tree fndecl ATTRIBUTE_UNUSED
,
8633 tree arg0
, tree arg1
, tree type
)
8635 if (!validate_arg (arg0
, REAL_TYPE
)
8636 || !validate_arg (arg1
, INTEGER_TYPE
))
8639 /* Optimize pow(1.0,y) = 1.0. */
8640 if (real_onep (arg0
))
8641 return omit_one_operand (type
, build_real (type
, dconst1
), arg1
);
8643 if (host_integerp (arg1
, 0))
8645 HOST_WIDE_INT c
= TREE_INT_CST_LOW (arg1
);
8647 /* Evaluate powi at compile-time. */
8648 if (TREE_CODE (arg0
) == REAL_CST
8649 && !TREE_OVERFLOW (arg0
))
8652 x
= TREE_REAL_CST (arg0
);
8653 real_powi (&x
, TYPE_MODE (type
), &x
, c
);
8654 return build_real (type
, x
);
8657 /* Optimize pow(x,0) = 1.0. */
8659 return omit_one_operand (type
, build_real (type
, dconst1
),
8662 /* Optimize pow(x,1) = x. */
8666 /* Optimize pow(x,-1) = 1.0/x. */
8668 return fold_build2 (RDIV_EXPR
, type
,
8669 build_real (type
, dconst1
), arg0
);
8675 /* A subroutine of fold_builtin to fold the various exponent
8676 functions. Return NULL_TREE if no simplification can be made.
8677 FUNC is the corresponding MPFR exponent function. */
8680 fold_builtin_exponent (tree fndecl
, tree arg
,
8681 int (*func
)(mpfr_ptr
, mpfr_srcptr
, mp_rnd_t
))
8683 if (validate_arg (arg
, REAL_TYPE
))
8685 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8688 /* Calculate the result when the argument is a constant. */
8689 if ((res
= do_mpfr_arg1 (arg
, type
, func
, NULL
, NULL
, 0)))
8692 /* Optimize expN(logN(x)) = x. */
8693 if (flag_unsafe_math_optimizations
)
8695 const enum built_in_function fcode
= builtin_mathfn_code (arg
);
8697 if ((func
== mpfr_exp
8698 && (fcode
== BUILT_IN_LOG
8699 || fcode
== BUILT_IN_LOGF
8700 || fcode
== BUILT_IN_LOGL
))
8701 || (func
== mpfr_exp2
8702 && (fcode
== BUILT_IN_LOG2
8703 || fcode
== BUILT_IN_LOG2F
8704 || fcode
== BUILT_IN_LOG2L
))
8705 || (func
== mpfr_exp10
8706 && (fcode
== BUILT_IN_LOG10
8707 || fcode
== BUILT_IN_LOG10F
8708 || fcode
== BUILT_IN_LOG10L
)))
8709 return fold_convert (type
, CALL_EXPR_ARG (arg
, 0));
8716 /* Return true if VAR is a VAR_DECL or a component thereof. */
8719 var_decl_component_p (tree var
)
8722 while (handled_component_p (inner
))
8723 inner
= TREE_OPERAND (inner
, 0);
8724 return SSA_VAR_P (inner
);
8727 /* Fold function call to builtin memset. Return
8728 NULL_TREE if no simplification can be made. */
8731 fold_builtin_memset (tree dest
, tree c
, tree len
, tree type
, bool ignore
)
8734 unsigned HOST_WIDE_INT length
, cval
;
8736 if (! validate_arg (dest
, POINTER_TYPE
)
8737 || ! validate_arg (c
, INTEGER_TYPE
)
8738 || ! validate_arg (len
, INTEGER_TYPE
))
8741 if (! host_integerp (len
, 1))
8744 /* If the LEN parameter is zero, return DEST. */
8745 if (integer_zerop (len
))
8746 return omit_one_operand (type
, dest
, c
);
8748 if (! host_integerp (c
, 1) || TREE_SIDE_EFFECTS (dest
))
8753 if (TREE_CODE (var
) != ADDR_EXPR
)
8756 var
= TREE_OPERAND (var
, 0);
8757 if (TREE_THIS_VOLATILE (var
))
8760 if (!INTEGRAL_TYPE_P (TREE_TYPE (var
))
8761 && !POINTER_TYPE_P (TREE_TYPE (var
)))
8764 if (! var_decl_component_p (var
))
8767 length
= tree_low_cst (len
, 1);
8768 if (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (var
))) != length
8769 || get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
8773 if (length
> HOST_BITS_PER_WIDE_INT
/ BITS_PER_UNIT
)
8776 if (integer_zerop (c
))
8780 if (CHAR_BIT
!= 8 || BITS_PER_UNIT
!= 8 || HOST_BITS_PER_WIDE_INT
> 64)
8783 cval
= tree_low_cst (c
, 1);
8787 cval
|= (cval
<< 31) << 1;
8790 ret
= build_int_cst_type (TREE_TYPE (var
), cval
);
8791 ret
= build2 (MODIFY_EXPR
, TREE_TYPE (var
), var
, ret
);
8795 return omit_one_operand (type
, dest
, ret
);
8798 /* Fold function call to builtin memset. Return
8799 NULL_TREE if no simplification can be made. */
8802 fold_builtin_bzero (tree dest
, tree size
, bool ignore
)
8804 if (! validate_arg (dest
, POINTER_TYPE
)
8805 || ! validate_arg (size
, INTEGER_TYPE
))
8811 /* New argument list transforming bzero(ptr x, int y) to
8812 memset(ptr x, int 0, size_t y). This is done this way
8813 so that if it isn't expanded inline, we fallback to
8814 calling bzero instead of memset. */
8816 return fold_builtin_memset (dest
, integer_zero_node
,
8817 fold_convert (sizetype
, size
),
8818 void_type_node
, ignore
);
8821 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8822 NULL_TREE if no simplification can be made.
8823 If ENDP is 0, return DEST (like memcpy).
8824 If ENDP is 1, return DEST+LEN (like mempcpy).
8825 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8826 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8830 fold_builtin_memory_op (tree dest
, tree src
, tree len
, tree type
, bool ignore
, int endp
)
8832 tree destvar
, srcvar
, expr
;
8834 if (! validate_arg (dest
, POINTER_TYPE
)
8835 || ! validate_arg (src
, POINTER_TYPE
)
8836 || ! validate_arg (len
, INTEGER_TYPE
))
8839 /* If the LEN parameter is zero, return DEST. */
8840 if (integer_zerop (len
))
8841 return omit_one_operand (type
, dest
, src
);
8843 /* If SRC and DEST are the same (and not volatile), return
8844 DEST{,+LEN,+LEN-1}. */
8845 if (operand_equal_p (src
, dest
, 0))
8849 tree srctype
, desttype
;
8850 int src_align
, dest_align
;
8854 src_align
= get_pointer_alignment (src
, BIGGEST_ALIGNMENT
);
8855 dest_align
= get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
8857 /* Both DEST and SRC must be pointer types.
8858 ??? This is what old code did. Is the testing for pointer types
8861 If either SRC is readonly or length is 1, we can use memcpy. */
8862 if (dest_align
&& src_align
8863 && (readonly_data_expr (src
)
8864 || (host_integerp (len
, 1)
8865 && (MIN (src_align
, dest_align
) / BITS_PER_UNIT
>=
8866 tree_low_cst (len
, 1)))))
8868 tree fn
= implicit_built_in_decls
[BUILT_IN_MEMCPY
];
8871 return build_call_expr (fn
, 3, dest
, src
, len
);
8876 if (!host_integerp (len
, 0))
8879 This logic lose for arguments like (type *)malloc (sizeof (type)),
8880 since we strip the casts of up to VOID return value from malloc.
8881 Perhaps we ought to inherit type from non-VOID argument here? */
8884 srctype
= TREE_TYPE (TREE_TYPE (src
));
8885 desttype
= TREE_TYPE (TREE_TYPE (dest
));
8886 if (!srctype
|| !desttype
8887 || !TYPE_SIZE_UNIT (srctype
)
8888 || !TYPE_SIZE_UNIT (desttype
)
8889 || TREE_CODE (TYPE_SIZE_UNIT (srctype
)) != INTEGER_CST
8890 || TREE_CODE (TYPE_SIZE_UNIT (desttype
)) != INTEGER_CST
)
8893 src_align
= get_pointer_alignment (src
, BIGGEST_ALIGNMENT
);
8894 dest_align
= get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
8895 if (dest_align
< (int) TYPE_ALIGN (desttype
)
8896 || src_align
< (int) TYPE_ALIGN (srctype
))
8900 dest
= builtin_save_expr (dest
);
8903 if (tree_int_cst_equal (TYPE_SIZE_UNIT (srctype
), len
))
8905 srcvar
= build_fold_indirect_ref (src
);
8906 if (TREE_THIS_VOLATILE (srcvar
))
8908 else if (!tree_int_cst_equal (lang_hooks
.expr_size (srcvar
), len
))
8910 /* With memcpy, it is possible to bypass aliasing rules, so without
8911 this check i.e. execute/20060930-2.c would be misoptimized,
8912 because it use conflicting alias set to hold argument for the
8913 memcpy call. This check is probably unnecessary with
8914 -fno-strict-aliasing. Similarly for destvar. See also
8916 else if (!var_decl_component_p (srcvar
))
8920 destvar
= NULL_TREE
;
8921 if (tree_int_cst_equal (TYPE_SIZE_UNIT (desttype
), len
))
8923 destvar
= build_fold_indirect_ref (dest
);
8924 if (TREE_THIS_VOLATILE (destvar
))
8925 destvar
= NULL_TREE
;
8926 else if (!tree_int_cst_equal (lang_hooks
.expr_size (destvar
), len
))
8927 destvar
= NULL_TREE
;
8928 else if (!var_decl_component_p (destvar
))
8929 destvar
= NULL_TREE
;
8932 if (srcvar
== NULL_TREE
&& destvar
== NULL_TREE
)
8935 if (srcvar
== NULL_TREE
)
8938 if (TREE_ADDRESSABLE (TREE_TYPE (destvar
)))
8942 if (src_align
< (int) TYPE_ALIGN (srctype
))
8944 if (AGGREGATE_TYPE_P (srctype
)
8945 || SLOW_UNALIGNED_ACCESS (TYPE_MODE (srctype
), src_align
))
8948 srctype
= build_variant_type_copy (srctype
);
8949 TYPE_ALIGN (srctype
) = src_align
;
8950 TYPE_USER_ALIGN (srctype
) = 1;
8951 TYPE_PACKED (srctype
) = 1;
8953 srcptype
= build_pointer_type_for_mode (srctype
, ptr_mode
, true);
8954 src
= fold_convert (srcptype
, src
);
8955 srcvar
= build_fold_indirect_ref (src
);
8957 else if (destvar
== NULL_TREE
)
8960 if (TREE_ADDRESSABLE (TREE_TYPE (srcvar
)))
8964 if (dest_align
< (int) TYPE_ALIGN (desttype
))
8966 if (AGGREGATE_TYPE_P (desttype
)
8967 || SLOW_UNALIGNED_ACCESS (TYPE_MODE (desttype
), dest_align
))
8970 desttype
= build_variant_type_copy (desttype
);
8971 TYPE_ALIGN (desttype
) = dest_align
;
8972 TYPE_USER_ALIGN (desttype
) = 1;
8973 TYPE_PACKED (desttype
) = 1;
8975 destptype
= build_pointer_type_for_mode (desttype
, ptr_mode
, true);
8976 dest
= fold_convert (destptype
, dest
);
8977 destvar
= build_fold_indirect_ref (dest
);
8980 if (srctype
== desttype
8981 || (gimple_in_ssa_p (cfun
)
8982 && useless_type_conversion_p (desttype
, srctype
)))
8984 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar
))
8985 || POINTER_TYPE_P (TREE_TYPE (srcvar
)))
8986 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar
))
8987 || POINTER_TYPE_P (TREE_TYPE (destvar
))))
8988 expr
= fold_convert (TREE_TYPE (destvar
), srcvar
);
8990 expr
= fold_build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (destvar
), srcvar
);
8991 expr
= build2 (MODIFY_EXPR
, TREE_TYPE (destvar
), destvar
, expr
);
8997 if (endp
== 0 || endp
== 3)
8998 return omit_one_operand (type
, dest
, expr
);
9004 len
= fold_build2 (MINUS_EXPR
, TREE_TYPE (len
), len
,
9007 len
= fold_convert (sizetype
, len
);
9008 dest
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (dest
), dest
, len
);
9009 dest
= fold_convert (type
, dest
);
9011 dest
= omit_one_operand (type
, dest
, expr
);
9015 /* Fold function call to builtin strcpy with arguments DEST and SRC.
9016 If LEN is not NULL, it represents the length of the string to be
9017 copied. Return NULL_TREE if no simplification can be made. */
9020 fold_builtin_strcpy (tree fndecl
, tree dest
, tree src
, tree len
)
9024 if (!validate_arg (dest
, POINTER_TYPE
)
9025 || !validate_arg (src
, POINTER_TYPE
))
9028 /* If SRC and DEST are the same (and not volatile), return DEST. */
9029 if (operand_equal_p (src
, dest
, 0))
9030 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl
)), dest
);
9032 if (optimize_function_for_size_p (cfun
))
9035 fn
= implicit_built_in_decls
[BUILT_IN_MEMCPY
];
9041 len
= c_strlen (src
, 1);
9042 if (! len
|| TREE_SIDE_EFFECTS (len
))
9046 len
= size_binop (PLUS_EXPR
, len
, ssize_int (1));
9047 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl
)),
9048 build_call_expr (fn
, 3, dest
, src
, len
));
9051 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
9052 If SLEN is not NULL, it represents the length of the source string.
9053 Return NULL_TREE if no simplification can be made. */
9056 fold_builtin_strncpy (tree fndecl
, tree dest
, tree src
, tree len
, tree slen
)
9060 if (!validate_arg (dest
, POINTER_TYPE
)
9061 || !validate_arg (src
, POINTER_TYPE
)
9062 || !validate_arg (len
, INTEGER_TYPE
))
9065 /* If the LEN parameter is zero, return DEST. */
9066 if (integer_zerop (len
))
9067 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl
)), dest
, src
);
9069 /* We can't compare slen with len as constants below if len is not a
9071 if (len
== 0 || TREE_CODE (len
) != INTEGER_CST
)
9075 slen
= c_strlen (src
, 1);
9077 /* Now, we must be passed a constant src ptr parameter. */
9078 if (slen
== 0 || TREE_CODE (slen
) != INTEGER_CST
)
9081 slen
= size_binop (PLUS_EXPR
, slen
, ssize_int (1));
9083 /* We do not support simplification of this case, though we do
9084 support it when expanding trees into RTL. */
9085 /* FIXME: generate a call to __builtin_memset. */
9086 if (tree_int_cst_lt (slen
, len
))
9089 /* OK transform into builtin memcpy. */
9090 fn
= implicit_built_in_decls
[BUILT_IN_MEMCPY
];
9093 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl
)),
9094 build_call_expr (fn
, 3, dest
, src
, len
));
9097 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
9098 arguments to the call, and TYPE is its return type.
9099 Return NULL_TREE if no simplification can be made. */
9102 fold_builtin_memchr (tree arg1
, tree arg2
, tree len
, tree type
)
9104 if (!validate_arg (arg1
, POINTER_TYPE
)
9105 || !validate_arg (arg2
, INTEGER_TYPE
)
9106 || !validate_arg (len
, INTEGER_TYPE
))
9112 if (TREE_CODE (arg2
) != INTEGER_CST
9113 || !host_integerp (len
, 1))
9116 p1
= c_getstr (arg1
);
9117 if (p1
&& compare_tree_int (len
, strlen (p1
) + 1) <= 0)
9123 if (target_char_cast (arg2
, &c
))
9126 r
= (char *) memchr (p1
, c
, tree_low_cst (len
, 1));
9129 return build_int_cst (TREE_TYPE (arg1
), 0);
9131 tem
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (arg1
), arg1
,
9133 return fold_convert (type
, tem
);
9139 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
9140 Return NULL_TREE if no simplification can be made. */
9143 fold_builtin_memcmp (tree arg1
, tree arg2
, tree len
)
9145 const char *p1
, *p2
;
9147 if (!validate_arg (arg1
, POINTER_TYPE
)
9148 || !validate_arg (arg2
, POINTER_TYPE
)
9149 || !validate_arg (len
, INTEGER_TYPE
))
9152 /* If the LEN parameter is zero, return zero. */
9153 if (integer_zerop (len
))
9154 return omit_two_operands (integer_type_node
, integer_zero_node
,
9157 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9158 if (operand_equal_p (arg1
, arg2
, 0))
9159 return omit_one_operand (integer_type_node
, integer_zero_node
, len
);
9161 p1
= c_getstr (arg1
);
9162 p2
= c_getstr (arg2
);
9164 /* If all arguments are constant, and the value of len is not greater
9165 than the lengths of arg1 and arg2, evaluate at compile-time. */
9166 if (host_integerp (len
, 1) && p1
&& p2
9167 && compare_tree_int (len
, strlen (p1
) + 1) <= 0
9168 && compare_tree_int (len
, strlen (p2
) + 1) <= 0)
9170 const int r
= memcmp (p1
, p2
, tree_low_cst (len
, 1));
9173 return integer_one_node
;
9175 return integer_minus_one_node
;
9177 return integer_zero_node
;
9180 /* If len parameter is one, return an expression corresponding to
9181 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9182 if (host_integerp (len
, 1) && tree_low_cst (len
, 1) == 1)
9184 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
9185 tree cst_uchar_ptr_node
9186 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
9188 tree ind1
= fold_convert (integer_type_node
,
9189 build1 (INDIRECT_REF
, cst_uchar_node
,
9190 fold_convert (cst_uchar_ptr_node
,
9192 tree ind2
= fold_convert (integer_type_node
,
9193 build1 (INDIRECT_REF
, cst_uchar_node
,
9194 fold_convert (cst_uchar_ptr_node
,
9196 return fold_build2 (MINUS_EXPR
, integer_type_node
, ind1
, ind2
);
9202 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9203 Return NULL_TREE if no simplification can be made. */
9206 fold_builtin_strcmp (tree arg1
, tree arg2
)
9208 const char *p1
, *p2
;
9210 if (!validate_arg (arg1
, POINTER_TYPE
)
9211 || !validate_arg (arg2
, POINTER_TYPE
))
9214 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9215 if (operand_equal_p (arg1
, arg2
, 0))
9216 return integer_zero_node
;
9218 p1
= c_getstr (arg1
);
9219 p2
= c_getstr (arg2
);
9223 const int i
= strcmp (p1
, p2
);
9225 return integer_minus_one_node
;
9227 return integer_one_node
;
9229 return integer_zero_node
;
9232 /* If the second arg is "", return *(const unsigned char*)arg1. */
9233 if (p2
&& *p2
== '\0')
9235 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
9236 tree cst_uchar_ptr_node
9237 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
9239 return fold_convert (integer_type_node
,
9240 build1 (INDIRECT_REF
, cst_uchar_node
,
9241 fold_convert (cst_uchar_ptr_node
,
9245 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9246 if (p1
&& *p1
== '\0')
9248 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
9249 tree cst_uchar_ptr_node
9250 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
9252 tree temp
= fold_convert (integer_type_node
,
9253 build1 (INDIRECT_REF
, cst_uchar_node
,
9254 fold_convert (cst_uchar_ptr_node
,
9256 return fold_build1 (NEGATE_EXPR
, integer_type_node
, temp
);
9262 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9263 Return NULL_TREE if no simplification can be made. */
9266 fold_builtin_strncmp (tree arg1
, tree arg2
, tree len
)
9268 const char *p1
, *p2
;
9270 if (!validate_arg (arg1
, POINTER_TYPE
)
9271 || !validate_arg (arg2
, POINTER_TYPE
)
9272 || !validate_arg (len
, INTEGER_TYPE
))
9275 /* If the LEN parameter is zero, return zero. */
9276 if (integer_zerop (len
))
9277 return omit_two_operands (integer_type_node
, integer_zero_node
,
9280 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9281 if (operand_equal_p (arg1
, arg2
, 0))
9282 return omit_one_operand (integer_type_node
, integer_zero_node
, len
);
9284 p1
= c_getstr (arg1
);
9285 p2
= c_getstr (arg2
);
9287 if (host_integerp (len
, 1) && p1
&& p2
)
9289 const int i
= strncmp (p1
, p2
, tree_low_cst (len
, 1));
9291 return integer_one_node
;
9293 return integer_minus_one_node
;
9295 return integer_zero_node
;
9298 /* If the second arg is "", and the length is greater than zero,
9299 return *(const unsigned char*)arg1. */
9300 if (p2
&& *p2
== '\0'
9301 && TREE_CODE (len
) == INTEGER_CST
9302 && tree_int_cst_sgn (len
) == 1)
9304 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
9305 tree cst_uchar_ptr_node
9306 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
9308 return fold_convert (integer_type_node
,
9309 build1 (INDIRECT_REF
, cst_uchar_node
,
9310 fold_convert (cst_uchar_ptr_node
,
9314 /* If the first arg is "", and the length is greater than zero,
9315 return -*(const unsigned char*)arg2. */
9316 if (p1
&& *p1
== '\0'
9317 && TREE_CODE (len
) == INTEGER_CST
9318 && tree_int_cst_sgn (len
) == 1)
9320 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
9321 tree cst_uchar_ptr_node
9322 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
9324 tree temp
= fold_convert (integer_type_node
,
9325 build1 (INDIRECT_REF
, cst_uchar_node
,
9326 fold_convert (cst_uchar_ptr_node
,
9328 return fold_build1 (NEGATE_EXPR
, integer_type_node
, temp
);
9331 /* If len parameter is one, return an expression corresponding to
9332 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9333 if (host_integerp (len
, 1) && tree_low_cst (len
, 1) == 1)
9335 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
9336 tree cst_uchar_ptr_node
9337 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
9339 tree ind1
= fold_convert (integer_type_node
,
9340 build1 (INDIRECT_REF
, cst_uchar_node
,
9341 fold_convert (cst_uchar_ptr_node
,
9343 tree ind2
= fold_convert (integer_type_node
,
9344 build1 (INDIRECT_REF
, cst_uchar_node
,
9345 fold_convert (cst_uchar_ptr_node
,
9347 return fold_build2 (MINUS_EXPR
, integer_type_node
, ind1
, ind2
);
9353 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9354 ARG. Return NULL_TREE if no simplification can be made. */
9357 fold_builtin_signbit (tree arg
, tree type
)
9361 if (!validate_arg (arg
, REAL_TYPE
))
9364 /* If ARG is a compile-time constant, determine the result. */
9365 if (TREE_CODE (arg
) == REAL_CST
9366 && !TREE_OVERFLOW (arg
))
9370 c
= TREE_REAL_CST (arg
);
9371 temp
= REAL_VALUE_NEGATIVE (c
) ? integer_one_node
: integer_zero_node
;
9372 return fold_convert (type
, temp
);
9375 /* If ARG is non-negative, the result is always zero. */
9376 if (tree_expr_nonnegative_p (arg
))
9377 return omit_one_operand (type
, integer_zero_node
, arg
);
9379 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9380 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg
))))
9381 return fold_build2 (LT_EXPR
, type
, arg
,
9382 build_real (TREE_TYPE (arg
), dconst0
));
9387 /* Fold function call to builtin copysign, copysignf or copysignl with
9388 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9392 fold_builtin_copysign (tree fndecl
, tree arg1
, tree arg2
, tree type
)
9396 if (!validate_arg (arg1
, REAL_TYPE
)
9397 || !validate_arg (arg2
, REAL_TYPE
))
9400 /* copysign(X,X) is X. */
9401 if (operand_equal_p (arg1
, arg2
, 0))
9402 return fold_convert (type
, arg1
);
9404 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9405 if (TREE_CODE (arg1
) == REAL_CST
9406 && TREE_CODE (arg2
) == REAL_CST
9407 && !TREE_OVERFLOW (arg1
)
9408 && !TREE_OVERFLOW (arg2
))
9410 REAL_VALUE_TYPE c1
, c2
;
9412 c1
= TREE_REAL_CST (arg1
);
9413 c2
= TREE_REAL_CST (arg2
);
9414 /* c1.sign := c2.sign. */
9415 real_copysign (&c1
, &c2
);
9416 return build_real (type
, c1
);
9419 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9420 Remember to evaluate Y for side-effects. */
9421 if (tree_expr_nonnegative_p (arg2
))
9422 return omit_one_operand (type
,
9423 fold_build1 (ABS_EXPR
, type
, arg1
),
9426 /* Strip sign changing operations for the first argument. */
9427 tem
= fold_strip_sign_ops (arg1
);
9429 return build_call_expr (fndecl
, 2, tem
, arg2
);
9434 /* Fold a call to builtin isascii with argument ARG. */
9437 fold_builtin_isascii (tree arg
)
9439 if (!validate_arg (arg
, INTEGER_TYPE
))
9443 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9444 arg
= build2 (BIT_AND_EXPR
, integer_type_node
, arg
,
9445 build_int_cst (NULL_TREE
,
9446 ~ (unsigned HOST_WIDE_INT
) 0x7f));
9447 return fold_build2 (EQ_EXPR
, integer_type_node
,
9448 arg
, integer_zero_node
);
9452 /* Fold a call to builtin toascii with argument ARG. */
9455 fold_builtin_toascii (tree arg
)
9457 if (!validate_arg (arg
, INTEGER_TYPE
))
9460 /* Transform toascii(c) -> (c & 0x7f). */
9461 return fold_build2 (BIT_AND_EXPR
, integer_type_node
, arg
,
9462 build_int_cst (NULL_TREE
, 0x7f));
9465 /* Fold a call to builtin isdigit with argument ARG. */
9468 fold_builtin_isdigit (tree arg
)
9470 if (!validate_arg (arg
, INTEGER_TYPE
))
9474 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9475 /* According to the C standard, isdigit is unaffected by locale.
9476 However, it definitely is affected by the target character set. */
9477 unsigned HOST_WIDE_INT target_digit0
9478 = lang_hooks
.to_target_charset ('0');
9480 if (target_digit0
== 0)
9483 arg
= fold_convert (unsigned_type_node
, arg
);
9484 arg
= build2 (MINUS_EXPR
, unsigned_type_node
, arg
,
9485 build_int_cst (unsigned_type_node
, target_digit0
));
9486 return fold_build2 (LE_EXPR
, integer_type_node
, arg
,
9487 build_int_cst (unsigned_type_node
, 9));
9491 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9494 fold_builtin_fabs (tree arg
, tree type
)
9496 if (!validate_arg (arg
, REAL_TYPE
))
9499 arg
= fold_convert (type
, arg
);
9500 if (TREE_CODE (arg
) == REAL_CST
)
9501 return fold_abs_const (arg
, type
);
9502 return fold_build1 (ABS_EXPR
, type
, arg
);
9505 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9508 fold_builtin_abs (tree arg
, tree type
)
9510 if (!validate_arg (arg
, INTEGER_TYPE
))
9513 arg
= fold_convert (type
, arg
);
9514 if (TREE_CODE (arg
) == INTEGER_CST
)
9515 return fold_abs_const (arg
, type
);
9516 return fold_build1 (ABS_EXPR
, type
, arg
);
9519 /* Fold a call to builtin fmin or fmax. */
9522 fold_builtin_fmin_fmax (tree arg0
, tree arg1
, tree type
, bool max
)
9524 if (validate_arg (arg0
, REAL_TYPE
) && validate_arg (arg1
, REAL_TYPE
))
9526 /* Calculate the result when the argument is a constant. */
9527 tree res
= do_mpfr_arg2 (arg0
, arg1
, type
, (max
? mpfr_max
: mpfr_min
));
9532 /* If either argument is NaN, return the other one. Avoid the
9533 transformation if we get (and honor) a signalling NaN. Using
9534 omit_one_operand() ensures we create a non-lvalue. */
9535 if (TREE_CODE (arg0
) == REAL_CST
9536 && real_isnan (&TREE_REAL_CST (arg0
))
9537 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
9538 || ! TREE_REAL_CST (arg0
).signalling
))
9539 return omit_one_operand (type
, arg1
, arg0
);
9540 if (TREE_CODE (arg1
) == REAL_CST
9541 && real_isnan (&TREE_REAL_CST (arg1
))
9542 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1
)))
9543 || ! TREE_REAL_CST (arg1
).signalling
))
9544 return omit_one_operand (type
, arg0
, arg1
);
9546 /* Transform fmin/fmax(x,x) -> x. */
9547 if (operand_equal_p (arg0
, arg1
, OEP_PURE_SAME
))
9548 return omit_one_operand (type
, arg0
, arg1
);
9550 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9551 functions to return the numeric arg if the other one is NaN.
9552 These tree codes don't honor that, so only transform if
9553 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9554 handled, so we don't have to worry about it either. */
9555 if (flag_finite_math_only
)
9556 return fold_build2 ((max
? MAX_EXPR
: MIN_EXPR
), type
,
9557 fold_convert (type
, arg0
),
9558 fold_convert (type
, arg1
));
9563 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9566 fold_builtin_carg (tree arg
, tree type
)
9568 if (validate_arg (arg
, COMPLEX_TYPE
))
9570 tree atan2_fn
= mathfn_built_in (type
, BUILT_IN_ATAN2
);
9574 tree new_arg
= builtin_save_expr (arg
);
9575 tree r_arg
= fold_build1 (REALPART_EXPR
, type
, new_arg
);
9576 tree i_arg
= fold_build1 (IMAGPART_EXPR
, type
, new_arg
);
9577 return build_call_expr (atan2_fn
, 2, i_arg
, r_arg
);
9584 /* Fold a call to builtin logb/ilogb. */
9587 fold_builtin_logb (tree arg
, tree rettype
)
9589 if (! validate_arg (arg
, REAL_TYPE
))
9594 if (TREE_CODE (arg
) == REAL_CST
&& ! TREE_OVERFLOW (arg
))
9596 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg
);
9602 /* If arg is Inf or NaN and we're logb, return it. */
9603 if (TREE_CODE (rettype
) == REAL_TYPE
)
9604 return fold_convert (rettype
, arg
);
9605 /* Fall through... */
9607 /* Zero may set errno and/or raise an exception for logb, also
9608 for ilogb we don't know FP_ILOGB0. */
9611 /* For normal numbers, proceed iff radix == 2. In GCC,
9612 normalized significands are in the range [0.5, 1.0). We
9613 want the exponent as if they were [1.0, 2.0) so get the
9614 exponent and subtract 1. */
9615 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg
)))->b
== 2)
9616 return fold_convert (rettype
, build_int_cst (NULL_TREE
,
9617 REAL_EXP (value
)-1));
9625 /* Fold a call to builtin significand, if radix == 2. */
9628 fold_builtin_significand (tree arg
, tree rettype
)
9630 if (! validate_arg (arg
, REAL_TYPE
))
9635 if (TREE_CODE (arg
) == REAL_CST
&& ! TREE_OVERFLOW (arg
))
9637 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg
);
9644 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9645 return fold_convert (rettype
, arg
);
9647 /* For normal numbers, proceed iff radix == 2. */
9648 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg
)))->b
== 2)
9650 REAL_VALUE_TYPE result
= *value
;
9651 /* In GCC, normalized significands are in the range [0.5,
9652 1.0). We want them to be [1.0, 2.0) so set the
9654 SET_REAL_EXP (&result
, 1);
9655 return build_real (rettype
, result
);
9664 /* Fold a call to builtin frexp, we can assume the base is 2. */
9667 fold_builtin_frexp (tree arg0
, tree arg1
, tree rettype
)
9669 if (! validate_arg (arg0
, REAL_TYPE
) || ! validate_arg (arg1
, POINTER_TYPE
))
9674 if (!(TREE_CODE (arg0
) == REAL_CST
&& ! TREE_OVERFLOW (arg0
)))
9677 arg1
= build_fold_indirect_ref (arg1
);
9679 /* Proceed if a valid pointer type was passed in. */
9680 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1
)) == integer_type_node
)
9682 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg0
);
9688 /* For +-0, return (*exp = 0, +-0). */
9689 exp
= integer_zero_node
;
9694 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9695 return omit_one_operand (rettype
, arg0
, arg1
);
9698 /* Since the frexp function always expects base 2, and in
9699 GCC normalized significands are already in the range
9700 [0.5, 1.0), we have exactly what frexp wants. */
9701 REAL_VALUE_TYPE frac_rvt
= *value
;
9702 SET_REAL_EXP (&frac_rvt
, 0);
9703 frac
= build_real (rettype
, frac_rvt
);
9704 exp
= build_int_cst (NULL_TREE
, REAL_EXP (value
));
9711 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9712 arg1
= fold_build2 (MODIFY_EXPR
, rettype
, arg1
, exp
);
9713 TREE_SIDE_EFFECTS (arg1
) = 1;
9714 return fold_build2 (COMPOUND_EXPR
, rettype
, arg1
, frac
);
9720 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9721 then we can assume the base is two. If it's false, then we have to
9722 check the mode of the TYPE parameter in certain cases. */
9725 fold_builtin_load_exponent (tree arg0
, tree arg1
, tree type
, bool ldexp
)
9727 if (validate_arg (arg0
, REAL_TYPE
) && validate_arg (arg1
, INTEGER_TYPE
))
9732 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9733 if (real_zerop (arg0
) || integer_zerop (arg1
)
9734 || (TREE_CODE (arg0
) == REAL_CST
9735 && !real_isfinite (&TREE_REAL_CST (arg0
))))
9736 return omit_one_operand (type
, arg0
, arg1
);
9738 /* If both arguments are constant, then try to evaluate it. */
9739 if ((ldexp
|| REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2)
9740 && TREE_CODE (arg0
) == REAL_CST
&& !TREE_OVERFLOW (arg0
)
9741 && host_integerp (arg1
, 0))
9743 /* Bound the maximum adjustment to twice the range of the
9744 mode's valid exponents. Use abs to ensure the range is
9745 positive as a sanity check. */
9746 const long max_exp_adj
= 2 *
9747 labs (REAL_MODE_FORMAT (TYPE_MODE (type
))->emax
9748 - REAL_MODE_FORMAT (TYPE_MODE (type
))->emin
);
9750 /* Get the user-requested adjustment. */
9751 const HOST_WIDE_INT req_exp_adj
= tree_low_cst (arg1
, 0);
9753 /* The requested adjustment must be inside this range. This
9754 is a preliminary cap to avoid things like overflow, we
9755 may still fail to compute the result for other reasons. */
9756 if (-max_exp_adj
< req_exp_adj
&& req_exp_adj
< max_exp_adj
)
9758 REAL_VALUE_TYPE initial_result
;
9760 real_ldexp (&initial_result
, &TREE_REAL_CST (arg0
), req_exp_adj
);
9762 /* Ensure we didn't overflow. */
9763 if (! real_isinf (&initial_result
))
9765 const REAL_VALUE_TYPE trunc_result
9766 = real_value_truncate (TYPE_MODE (type
), initial_result
);
9768 /* Only proceed if the target mode can hold the
9770 if (REAL_VALUES_EQUAL (initial_result
, trunc_result
))
9771 return build_real (type
, trunc_result
);
9780 /* Fold a call to builtin modf. */
9783 fold_builtin_modf (tree arg0
, tree arg1
, tree rettype
)
9785 if (! validate_arg (arg0
, REAL_TYPE
) || ! validate_arg (arg1
, POINTER_TYPE
))
9790 if (!(TREE_CODE (arg0
) == REAL_CST
&& ! TREE_OVERFLOW (arg0
)))
9793 arg1
= build_fold_indirect_ref (arg1
);
9795 /* Proceed if a valid pointer type was passed in. */
9796 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1
)) == TYPE_MAIN_VARIANT (rettype
))
9798 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg0
);
9799 REAL_VALUE_TYPE trunc
, frac
;
9805 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9806 trunc
= frac
= *value
;
9809 /* For +-Inf, return (*arg1 = arg0, +-0). */
9811 frac
.sign
= value
->sign
;
9815 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9816 real_trunc (&trunc
, VOIDmode
, value
);
9817 real_arithmetic (&frac
, MINUS_EXPR
, value
, &trunc
);
9818 /* If the original number was negative and already
9819 integral, then the fractional part is -0.0. */
9820 if (value
->sign
&& frac
.cl
== rvc_zero
)
9821 frac
.sign
= value
->sign
;
9825 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9826 arg1
= fold_build2 (MODIFY_EXPR
, rettype
, arg1
,
9827 build_real (rettype
, trunc
));
9828 TREE_SIDE_EFFECTS (arg1
) = 1;
9829 return fold_build2 (COMPOUND_EXPR
, rettype
, arg1
,
9830 build_real (rettype
, frac
));
9836 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9837 ARG is the argument for the call. */
9840 fold_builtin_classify (tree fndecl
, tree arg
, int builtin_index
)
9842 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9845 if (!validate_arg (arg
, REAL_TYPE
))
9848 switch (builtin_index
)
9850 case BUILT_IN_ISINF
:
9851 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg
))))
9852 return omit_one_operand (type
, integer_zero_node
, arg
);
9854 if (TREE_CODE (arg
) == REAL_CST
)
9856 r
= TREE_REAL_CST (arg
);
9857 if (real_isinf (&r
))
9858 return real_compare (GT_EXPR
, &r
, &dconst0
)
9859 ? integer_one_node
: integer_minus_one_node
;
9861 return integer_zero_node
;
9866 case BUILT_IN_ISINF_SIGN
:
9868 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9869 /* In a boolean context, GCC will fold the inner COND_EXPR to
9870 1. So e.g. "if (isinf_sign(x))" would be folded to just
9871 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9872 tree signbit_fn
= mathfn_built_in_1 (TREE_TYPE (arg
), BUILT_IN_SIGNBIT
, 0);
9873 tree isinf_fn
= built_in_decls
[BUILT_IN_ISINF
];
9874 tree tmp
= NULL_TREE
;
9876 arg
= builtin_save_expr (arg
);
9878 if (signbit_fn
&& isinf_fn
)
9880 tree signbit_call
= build_call_expr (signbit_fn
, 1, arg
);
9881 tree isinf_call
= build_call_expr (isinf_fn
, 1, arg
);
9883 signbit_call
= fold_build2 (NE_EXPR
, integer_type_node
,
9884 signbit_call
, integer_zero_node
);
9885 isinf_call
= fold_build2 (NE_EXPR
, integer_type_node
,
9886 isinf_call
, integer_zero_node
);
9888 tmp
= fold_build3 (COND_EXPR
, integer_type_node
, signbit_call
,
9889 integer_minus_one_node
, integer_one_node
);
9890 tmp
= fold_build3 (COND_EXPR
, integer_type_node
, isinf_call
, tmp
,
9897 case BUILT_IN_ISFINITE
:
9898 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg
)))
9899 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg
))))
9900 return omit_one_operand (type
, integer_one_node
, arg
);
9902 if (TREE_CODE (arg
) == REAL_CST
)
9904 r
= TREE_REAL_CST (arg
);
9905 return real_isfinite (&r
) ? integer_one_node
: integer_zero_node
;
9910 case BUILT_IN_ISNAN
:
9911 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg
))))
9912 return omit_one_operand (type
, integer_zero_node
, arg
);
9914 if (TREE_CODE (arg
) == REAL_CST
)
9916 r
= TREE_REAL_CST (arg
);
9917 return real_isnan (&r
) ? integer_one_node
: integer_zero_node
;
9920 arg
= builtin_save_expr (arg
);
9921 return fold_build2 (UNORDERED_EXPR
, type
, arg
, arg
);
9928 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9929 This builtin will generate code to return the appropriate floating
9930 point classification depending on the value of the floating point
9931 number passed in. The possible return values must be supplied as
9932 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9933 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
9934 one floating point argument which is "type generic". */
9937 fold_builtin_fpclassify (tree exp
)
9939 tree fp_nan
, fp_infinite
, fp_normal
, fp_subnormal
, fp_zero
,
9940 arg
, type
, res
, tmp
;
9941 enum machine_mode mode
;
9945 /* Verify the required arguments in the original call. */
9946 if (!validate_arglist (exp
, INTEGER_TYPE
, INTEGER_TYPE
,
9947 INTEGER_TYPE
, INTEGER_TYPE
,
9948 INTEGER_TYPE
, REAL_TYPE
, VOID_TYPE
))
9951 fp_nan
= CALL_EXPR_ARG (exp
, 0);
9952 fp_infinite
= CALL_EXPR_ARG (exp
, 1);
9953 fp_normal
= CALL_EXPR_ARG (exp
, 2);
9954 fp_subnormal
= CALL_EXPR_ARG (exp
, 3);
9955 fp_zero
= CALL_EXPR_ARG (exp
, 4);
9956 arg
= CALL_EXPR_ARG (exp
, 5);
9957 type
= TREE_TYPE (arg
);
9958 mode
= TYPE_MODE (type
);
9959 arg
= builtin_save_expr (fold_build1 (ABS_EXPR
, type
, arg
));
9963 (fabs(x) == Inf ? FP_INFINITE :
9964 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9965 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9967 tmp
= fold_build2 (EQ_EXPR
, integer_type_node
, arg
,
9968 build_real (type
, dconst0
));
9969 res
= fold_build3 (COND_EXPR
, integer_type_node
, tmp
, fp_zero
, fp_subnormal
);
9971 sprintf (buf
, "0x1p%d", REAL_MODE_FORMAT (mode
)->emin
- 1);
9972 real_from_string (&r
, buf
);
9973 tmp
= fold_build2 (GE_EXPR
, integer_type_node
, arg
, build_real (type
, r
));
9974 res
= fold_build3 (COND_EXPR
, integer_type_node
, tmp
, fp_normal
, res
);
9976 if (HONOR_INFINITIES (mode
))
9979 tmp
= fold_build2 (EQ_EXPR
, integer_type_node
, arg
,
9980 build_real (type
, r
));
9981 res
= fold_build3 (COND_EXPR
, integer_type_node
, tmp
, fp_infinite
, res
);
9984 if (HONOR_NANS (mode
))
9986 tmp
= fold_build2 (ORDERED_EXPR
, integer_type_node
, arg
, arg
);
9987 res
= fold_build3 (COND_EXPR
, integer_type_node
, tmp
, res
, fp_nan
);
9993 /* Fold a call to an unordered comparison function such as
9994 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9995 being called and ARG0 and ARG1 are the arguments for the call.
9996 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9997 the opposite of the desired result. UNORDERED_CODE is used
9998 for modes that can hold NaNs and ORDERED_CODE is used for
10002 fold_builtin_unordered_cmp (tree fndecl
, tree arg0
, tree arg1
,
10003 enum tree_code unordered_code
,
10004 enum tree_code ordered_code
)
10006 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
10007 enum tree_code code
;
10009 enum tree_code code0
, code1
;
10010 tree cmp_type
= NULL_TREE
;
10012 type0
= TREE_TYPE (arg0
);
10013 type1
= TREE_TYPE (arg1
);
10015 code0
= TREE_CODE (type0
);
10016 code1
= TREE_CODE (type1
);
10018 if (code0
== REAL_TYPE
&& code1
== REAL_TYPE
)
10019 /* Choose the wider of two real types. */
10020 cmp_type
= TYPE_PRECISION (type0
) >= TYPE_PRECISION (type1
)
10022 else if (code0
== REAL_TYPE
&& code1
== INTEGER_TYPE
)
10024 else if (code0
== INTEGER_TYPE
&& code1
== REAL_TYPE
)
10027 arg0
= fold_convert (cmp_type
, arg0
);
10028 arg1
= fold_convert (cmp_type
, arg1
);
10030 if (unordered_code
== UNORDERED_EXPR
)
10032 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))))
10033 return omit_two_operands (type
, integer_zero_node
, arg0
, arg1
);
10034 return fold_build2 (UNORDERED_EXPR
, type
, arg0
, arg1
);
10037 code
= HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))) ? unordered_code
10039 return fold_build1 (TRUTH_NOT_EXPR
, type
,
10040 fold_build2 (code
, type
, arg0
, arg1
));
10043 /* Fold a call to built-in function FNDECL with 0 arguments.
10044 IGNORE is true if the result of the function call is ignored. This
10045 function returns NULL_TREE if no simplification was possible. */
10048 fold_builtin_0 (tree fndecl
, bool ignore ATTRIBUTE_UNUSED
)
10050 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
10051 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10054 CASE_FLT_FN (BUILT_IN_INF
):
10055 case BUILT_IN_INFD32
:
10056 case BUILT_IN_INFD64
:
10057 case BUILT_IN_INFD128
:
10058 return fold_builtin_inf (type
, true);
10060 CASE_FLT_FN (BUILT_IN_HUGE_VAL
):
10061 return fold_builtin_inf (type
, false);
10063 case BUILT_IN_CLASSIFY_TYPE
:
10064 return fold_builtin_classify_type (NULL_TREE
);
10072 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10073 IGNORE is true if the result of the function call is ignored. This
10074 function returns NULL_TREE if no simplification was possible. */
10077 fold_builtin_1 (tree fndecl
, tree arg0
, bool ignore
)
10079 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
10080 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10084 case BUILT_IN_CONSTANT_P
:
10086 tree val
= fold_builtin_constant_p (arg0
);
10088 /* Gimplification will pull the CALL_EXPR for the builtin out of
10089 an if condition. When not optimizing, we'll not CSE it back.
10090 To avoid link error types of regressions, return false now. */
10091 if (!val
&& !optimize
)
10092 val
= integer_zero_node
;
10097 case BUILT_IN_CLASSIFY_TYPE
:
10098 return fold_builtin_classify_type (arg0
);
10100 case BUILT_IN_STRLEN
:
10101 return fold_builtin_strlen (arg0
);
10103 CASE_FLT_FN (BUILT_IN_FABS
):
10104 return fold_builtin_fabs (arg0
, type
);
10107 case BUILT_IN_LABS
:
10108 case BUILT_IN_LLABS
:
10109 case BUILT_IN_IMAXABS
:
10110 return fold_builtin_abs (arg0
, type
);
10112 CASE_FLT_FN (BUILT_IN_CONJ
):
10113 if (validate_arg (arg0
, COMPLEX_TYPE
))
10114 return fold_build1 (CONJ_EXPR
, type
, arg0
);
10117 CASE_FLT_FN (BUILT_IN_CREAL
):
10118 if (validate_arg (arg0
, COMPLEX_TYPE
))
10119 return non_lvalue (fold_build1 (REALPART_EXPR
, type
, arg0
));;
10122 CASE_FLT_FN (BUILT_IN_CIMAG
):
10123 if (validate_arg (arg0
, COMPLEX_TYPE
))
10124 return non_lvalue (fold_build1 (IMAGPART_EXPR
, type
, arg0
));
10127 CASE_FLT_FN (BUILT_IN_CCOS
):
10128 CASE_FLT_FN (BUILT_IN_CCOSH
):
10129 /* These functions are "even", i.e. f(x) == f(-x). */
10130 if (validate_arg (arg0
, COMPLEX_TYPE
))
10132 tree narg
= fold_strip_sign_ops (arg0
);
10134 return build_call_expr (fndecl
, 1, narg
);
10138 CASE_FLT_FN (BUILT_IN_CABS
):
10139 return fold_builtin_cabs (arg0
, type
, fndecl
);
10141 CASE_FLT_FN (BUILT_IN_CARG
):
10142 return fold_builtin_carg (arg0
, type
);
10144 CASE_FLT_FN (BUILT_IN_SQRT
):
10145 return fold_builtin_sqrt (arg0
, type
);
10147 CASE_FLT_FN (BUILT_IN_CBRT
):
10148 return fold_builtin_cbrt (arg0
, type
);
10150 CASE_FLT_FN (BUILT_IN_ASIN
):
10151 if (validate_arg (arg0
, REAL_TYPE
))
10152 return do_mpfr_arg1 (arg0
, type
, mpfr_asin
,
10153 &dconstm1
, &dconst1
, true);
10156 CASE_FLT_FN (BUILT_IN_ACOS
):
10157 if (validate_arg (arg0
, REAL_TYPE
))
10158 return do_mpfr_arg1 (arg0
, type
, mpfr_acos
,
10159 &dconstm1
, &dconst1
, true);
10162 CASE_FLT_FN (BUILT_IN_ATAN
):
10163 if (validate_arg (arg0
, REAL_TYPE
))
10164 return do_mpfr_arg1 (arg0
, type
, mpfr_atan
, NULL
, NULL
, 0);
10167 CASE_FLT_FN (BUILT_IN_ASINH
):
10168 if (validate_arg (arg0
, REAL_TYPE
))
10169 return do_mpfr_arg1 (arg0
, type
, mpfr_asinh
, NULL
, NULL
, 0);
10172 CASE_FLT_FN (BUILT_IN_ACOSH
):
10173 if (validate_arg (arg0
, REAL_TYPE
))
10174 return do_mpfr_arg1 (arg0
, type
, mpfr_acosh
,
10175 &dconst1
, NULL
, true);
10178 CASE_FLT_FN (BUILT_IN_ATANH
):
10179 if (validate_arg (arg0
, REAL_TYPE
))
10180 return do_mpfr_arg1 (arg0
, type
, mpfr_atanh
,
10181 &dconstm1
, &dconst1
, false);
10184 CASE_FLT_FN (BUILT_IN_SIN
):
10185 if (validate_arg (arg0
, REAL_TYPE
))
10186 return do_mpfr_arg1 (arg0
, type
, mpfr_sin
, NULL
, NULL
, 0);
10189 CASE_FLT_FN (BUILT_IN_COS
):
10190 return fold_builtin_cos (arg0
, type
, fndecl
);
10193 CASE_FLT_FN (BUILT_IN_TAN
):
10194 return fold_builtin_tan (arg0
, type
);
10196 CASE_FLT_FN (BUILT_IN_CEXP
):
10197 return fold_builtin_cexp (arg0
, type
);
10199 CASE_FLT_FN (BUILT_IN_CEXPI
):
10200 if (validate_arg (arg0
, REAL_TYPE
))
10201 return do_mpfr_sincos (arg0
, NULL_TREE
, NULL_TREE
);
10204 CASE_FLT_FN (BUILT_IN_SINH
):
10205 if (validate_arg (arg0
, REAL_TYPE
))
10206 return do_mpfr_arg1 (arg0
, type
, mpfr_sinh
, NULL
, NULL
, 0);
10209 CASE_FLT_FN (BUILT_IN_COSH
):
10210 return fold_builtin_cosh (arg0
, type
, fndecl
);
10212 CASE_FLT_FN (BUILT_IN_TANH
):
10213 if (validate_arg (arg0
, REAL_TYPE
))
10214 return do_mpfr_arg1 (arg0
, type
, mpfr_tanh
, NULL
, NULL
, 0);
10217 CASE_FLT_FN (BUILT_IN_ERF
):
10218 if (validate_arg (arg0
, REAL_TYPE
))
10219 return do_mpfr_arg1 (arg0
, type
, mpfr_erf
, NULL
, NULL
, 0);
10222 CASE_FLT_FN (BUILT_IN_ERFC
):
10223 if (validate_arg (arg0
, REAL_TYPE
))
10224 return do_mpfr_arg1 (arg0
, type
, mpfr_erfc
, NULL
, NULL
, 0);
10227 CASE_FLT_FN (BUILT_IN_TGAMMA
):
10228 if (validate_arg (arg0
, REAL_TYPE
))
10229 return do_mpfr_arg1 (arg0
, type
, mpfr_gamma
, NULL
, NULL
, 0);
10232 CASE_FLT_FN (BUILT_IN_EXP
):
10233 return fold_builtin_exponent (fndecl
, arg0
, mpfr_exp
);
10235 CASE_FLT_FN (BUILT_IN_EXP2
):
10236 return fold_builtin_exponent (fndecl
, arg0
, mpfr_exp2
);
10238 CASE_FLT_FN (BUILT_IN_EXP10
):
10239 CASE_FLT_FN (BUILT_IN_POW10
):
10240 return fold_builtin_exponent (fndecl
, arg0
, mpfr_exp10
);
10242 CASE_FLT_FN (BUILT_IN_EXPM1
):
10243 if (validate_arg (arg0
, REAL_TYPE
))
10244 return do_mpfr_arg1 (arg0
, type
, mpfr_expm1
, NULL
, NULL
, 0);
10247 CASE_FLT_FN (BUILT_IN_LOG
):
10248 return fold_builtin_logarithm (fndecl
, arg0
, mpfr_log
);
10250 CASE_FLT_FN (BUILT_IN_LOG2
):
10251 return fold_builtin_logarithm (fndecl
, arg0
, mpfr_log2
);
10253 CASE_FLT_FN (BUILT_IN_LOG10
):
10254 return fold_builtin_logarithm (fndecl
, arg0
, mpfr_log10
);
10256 CASE_FLT_FN (BUILT_IN_LOG1P
):
10257 if (validate_arg (arg0
, REAL_TYPE
))
10258 return do_mpfr_arg1 (arg0
, type
, mpfr_log1p
,
10259 &dconstm1
, NULL
, false);
10262 CASE_FLT_FN (BUILT_IN_J0
):
10263 if (validate_arg (arg0
, REAL_TYPE
))
10264 return do_mpfr_arg1 (arg0
, type
, mpfr_j0
,
10268 CASE_FLT_FN (BUILT_IN_J1
):
10269 if (validate_arg (arg0
, REAL_TYPE
))
10270 return do_mpfr_arg1 (arg0
, type
, mpfr_j1
,
10274 CASE_FLT_FN (BUILT_IN_Y0
):
10275 if (validate_arg (arg0
, REAL_TYPE
))
10276 return do_mpfr_arg1 (arg0
, type
, mpfr_y0
,
10277 &dconst0
, NULL
, false);
10280 CASE_FLT_FN (BUILT_IN_Y1
):
10281 if (validate_arg (arg0
, REAL_TYPE
))
10282 return do_mpfr_arg1 (arg0
, type
, mpfr_y1
,
10283 &dconst0
, NULL
, false);
10286 CASE_FLT_FN (BUILT_IN_NAN
):
10287 case BUILT_IN_NAND32
:
10288 case BUILT_IN_NAND64
:
10289 case BUILT_IN_NAND128
:
10290 return fold_builtin_nan (arg0
, type
, true);
10292 CASE_FLT_FN (BUILT_IN_NANS
):
10293 return fold_builtin_nan (arg0
, type
, false);
10295 CASE_FLT_FN (BUILT_IN_FLOOR
):
10296 return fold_builtin_floor (fndecl
, arg0
);
10298 CASE_FLT_FN (BUILT_IN_CEIL
):
10299 return fold_builtin_ceil (fndecl
, arg0
);
10301 CASE_FLT_FN (BUILT_IN_TRUNC
):
10302 return fold_builtin_trunc (fndecl
, arg0
);
10304 CASE_FLT_FN (BUILT_IN_ROUND
):
10305 return fold_builtin_round (fndecl
, arg0
);
10307 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
10308 CASE_FLT_FN (BUILT_IN_RINT
):
10309 return fold_trunc_transparent_mathfn (fndecl
, arg0
);
10311 CASE_FLT_FN (BUILT_IN_LCEIL
):
10312 CASE_FLT_FN (BUILT_IN_LLCEIL
):
10313 CASE_FLT_FN (BUILT_IN_LFLOOR
):
10314 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
10315 CASE_FLT_FN (BUILT_IN_LROUND
):
10316 CASE_FLT_FN (BUILT_IN_LLROUND
):
10317 return fold_builtin_int_roundingfn (fndecl
, arg0
);
10319 CASE_FLT_FN (BUILT_IN_LRINT
):
10320 CASE_FLT_FN (BUILT_IN_LLRINT
):
10321 return fold_fixed_mathfn (fndecl
, arg0
);
10323 case BUILT_IN_BSWAP32
:
10324 case BUILT_IN_BSWAP64
:
10325 return fold_builtin_bswap (fndecl
, arg0
);
10327 CASE_INT_FN (BUILT_IN_FFS
):
10328 CASE_INT_FN (BUILT_IN_CLZ
):
10329 CASE_INT_FN (BUILT_IN_CTZ
):
10330 CASE_INT_FN (BUILT_IN_POPCOUNT
):
10331 CASE_INT_FN (BUILT_IN_PARITY
):
10332 return fold_builtin_bitop (fndecl
, arg0
);
10334 CASE_FLT_FN (BUILT_IN_SIGNBIT
):
10335 return fold_builtin_signbit (arg0
, type
);
10337 CASE_FLT_FN (BUILT_IN_SIGNIFICAND
):
10338 return fold_builtin_significand (arg0
, type
);
10340 CASE_FLT_FN (BUILT_IN_ILOGB
):
10341 CASE_FLT_FN (BUILT_IN_LOGB
):
10342 return fold_builtin_logb (arg0
, type
);
10344 case BUILT_IN_ISASCII
:
10345 return fold_builtin_isascii (arg0
);
10347 case BUILT_IN_TOASCII
:
10348 return fold_builtin_toascii (arg0
);
10350 case BUILT_IN_ISDIGIT
:
10351 return fold_builtin_isdigit (arg0
);
10353 CASE_FLT_FN (BUILT_IN_FINITE
):
10354 case BUILT_IN_FINITED32
:
10355 case BUILT_IN_FINITED64
:
10356 case BUILT_IN_FINITED128
:
10357 case BUILT_IN_ISFINITE
:
10358 return fold_builtin_classify (fndecl
, arg0
, BUILT_IN_ISFINITE
);
10360 CASE_FLT_FN (BUILT_IN_ISINF
):
10361 case BUILT_IN_ISINFD32
:
10362 case BUILT_IN_ISINFD64
:
10363 case BUILT_IN_ISINFD128
:
10364 return fold_builtin_classify (fndecl
, arg0
, BUILT_IN_ISINF
);
10366 case BUILT_IN_ISINF_SIGN
:
10367 return fold_builtin_classify (fndecl
, arg0
, BUILT_IN_ISINF_SIGN
);
10369 CASE_FLT_FN (BUILT_IN_ISNAN
):
10370 case BUILT_IN_ISNAND32
:
10371 case BUILT_IN_ISNAND64
:
10372 case BUILT_IN_ISNAND128
:
10373 return fold_builtin_classify (fndecl
, arg0
, BUILT_IN_ISNAN
);
10375 case BUILT_IN_PRINTF
:
10376 case BUILT_IN_PRINTF_UNLOCKED
:
10377 case BUILT_IN_VPRINTF
:
10378 return fold_builtin_printf (fndecl
, arg0
, NULL_TREE
, ignore
, fcode
);
10388 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10389 IGNORE is true if the result of the function call is ignored. This
10390 function returns NULL_TREE if no simplification was possible. */
10393 fold_builtin_2 (tree fndecl
, tree arg0
, tree arg1
, bool ignore
)
10395 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
10396 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10400 CASE_FLT_FN (BUILT_IN_JN
):
10401 if (validate_arg (arg0
, INTEGER_TYPE
)
10402 && validate_arg (arg1
, REAL_TYPE
))
10403 return do_mpfr_bessel_n (arg0
, arg1
, type
, mpfr_jn
, NULL
, 0);
10406 CASE_FLT_FN (BUILT_IN_YN
):
10407 if (validate_arg (arg0
, INTEGER_TYPE
)
10408 && validate_arg (arg1
, REAL_TYPE
))
10409 return do_mpfr_bessel_n (arg0
, arg1
, type
, mpfr_yn
,
10413 CASE_FLT_FN (BUILT_IN_DREM
):
10414 CASE_FLT_FN (BUILT_IN_REMAINDER
):
10415 if (validate_arg (arg0
, REAL_TYPE
)
10416 && validate_arg(arg1
, REAL_TYPE
))
10417 return do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_remainder
);
10420 CASE_FLT_FN_REENT (BUILT_IN_GAMMA
): /* GAMMA_R */
10421 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA
): /* LGAMMA_R */
10422 if (validate_arg (arg0
, REAL_TYPE
)
10423 && validate_arg(arg1
, POINTER_TYPE
))
10424 return do_mpfr_lgamma_r (arg0
, arg1
, type
);
10427 CASE_FLT_FN (BUILT_IN_ATAN2
):
10428 if (validate_arg (arg0
, REAL_TYPE
)
10429 && validate_arg(arg1
, REAL_TYPE
))
10430 return do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_atan2
);
10433 CASE_FLT_FN (BUILT_IN_FDIM
):
10434 if (validate_arg (arg0
, REAL_TYPE
)
10435 && validate_arg(arg1
, REAL_TYPE
))
10436 return do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_dim
);
10439 CASE_FLT_FN (BUILT_IN_HYPOT
):
10440 return fold_builtin_hypot (fndecl
, arg0
, arg1
, type
);
10442 CASE_FLT_FN (BUILT_IN_LDEXP
):
10443 return fold_builtin_load_exponent (arg0
, arg1
, type
, /*ldexp=*/true);
10444 CASE_FLT_FN (BUILT_IN_SCALBN
):
10445 CASE_FLT_FN (BUILT_IN_SCALBLN
):
10446 return fold_builtin_load_exponent (arg0
, arg1
, type
, /*ldexp=*/false);
10448 CASE_FLT_FN (BUILT_IN_FREXP
):
10449 return fold_builtin_frexp (arg0
, arg1
, type
);
10451 CASE_FLT_FN (BUILT_IN_MODF
):
10452 return fold_builtin_modf (arg0
, arg1
, type
);
10454 case BUILT_IN_BZERO
:
10455 return fold_builtin_bzero (arg0
, arg1
, ignore
);
10457 case BUILT_IN_FPUTS
:
10458 return fold_builtin_fputs (arg0
, arg1
, ignore
, false, NULL_TREE
);
10460 case BUILT_IN_FPUTS_UNLOCKED
:
10461 return fold_builtin_fputs (arg0
, arg1
, ignore
, true, NULL_TREE
);
10463 case BUILT_IN_STRSTR
:
10464 return fold_builtin_strstr (arg0
, arg1
, type
);
10466 case BUILT_IN_STRCAT
:
10467 return fold_builtin_strcat (arg0
, arg1
);
10469 case BUILT_IN_STRSPN
:
10470 return fold_builtin_strspn (arg0
, arg1
);
10472 case BUILT_IN_STRCSPN
:
10473 return fold_builtin_strcspn (arg0
, arg1
);
10475 case BUILT_IN_STRCHR
:
10476 case BUILT_IN_INDEX
:
10477 return fold_builtin_strchr (arg0
, arg1
, type
);
10479 case BUILT_IN_STRRCHR
:
10480 case BUILT_IN_RINDEX
:
10481 return fold_builtin_strrchr (arg0
, arg1
, type
);
10483 case BUILT_IN_STRCPY
:
10484 return fold_builtin_strcpy (fndecl
, arg0
, arg1
, NULL_TREE
);
10486 case BUILT_IN_STPCPY
:
10489 tree fn
= implicit_built_in_decls
[BUILT_IN_STRCPY
];
10493 return build_call_expr (fn
, 2, arg0
, arg1
);
10497 case BUILT_IN_STRCMP
:
10498 return fold_builtin_strcmp (arg0
, arg1
);
10500 case BUILT_IN_STRPBRK
:
10501 return fold_builtin_strpbrk (arg0
, arg1
, type
);
10503 case BUILT_IN_EXPECT
:
10504 return fold_builtin_expect (arg0
, arg1
);
10506 CASE_FLT_FN (BUILT_IN_POW
):
10507 return fold_builtin_pow (fndecl
, arg0
, arg1
, type
);
10509 CASE_FLT_FN (BUILT_IN_POWI
):
10510 return fold_builtin_powi (fndecl
, arg0
, arg1
, type
);
10512 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
10513 return fold_builtin_copysign (fndecl
, arg0
, arg1
, type
);
10515 CASE_FLT_FN (BUILT_IN_FMIN
):
10516 return fold_builtin_fmin_fmax (arg0
, arg1
, type
, /*max=*/false);
10518 CASE_FLT_FN (BUILT_IN_FMAX
):
10519 return fold_builtin_fmin_fmax (arg0
, arg1
, type
, /*max=*/true);
10521 case BUILT_IN_ISGREATER
:
10522 return fold_builtin_unordered_cmp (fndecl
, arg0
, arg1
, UNLE_EXPR
, LE_EXPR
);
10523 case BUILT_IN_ISGREATEREQUAL
:
10524 return fold_builtin_unordered_cmp (fndecl
, arg0
, arg1
, UNLT_EXPR
, LT_EXPR
);
10525 case BUILT_IN_ISLESS
:
10526 return fold_builtin_unordered_cmp (fndecl
, arg0
, arg1
, UNGE_EXPR
, GE_EXPR
);
10527 case BUILT_IN_ISLESSEQUAL
:
10528 return fold_builtin_unordered_cmp (fndecl
, arg0
, arg1
, UNGT_EXPR
, GT_EXPR
);
10529 case BUILT_IN_ISLESSGREATER
:
10530 return fold_builtin_unordered_cmp (fndecl
, arg0
, arg1
, UNEQ_EXPR
, EQ_EXPR
);
10531 case BUILT_IN_ISUNORDERED
:
10532 return fold_builtin_unordered_cmp (fndecl
, arg0
, arg1
, UNORDERED_EXPR
,
10535 /* We do the folding for va_start in the expander. */
10536 case BUILT_IN_VA_START
:
10539 case BUILT_IN_SPRINTF
:
10540 return fold_builtin_sprintf (arg0
, arg1
, NULL_TREE
, ignore
);
10542 case BUILT_IN_OBJECT_SIZE
:
10543 return fold_builtin_object_size (arg0
, arg1
);
10545 case BUILT_IN_PRINTF
:
10546 case BUILT_IN_PRINTF_UNLOCKED
:
10547 case BUILT_IN_VPRINTF
:
10548 return fold_builtin_printf (fndecl
, arg0
, arg1
, ignore
, fcode
);
10550 case BUILT_IN_PRINTF_CHK
:
10551 case BUILT_IN_VPRINTF_CHK
:
10552 if (!validate_arg (arg0
, INTEGER_TYPE
)
10553 || TREE_SIDE_EFFECTS (arg0
))
10556 return fold_builtin_printf (fndecl
, arg1
, NULL_TREE
, ignore
, fcode
);
10559 case BUILT_IN_FPRINTF
:
10560 case BUILT_IN_FPRINTF_UNLOCKED
:
10561 case BUILT_IN_VFPRINTF
:
10562 return fold_builtin_fprintf (fndecl
, arg0
, arg1
, NULL_TREE
,
10571 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10572 and ARG2. IGNORE is true if the result of the function call is ignored.
10573 This function returns NULL_TREE if no simplification was possible. */
10576 fold_builtin_3 (tree fndecl
, tree arg0
, tree arg1
, tree arg2
, bool ignore
)
10578 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
10579 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10583 CASE_FLT_FN (BUILT_IN_SINCOS
):
10584 return fold_builtin_sincos (arg0
, arg1
, arg2
);
10586 CASE_FLT_FN (BUILT_IN_FMA
):
10587 if (validate_arg (arg0
, REAL_TYPE
)
10588 && validate_arg(arg1
, REAL_TYPE
)
10589 && validate_arg(arg2
, REAL_TYPE
))
10590 return do_mpfr_arg3 (arg0
, arg1
, arg2
, type
, mpfr_fma
);
10593 CASE_FLT_FN (BUILT_IN_REMQUO
):
10594 if (validate_arg (arg0
, REAL_TYPE
)
10595 && validate_arg(arg1
, REAL_TYPE
)
10596 && validate_arg(arg2
, POINTER_TYPE
))
10597 return do_mpfr_remquo (arg0
, arg1
, arg2
);
10600 case BUILT_IN_MEMSET
:
10601 return fold_builtin_memset (arg0
, arg1
, arg2
, type
, ignore
);
10603 case BUILT_IN_BCOPY
:
10604 return fold_builtin_memory_op (arg1
, arg0
, arg2
, void_type_node
, true, /*endp=*/3);
10606 case BUILT_IN_MEMCPY
:
10607 return fold_builtin_memory_op (arg0
, arg1
, arg2
, type
, ignore
, /*endp=*/0);
10609 case BUILT_IN_MEMPCPY
:
10610 return fold_builtin_memory_op (arg0
, arg1
, arg2
, type
, ignore
, /*endp=*/1);
10612 case BUILT_IN_MEMMOVE
:
10613 return fold_builtin_memory_op (arg0
, arg1
, arg2
, type
, ignore
, /*endp=*/3);
10615 case BUILT_IN_STRNCAT
:
10616 return fold_builtin_strncat (arg0
, arg1
, arg2
);
10618 case BUILT_IN_STRNCPY
:
10619 return fold_builtin_strncpy (fndecl
, arg0
, arg1
, arg2
, NULL_TREE
);
10621 case BUILT_IN_STRNCMP
:
10622 return fold_builtin_strncmp (arg0
, arg1
, arg2
);
10624 case BUILT_IN_MEMCHR
:
10625 return fold_builtin_memchr (arg0
, arg1
, arg2
, type
);
10627 case BUILT_IN_BCMP
:
10628 case BUILT_IN_MEMCMP
:
10629 return fold_builtin_memcmp (arg0
, arg1
, arg2
);;
10631 case BUILT_IN_SPRINTF
:
10632 return fold_builtin_sprintf (arg0
, arg1
, arg2
, ignore
);
10634 case BUILT_IN_STRCPY_CHK
:
10635 case BUILT_IN_STPCPY_CHK
:
10636 return fold_builtin_stxcpy_chk (fndecl
, arg0
, arg1
, arg2
, NULL_TREE
,
10639 case BUILT_IN_STRCAT_CHK
:
10640 return fold_builtin_strcat_chk (fndecl
, arg0
, arg1
, arg2
);
10642 case BUILT_IN_PRINTF_CHK
:
10643 case BUILT_IN_VPRINTF_CHK
:
10644 if (!validate_arg (arg0
, INTEGER_TYPE
)
10645 || TREE_SIDE_EFFECTS (arg0
))
10648 return fold_builtin_printf (fndecl
, arg1
, arg2
, ignore
, fcode
);
10651 case BUILT_IN_FPRINTF
:
10652 case BUILT_IN_FPRINTF_UNLOCKED
:
10653 case BUILT_IN_VFPRINTF
:
10654 return fold_builtin_fprintf (fndecl
, arg0
, arg1
, arg2
, ignore
, fcode
);
10656 case BUILT_IN_FPRINTF_CHK
:
10657 case BUILT_IN_VFPRINTF_CHK
:
10658 if (!validate_arg (arg1
, INTEGER_TYPE
)
10659 || TREE_SIDE_EFFECTS (arg1
))
10662 return fold_builtin_fprintf (fndecl
, arg0
, arg2
, NULL_TREE
,
10671 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10672 ARG2, and ARG3. IGNORE is true if the result of the function call is
10673 ignored. This function returns NULL_TREE if no simplification was
10677 fold_builtin_4 (tree fndecl
, tree arg0
, tree arg1
, tree arg2
, tree arg3
,
10680 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10684 case BUILT_IN_MEMCPY_CHK
:
10685 case BUILT_IN_MEMPCPY_CHK
:
10686 case BUILT_IN_MEMMOVE_CHK
:
10687 case BUILT_IN_MEMSET_CHK
:
10688 return fold_builtin_memory_chk (fndecl
, arg0
, arg1
, arg2
, arg3
,
10690 DECL_FUNCTION_CODE (fndecl
));
10692 case BUILT_IN_STRNCPY_CHK
:
10693 return fold_builtin_strncpy_chk (arg0
, arg1
, arg2
, arg3
, NULL_TREE
);
10695 case BUILT_IN_STRNCAT_CHK
:
10696 return fold_builtin_strncat_chk (fndecl
, arg0
, arg1
, arg2
, arg3
);
10698 case BUILT_IN_FPRINTF_CHK
:
10699 case BUILT_IN_VFPRINTF_CHK
:
10700 if (!validate_arg (arg1
, INTEGER_TYPE
)
10701 || TREE_SIDE_EFFECTS (arg1
))
10704 return fold_builtin_fprintf (fndecl
, arg0
, arg2
, arg3
,
10714 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10715 arguments, where NARGS <= 4. IGNORE is true if the result of the
10716 function call is ignored. This function returns NULL_TREE if no
10717 simplification was possible. Note that this only folds builtins with
10718 fixed argument patterns. Foldings that do varargs-to-varargs
10719 transformations, or that match calls with more than 4 arguments,
10720 need to be handled with fold_builtin_varargs instead. */
10722 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10725 fold_builtin_n (tree fndecl
, tree
*args
, int nargs
, bool ignore
)
10727 tree ret
= NULL_TREE
;
10732 ret
= fold_builtin_0 (fndecl
, ignore
);
10735 ret
= fold_builtin_1 (fndecl
, args
[0], ignore
);
10738 ret
= fold_builtin_2 (fndecl
, args
[0], args
[1], ignore
);
10741 ret
= fold_builtin_3 (fndecl
, args
[0], args
[1], args
[2], ignore
);
10744 ret
= fold_builtin_4 (fndecl
, args
[0], args
[1], args
[2], args
[3],
10752 ret
= build1 (NOP_EXPR
, TREE_TYPE (ret
), ret
);
10753 TREE_NO_WARNING (ret
) = 1;
10759 /* Builtins with folding operations that operate on "..." arguments
10760 need special handling; we need to store the arguments in a convenient
10761 data structure before attempting any folding. Fortunately there are
10762 only a few builtins that fall into this category. FNDECL is the
10763 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10764 result of the function call is ignored. */
10767 fold_builtin_varargs (tree fndecl
, tree exp
, bool ignore ATTRIBUTE_UNUSED
)
10769 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10770 tree ret
= NULL_TREE
;
10774 case BUILT_IN_SPRINTF_CHK
:
10775 case BUILT_IN_VSPRINTF_CHK
:
10776 ret
= fold_builtin_sprintf_chk (exp
, fcode
);
10779 case BUILT_IN_SNPRINTF_CHK
:
10780 case BUILT_IN_VSNPRINTF_CHK
:
10781 ret
= fold_builtin_snprintf_chk (exp
, NULL_TREE
, fcode
);
10784 case BUILT_IN_FPCLASSIFY
:
10785 ret
= fold_builtin_fpclassify (exp
);
10793 ret
= build1 (NOP_EXPR
, TREE_TYPE (ret
), ret
);
10794 TREE_NO_WARNING (ret
) = 1;
10800 /* A wrapper function for builtin folding that prevents warnings for
10801 "statement without effect" and the like, caused by removing the
10802 call node earlier than the warning is generated. */
10805 fold_call_expr (tree exp
, bool ignore
)
10807 tree ret
= NULL_TREE
;
10808 tree fndecl
= get_callee_fndecl (exp
);
10810 && TREE_CODE (fndecl
) == FUNCTION_DECL
10811 && DECL_BUILT_IN (fndecl
)
10812 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10813 yet. Defer folding until we see all the arguments
10814 (after inlining). */
10815 && !CALL_EXPR_VA_ARG_PACK (exp
))
10817 int nargs
= call_expr_nargs (exp
);
10819 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10820 instead last argument is __builtin_va_arg_pack (). Defer folding
10821 even in that case, until arguments are finalized. */
10822 if (nargs
&& TREE_CODE (CALL_EXPR_ARG (exp
, nargs
- 1)) == CALL_EXPR
)
10824 tree fndecl2
= get_callee_fndecl (CALL_EXPR_ARG (exp
, nargs
- 1));
10826 && TREE_CODE (fndecl2
) == FUNCTION_DECL
10827 && DECL_BUILT_IN_CLASS (fndecl2
) == BUILT_IN_NORMAL
10828 && DECL_FUNCTION_CODE (fndecl2
) == BUILT_IN_VA_ARG_PACK
)
10832 /* FIXME: Don't use a list in this interface. */
10833 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
10834 return targetm
.fold_builtin (fndecl
, CALL_EXPR_ARGS (exp
), ignore
);
10837 if (nargs
<= MAX_ARGS_TO_FOLD_BUILTIN
)
10839 tree
*args
= CALL_EXPR_ARGP (exp
);
10840 ret
= fold_builtin_n (fndecl
, args
, nargs
, ignore
);
10843 ret
= fold_builtin_varargs (fndecl
, exp
, ignore
);
10846 /* Propagate location information from original call to
10847 expansion of builtin. Otherwise things like
10848 maybe_emit_chk_warning, that operate on the expansion
10849 of a builtin, will use the wrong location information. */
10850 if (CAN_HAVE_LOCATION_P (exp
) && EXPR_HAS_LOCATION (exp
))
10852 tree realret
= ret
;
10853 if (TREE_CODE (ret
) == NOP_EXPR
)
10854 realret
= TREE_OPERAND (ret
, 0);
10855 if (CAN_HAVE_LOCATION_P (realret
)
10856 && !EXPR_HAS_LOCATION (realret
))
10857 SET_EXPR_LOCATION (realret
, EXPR_LOCATION (exp
));
10867 /* Conveniently construct a function call expression. FNDECL names the
10868 function to be called and ARGLIST is a TREE_LIST of arguments. */
10871 build_function_call_expr (tree fndecl
, tree arglist
)
10873 tree fntype
= TREE_TYPE (fndecl
);
10874 tree fn
= build1 (ADDR_EXPR
, build_pointer_type (fntype
), fndecl
);
10875 int n
= list_length (arglist
);
10876 tree
*argarray
= (tree
*) alloca (n
* sizeof (tree
));
10879 for (i
= 0; i
< n
; i
++, arglist
= TREE_CHAIN (arglist
))
10880 argarray
[i
] = TREE_VALUE (arglist
);
10881 return fold_builtin_call_array (TREE_TYPE (fntype
), fn
, n
, argarray
);
10884 /* Conveniently construct a function call expression. FNDECL names the
10885 function to be called, N is the number of arguments, and the "..."
10886 parameters are the argument expressions. */
10889 build_call_expr (tree fndecl
, int n
, ...)
10892 tree fntype
= TREE_TYPE (fndecl
);
10893 tree fn
= build1 (ADDR_EXPR
, build_pointer_type (fntype
), fndecl
);
10894 tree
*argarray
= (tree
*) alloca (n
* sizeof (tree
));
10898 for (i
= 0; i
< n
; i
++)
10899 argarray
[i
] = va_arg (ap
, tree
);
10901 return fold_builtin_call_array (TREE_TYPE (fntype
), fn
, n
, argarray
);
10904 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10905 N arguments are passed in the array ARGARRAY. */
10908 fold_builtin_call_array (tree type
,
10913 tree ret
= NULL_TREE
;
10917 if (TREE_CODE (fn
) == ADDR_EXPR
)
10919 tree fndecl
= TREE_OPERAND (fn
, 0);
10920 if (TREE_CODE (fndecl
) == FUNCTION_DECL
10921 && DECL_BUILT_IN (fndecl
))
10923 /* If last argument is __builtin_va_arg_pack (), arguments to this
10924 function are not finalized yet. Defer folding until they are. */
10925 if (n
&& TREE_CODE (argarray
[n
- 1]) == CALL_EXPR
)
10927 tree fndecl2
= get_callee_fndecl (argarray
[n
- 1]);
10929 && TREE_CODE (fndecl2
) == FUNCTION_DECL
10930 && DECL_BUILT_IN_CLASS (fndecl2
) == BUILT_IN_NORMAL
10931 && DECL_FUNCTION_CODE (fndecl2
) == BUILT_IN_VA_ARG_PACK
)
10932 return build_call_array (type
, fn
, n
, argarray
);
10934 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
10936 tree arglist
= NULL_TREE
;
10937 for (i
= n
- 1; i
>= 0; i
--)
10938 arglist
= tree_cons (NULL_TREE
, argarray
[i
], arglist
);
10939 ret
= targetm
.fold_builtin (fndecl
, arglist
, false);
10943 else if (n
<= MAX_ARGS_TO_FOLD_BUILTIN
)
10945 /* First try the transformations that don't require consing up
10947 ret
= fold_builtin_n (fndecl
, argarray
, n
, false);
10952 /* If we got this far, we need to build an exp. */
10953 exp
= build_call_array (type
, fn
, n
, argarray
);
10954 ret
= fold_builtin_varargs (fndecl
, exp
, false);
10955 return ret
? ret
: exp
;
10959 return build_call_array (type
, fn
, n
, argarray
);
10962 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10963 along with N new arguments specified as the "..." parameters. SKIP
10964 is the number of arguments in EXP to be omitted. This function is used
10965 to do varargs-to-varargs transformations. */
10968 rewrite_call_expr (tree exp
, int skip
, tree fndecl
, int n
, ...)
10970 int oldnargs
= call_expr_nargs (exp
);
10971 int nargs
= oldnargs
- skip
+ n
;
10972 tree fntype
= TREE_TYPE (fndecl
);
10973 tree fn
= build1 (ADDR_EXPR
, build_pointer_type (fntype
), fndecl
);
10981 buffer
= XALLOCAVEC (tree
, nargs
);
10983 for (i
= 0; i
< n
; i
++)
10984 buffer
[i
] = va_arg (ap
, tree
);
10986 for (j
= skip
; j
< oldnargs
; j
++, i
++)
10987 buffer
[i
] = CALL_EXPR_ARG (exp
, j
);
10990 buffer
= CALL_EXPR_ARGP (exp
) + skip
;
10992 return fold (build_call_array (TREE_TYPE (exp
), fn
, nargs
, buffer
));
10995 /* Validate a single argument ARG against a tree code CODE representing
10999 validate_arg (const_tree arg
, enum tree_code code
)
11003 else if (code
== POINTER_TYPE
)
11004 return POINTER_TYPE_P (TREE_TYPE (arg
));
11005 else if (code
== INTEGER_TYPE
)
11006 return INTEGRAL_TYPE_P (TREE_TYPE (arg
));
11007 return code
== TREE_CODE (TREE_TYPE (arg
));
11010 /* This function validates the types of a function call argument list
11011 against a specified list of tree_codes. If the last specifier is a 0,
11012 that represents an ellipses, otherwise the last specifier must be a
11015 This is the GIMPLE version of validate_arglist. Eventually we want to
11016 completely convert builtins.c to work from GIMPLEs and the tree based
11017 validate_arglist will then be removed. */
11020 validate_gimple_arglist (const_gimple call
, ...)
11022 enum tree_code code
;
11028 va_start (ap
, call
);
11033 code
= va_arg (ap
, enum tree_code
);
11037 /* This signifies an ellipses, any further arguments are all ok. */
11041 /* This signifies an endlink, if no arguments remain, return
11042 true, otherwise return false. */
11043 res
= (i
== gimple_call_num_args (call
));
11046 /* If no parameters remain or the parameter's code does not
11047 match the specified code, return false. Otherwise continue
11048 checking any remaining arguments. */
11049 arg
= gimple_call_arg (call
, i
++);
11050 if (!validate_arg (arg
, code
))
11057 /* We need gotos here since we can only have one VA_CLOSE in a
11065 /* This function validates the types of a function call argument list
11066 against a specified list of tree_codes. If the last specifier is a 0,
11067 that represents an ellipses, otherwise the last specifier must be a
11071 validate_arglist (const_tree callexpr
, ...)
11073 enum tree_code code
;
11076 const_call_expr_arg_iterator iter
;
11079 va_start (ap
, callexpr
);
11080 init_const_call_expr_arg_iterator (callexpr
, &iter
);
11084 code
= va_arg (ap
, enum tree_code
);
11088 /* This signifies an ellipses, any further arguments are all ok. */
11092 /* This signifies an endlink, if no arguments remain, return
11093 true, otherwise return false. */
11094 res
= !more_const_call_expr_args_p (&iter
);
11097 /* If no parameters remain or the parameter's code does not
11098 match the specified code, return false. Otherwise continue
11099 checking any remaining arguments. */
11100 arg
= next_const_call_expr_arg (&iter
);
11101 if (!validate_arg (arg
, code
))
11108 /* We need gotos here since we can only have one VA_CLOSE in a
11116 /* Default target-specific builtin expander that does nothing. */
11119 default_expand_builtin (tree exp ATTRIBUTE_UNUSED
,
11120 rtx target ATTRIBUTE_UNUSED
,
11121 rtx subtarget ATTRIBUTE_UNUSED
,
11122 enum machine_mode mode ATTRIBUTE_UNUSED
,
11123 int ignore ATTRIBUTE_UNUSED
)
11128 /* Returns true is EXP represents data that would potentially reside
11129 in a readonly section. */
11132 readonly_data_expr (tree exp
)
11136 if (TREE_CODE (exp
) != ADDR_EXPR
)
11139 exp
= get_base_address (TREE_OPERAND (exp
, 0));
11143 /* Make sure we call decl_readonly_section only for trees it
11144 can handle (since it returns true for everything it doesn't
11146 if (TREE_CODE (exp
) == STRING_CST
11147 || TREE_CODE (exp
) == CONSTRUCTOR
11148 || (TREE_CODE (exp
) == VAR_DECL
&& TREE_STATIC (exp
)))
11149 return decl_readonly_section (exp
, 0);
11154 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11155 to the call, and TYPE is its return type.
11157 Return NULL_TREE if no simplification was possible, otherwise return the
11158 simplified form of the call as a tree.
11160 The simplified form may be a constant or other expression which
11161 computes the same value, but in a more efficient manner (including
11162 calls to other builtin functions).
11164 The call may contain arguments which need to be evaluated, but
11165 which are not useful to determine the result of the call. In
11166 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11167 COMPOUND_EXPR will be an argument which must be evaluated.
11168 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11169 COMPOUND_EXPR in the chain will contain the tree for the simplified
11170 form of the builtin function call. */
11173 fold_builtin_strstr (tree s1
, tree s2
, tree type
)
11175 if (!validate_arg (s1
, POINTER_TYPE
)
11176 || !validate_arg (s2
, POINTER_TYPE
))
11181 const char *p1
, *p2
;
11183 p2
= c_getstr (s2
);
11187 p1
= c_getstr (s1
);
11190 const char *r
= strstr (p1
, p2
);
11194 return build_int_cst (TREE_TYPE (s1
), 0);
11196 /* Return an offset into the constant string argument. */
11197 tem
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (s1
),
11198 s1
, size_int (r
- p1
));
11199 return fold_convert (type
, tem
);
11202 /* The argument is const char *, and the result is char *, so we need
11203 a type conversion here to avoid a warning. */
11205 return fold_convert (type
, s1
);
11210 fn
= implicit_built_in_decls
[BUILT_IN_STRCHR
];
11214 /* New argument list transforming strstr(s1, s2) to
11215 strchr(s1, s2[0]). */
11216 return build_call_expr (fn
, 2, s1
, build_int_cst (NULL_TREE
, p2
[0]));
11220 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11221 the call, and TYPE is its return type.
11223 Return NULL_TREE if no simplification was possible, otherwise return the
11224 simplified form of the call as a tree.
11226 The simplified form may be a constant or other expression which
11227 computes the same value, but in a more efficient manner (including
11228 calls to other builtin functions).
11230 The call may contain arguments which need to be evaluated, but
11231 which are not useful to determine the result of the call. In
11232 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11233 COMPOUND_EXPR will be an argument which must be evaluated.
11234 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11235 COMPOUND_EXPR in the chain will contain the tree for the simplified
11236 form of the builtin function call. */
11239 fold_builtin_strchr (tree s1
, tree s2
, tree type
)
11241 if (!validate_arg (s1
, POINTER_TYPE
)
11242 || !validate_arg (s2
, INTEGER_TYPE
))
11248 if (TREE_CODE (s2
) != INTEGER_CST
)
11251 p1
= c_getstr (s1
);
11258 if (target_char_cast (s2
, &c
))
11261 r
= strchr (p1
, c
);
11264 return build_int_cst (TREE_TYPE (s1
), 0);
11266 /* Return an offset into the constant string argument. */
11267 tem
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (s1
),
11268 s1
, size_int (r
- p1
));
11269 return fold_convert (type
, tem
);
11275 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11276 the call, and TYPE is its return type.
11278 Return NULL_TREE if no simplification was possible, otherwise return the
11279 simplified form of the call as a tree.
11281 The simplified form may be a constant or other expression which
11282 computes the same value, but in a more efficient manner (including
11283 calls to other builtin functions).
11285 The call may contain arguments which need to be evaluated, but
11286 which are not useful to determine the result of the call. In
11287 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11288 COMPOUND_EXPR will be an argument which must be evaluated.
11289 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11290 COMPOUND_EXPR in the chain will contain the tree for the simplified
11291 form of the builtin function call. */
11294 fold_builtin_strrchr (tree s1
, tree s2
, tree type
)
11296 if (!validate_arg (s1
, POINTER_TYPE
)
11297 || !validate_arg (s2
, INTEGER_TYPE
))
11304 if (TREE_CODE (s2
) != INTEGER_CST
)
11307 p1
= c_getstr (s1
);
11314 if (target_char_cast (s2
, &c
))
11317 r
= strrchr (p1
, c
);
11320 return build_int_cst (TREE_TYPE (s1
), 0);
11322 /* Return an offset into the constant string argument. */
11323 tem
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (s1
),
11324 s1
, size_int (r
- p1
));
11325 return fold_convert (type
, tem
);
11328 if (! integer_zerop (s2
))
11331 fn
= implicit_built_in_decls
[BUILT_IN_STRCHR
];
11335 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11336 return build_call_expr (fn
, 2, s1
, s2
);
11340 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11341 to the call, and TYPE is its return type.
11343 Return NULL_TREE if no simplification was possible, otherwise return the
11344 simplified form of the call as a tree.
11346 The simplified form may be a constant or other expression which
11347 computes the same value, but in a more efficient manner (including
11348 calls to other builtin functions).
11350 The call may contain arguments which need to be evaluated, but
11351 which are not useful to determine the result of the call. In
11352 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11353 COMPOUND_EXPR will be an argument which must be evaluated.
11354 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11355 COMPOUND_EXPR in the chain will contain the tree for the simplified
11356 form of the builtin function call. */
11359 fold_builtin_strpbrk (tree s1
, tree s2
, tree type
)
11361 if (!validate_arg (s1
, POINTER_TYPE
)
11362 || !validate_arg (s2
, POINTER_TYPE
))
11367 const char *p1
, *p2
;
11369 p2
= c_getstr (s2
);
11373 p1
= c_getstr (s1
);
11376 const char *r
= strpbrk (p1
, p2
);
11380 return build_int_cst (TREE_TYPE (s1
), 0);
11382 /* Return an offset into the constant string argument. */
11383 tem
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (s1
),
11384 s1
, size_int (r
- p1
));
11385 return fold_convert (type
, tem
);
11389 /* strpbrk(x, "") == NULL.
11390 Evaluate and ignore s1 in case it had side-effects. */
11391 return omit_one_operand (TREE_TYPE (s1
), integer_zero_node
, s1
);
11394 return NULL_TREE
; /* Really call strpbrk. */
11396 fn
= implicit_built_in_decls
[BUILT_IN_STRCHR
];
11400 /* New argument list transforming strpbrk(s1, s2) to
11401 strchr(s1, s2[0]). */
11402 return build_call_expr (fn
, 2, s1
, build_int_cst (NULL_TREE
, p2
[0]));
11406 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11409 Return NULL_TREE if no simplification was possible, otherwise return the
11410 simplified form of the call as a tree.
11412 The simplified form may be a constant or other expression which
11413 computes the same value, but in a more efficient manner (including
11414 calls to other builtin functions).
11416 The call may contain arguments which need to be evaluated, but
11417 which are not useful to determine the result of the call. In
11418 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11419 COMPOUND_EXPR will be an argument which must be evaluated.
11420 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11421 COMPOUND_EXPR in the chain will contain the tree for the simplified
11422 form of the builtin function call. */
11425 fold_builtin_strcat (tree dst
, tree src
)
11427 if (!validate_arg (dst
, POINTER_TYPE
)
11428 || !validate_arg (src
, POINTER_TYPE
))
11432 const char *p
= c_getstr (src
);
11434 /* If the string length is zero, return the dst parameter. */
11435 if (p
&& *p
== '\0')
11442 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11443 arguments to the call.
11445 Return NULL_TREE if no simplification was possible, otherwise return the
11446 simplified form of the call as a tree.
11448 The simplified form may be a constant or other expression which
11449 computes the same value, but in a more efficient manner (including
11450 calls to other builtin functions).
11452 The call may contain arguments which need to be evaluated, but
11453 which are not useful to determine the result of the call. In
11454 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11455 COMPOUND_EXPR will be an argument which must be evaluated.
11456 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11457 COMPOUND_EXPR in the chain will contain the tree for the simplified
11458 form of the builtin function call. */
11461 fold_builtin_strncat (tree dst
, tree src
, tree len
)
11463 if (!validate_arg (dst
, POINTER_TYPE
)
11464 || !validate_arg (src
, POINTER_TYPE
)
11465 || !validate_arg (len
, INTEGER_TYPE
))
11469 const char *p
= c_getstr (src
);
11471 /* If the requested length is zero, or the src parameter string
11472 length is zero, return the dst parameter. */
11473 if (integer_zerop (len
) || (p
&& *p
== '\0'))
11474 return omit_two_operands (TREE_TYPE (dst
), dst
, src
, len
);
11476 /* If the requested len is greater than or equal to the string
11477 length, call strcat. */
11478 if (TREE_CODE (len
) == INTEGER_CST
&& p
11479 && compare_tree_int (len
, strlen (p
)) >= 0)
11481 tree fn
= implicit_built_in_decls
[BUILT_IN_STRCAT
];
11483 /* If the replacement _DECL isn't initialized, don't do the
11488 return build_call_expr (fn
, 2, dst
, src
);
11494 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11497 Return NULL_TREE if no simplification was possible, otherwise return the
11498 simplified form of the call as a tree.
11500 The simplified form may be a constant or other expression which
11501 computes the same value, but in a more efficient manner (including
11502 calls to other builtin functions).
11504 The call may contain arguments which need to be evaluated, but
11505 which are not useful to determine the result of the call. In
11506 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11507 COMPOUND_EXPR will be an argument which must be evaluated.
11508 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11509 COMPOUND_EXPR in the chain will contain the tree for the simplified
11510 form of the builtin function call. */
11513 fold_builtin_strspn (tree s1
, tree s2
)
11515 if (!validate_arg (s1
, POINTER_TYPE
)
11516 || !validate_arg (s2
, POINTER_TYPE
))
11520 const char *p1
= c_getstr (s1
), *p2
= c_getstr (s2
);
11522 /* If both arguments are constants, evaluate at compile-time. */
11525 const size_t r
= strspn (p1
, p2
);
11526 return size_int (r
);
11529 /* If either argument is "", return NULL_TREE. */
11530 if ((p1
&& *p1
== '\0') || (p2
&& *p2
== '\0'))
11531 /* Evaluate and ignore both arguments in case either one has
11533 return omit_two_operands (size_type_node
, size_zero_node
,
11539 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11542 Return NULL_TREE if no simplification was possible, otherwise return the
11543 simplified form of the call as a tree.
11545 The simplified form may be a constant or other expression which
11546 computes the same value, but in a more efficient manner (including
11547 calls to other builtin functions).
11549 The call may contain arguments which need to be evaluated, but
11550 which are not useful to determine the result of the call. In
11551 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11552 COMPOUND_EXPR will be an argument which must be evaluated.
11553 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11554 COMPOUND_EXPR in the chain will contain the tree for the simplified
11555 form of the builtin function call. */
11558 fold_builtin_strcspn (tree s1
, tree s2
)
11560 if (!validate_arg (s1
, POINTER_TYPE
)
11561 || !validate_arg (s2
, POINTER_TYPE
))
11565 const char *p1
= c_getstr (s1
), *p2
= c_getstr (s2
);
11567 /* If both arguments are constants, evaluate at compile-time. */
11570 const size_t r
= strcspn (p1
, p2
);
11571 return size_int (r
);
11574 /* If the first argument is "", return NULL_TREE. */
11575 if (p1
&& *p1
== '\0')
11577 /* Evaluate and ignore argument s2 in case it has
11579 return omit_one_operand (size_type_node
,
11580 size_zero_node
, s2
);
11583 /* If the second argument is "", return __builtin_strlen(s1). */
11584 if (p2
&& *p2
== '\0')
11586 tree fn
= implicit_built_in_decls
[BUILT_IN_STRLEN
];
11588 /* If the replacement _DECL isn't initialized, don't do the
11593 return build_call_expr (fn
, 1, s1
);
11599 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11600 to the call. IGNORE is true if the value returned
11601 by the builtin will be ignored. UNLOCKED is true is true if this
11602 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11603 the known length of the string. Return NULL_TREE if no simplification
11607 fold_builtin_fputs (tree arg0
, tree arg1
, bool ignore
, bool unlocked
, tree len
)
11609 /* If we're using an unlocked function, assume the other unlocked
11610 functions exist explicitly. */
11611 tree
const fn_fputc
= unlocked
? built_in_decls
[BUILT_IN_FPUTC_UNLOCKED
]
11612 : implicit_built_in_decls
[BUILT_IN_FPUTC
];
11613 tree
const fn_fwrite
= unlocked
? built_in_decls
[BUILT_IN_FWRITE_UNLOCKED
]
11614 : implicit_built_in_decls
[BUILT_IN_FWRITE
];
11616 /* If the return value is used, don't do the transformation. */
11620 /* Verify the arguments in the original call. */
11621 if (!validate_arg (arg0
, POINTER_TYPE
)
11622 || !validate_arg (arg1
, POINTER_TYPE
))
11626 len
= c_strlen (arg0
, 0);
11628 /* Get the length of the string passed to fputs. If the length
11629 can't be determined, punt. */
11631 || TREE_CODE (len
) != INTEGER_CST
)
11634 switch (compare_tree_int (len
, 1))
11636 case -1: /* length is 0, delete the call entirely . */
11637 return omit_one_operand (integer_type_node
, integer_zero_node
, arg1
);;
11639 case 0: /* length is 1, call fputc. */
11641 const char *p
= c_getstr (arg0
);
11646 return build_call_expr (fn_fputc
, 2,
11647 build_int_cst (NULL_TREE
, p
[0]), arg1
);
11653 case 1: /* length is greater than 1, call fwrite. */
11655 /* If optimizing for size keep fputs. */
11656 if (optimize_function_for_size_p (cfun
))
11658 /* New argument list transforming fputs(string, stream) to
11659 fwrite(string, 1, len, stream). */
11661 return build_call_expr (fn_fwrite
, 4, arg0
, size_one_node
, len
, arg1
);
11666 gcc_unreachable ();
11671 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11672 produced. False otherwise. This is done so that we don't output the error
11673 or warning twice or three times. */
11676 fold_builtin_next_arg (tree exp
, bool va_start_p
)
11678 tree fntype
= TREE_TYPE (current_function_decl
);
11679 int nargs
= call_expr_nargs (exp
);
11682 if (TYPE_ARG_TYPES (fntype
) == 0
11683 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype
)))
11684 == void_type_node
))
11686 error ("%<va_start%> used in function with fixed args");
11692 if (va_start_p
&& (nargs
!= 2))
11694 error ("wrong number of arguments to function %<va_start%>");
11697 arg
= CALL_EXPR_ARG (exp
, 1);
11699 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11700 when we checked the arguments and if needed issued a warning. */
11705 /* Evidently an out of date version of <stdarg.h>; can't validate
11706 va_start's second argument, but can still work as intended. */
11707 warning (0, "%<__builtin_next_arg%> called without an argument");
11710 else if (nargs
> 1)
11712 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11715 arg
= CALL_EXPR_ARG (exp
, 0);
11718 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11719 or __builtin_next_arg (0) the first time we see it, after checking
11720 the arguments and if needed issuing a warning. */
11721 if (!integer_zerop (arg
))
11723 tree last_parm
= tree_last (DECL_ARGUMENTS (current_function_decl
));
11725 /* Strip off all nops for the sake of the comparison. This
11726 is not quite the same as STRIP_NOPS. It does more.
11727 We must also strip off INDIRECT_EXPR for C++ reference
11729 while (CONVERT_EXPR_P (arg
)
11730 || TREE_CODE (arg
) == INDIRECT_REF
)
11731 arg
= TREE_OPERAND (arg
, 0);
11732 if (arg
!= last_parm
)
11734 /* FIXME: Sometimes with the tree optimizers we can get the
11735 not the last argument even though the user used the last
11736 argument. We just warn and set the arg to be the last
11737 argument so that we will get wrong-code because of
11739 warning (0, "second parameter of %<va_start%> not last named argument");
11742 /* Undefined by C99 7.15.1.4p4 (va_start):
11743 "If the parameter parmN is declared with the register storage
11744 class, with a function or array type, or with a type that is
11745 not compatible with the type that results after application of
11746 the default argument promotions, the behavior is undefined."
11748 else if (DECL_REGISTER (arg
))
11749 warning (0, "undefined behaviour when second parameter of "
11750 "%<va_start%> is declared with %<register%> storage");
11752 /* We want to verify the second parameter just once before the tree
11753 optimizers are run and then avoid keeping it in the tree,
11754 as otherwise we could warn even for correct code like:
11755 void foo (int i, ...)
11756 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11758 CALL_EXPR_ARG (exp
, 1) = integer_zero_node
;
11760 CALL_EXPR_ARG (exp
, 0) = integer_zero_node
;
11766 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11767 ORIG may be null if this is a 2-argument call. We don't attempt to
11768 simplify calls with more than 3 arguments.
11770 Return NULL_TREE if no simplification was possible, otherwise return the
11771 simplified form of the call as a tree. If IGNORED is true, it means that
11772 the caller does not use the returned value of the function. */
11775 fold_builtin_sprintf (tree dest
, tree fmt
, tree orig
, int ignored
)
11778 const char *fmt_str
= NULL
;
11780 /* Verify the required arguments in the original call. We deal with two
11781 types of sprintf() calls: 'sprintf (str, fmt)' and
11782 'sprintf (dest, "%s", orig)'. */
11783 if (!validate_arg (dest
, POINTER_TYPE
)
11784 || !validate_arg (fmt
, POINTER_TYPE
))
11786 if (orig
&& !validate_arg (orig
, POINTER_TYPE
))
11789 /* Check whether the format is a literal string constant. */
11790 fmt_str
= c_getstr (fmt
);
11791 if (fmt_str
== NULL
)
11795 retval
= NULL_TREE
;
11797 if (!init_target_chars ())
11800 /* If the format doesn't contain % args or %%, use strcpy. */
11801 if (strchr (fmt_str
, target_percent
) == NULL
)
11803 tree fn
= implicit_built_in_decls
[BUILT_IN_STRCPY
];
11808 /* Don't optimize sprintf (buf, "abc", ptr++). */
11812 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11813 'format' is known to contain no % formats. */
11814 call
= build_call_expr (fn
, 2, dest
, fmt
);
11816 retval
= build_int_cst (NULL_TREE
, strlen (fmt_str
));
11819 /* If the format is "%s", use strcpy if the result isn't used. */
11820 else if (fmt_str
&& strcmp (fmt_str
, target_percent_s
) == 0)
11823 fn
= implicit_built_in_decls
[BUILT_IN_STRCPY
];
11828 /* Don't crash on sprintf (str1, "%s"). */
11832 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11835 retval
= c_strlen (orig
, 1);
11836 if (!retval
|| TREE_CODE (retval
) != INTEGER_CST
)
11839 call
= build_call_expr (fn
, 2, dest
, orig
);
11842 if (call
&& retval
)
11844 retval
= fold_convert
11845 (TREE_TYPE (TREE_TYPE (implicit_built_in_decls
[BUILT_IN_SPRINTF
])),
11847 return build2 (COMPOUND_EXPR
, TREE_TYPE (retval
), call
, retval
);
11853 /* Expand a call EXP to __builtin_object_size. */
11856 expand_builtin_object_size (tree exp
)
11859 int object_size_type
;
11860 tree fndecl
= get_callee_fndecl (exp
);
11862 if (!validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
11864 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11866 expand_builtin_trap ();
11870 ost
= CALL_EXPR_ARG (exp
, 1);
11873 if (TREE_CODE (ost
) != INTEGER_CST
11874 || tree_int_cst_sgn (ost
) < 0
11875 || compare_tree_int (ost
, 3) > 0)
11877 error ("%Klast argument of %D is not integer constant between 0 and 3",
11879 expand_builtin_trap ();
11883 object_size_type
= tree_low_cst (ost
, 0);
11885 return object_size_type
< 2 ? constm1_rtx
: const0_rtx
;
11888 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11889 FCODE is the BUILT_IN_* to use.
11890 Return NULL_RTX if we failed; the caller should emit a normal call,
11891 otherwise try to get the result in TARGET, if convenient (and in
11892 mode MODE if that's convenient). */
11895 expand_builtin_memory_chk (tree exp
, rtx target
, enum machine_mode mode
,
11896 enum built_in_function fcode
)
11898 tree dest
, src
, len
, size
;
11900 if (!validate_arglist (exp
,
11902 fcode
== BUILT_IN_MEMSET_CHK
11903 ? INTEGER_TYPE
: POINTER_TYPE
,
11904 INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
11907 dest
= CALL_EXPR_ARG (exp
, 0);
11908 src
= CALL_EXPR_ARG (exp
, 1);
11909 len
= CALL_EXPR_ARG (exp
, 2);
11910 size
= CALL_EXPR_ARG (exp
, 3);
11912 if (! host_integerp (size
, 1))
11915 if (host_integerp (len
, 1) || integer_all_onesp (size
))
11919 if (! integer_all_onesp (size
) && tree_int_cst_lt (size
, len
))
11921 warning (0, "%Kcall to %D will always overflow destination buffer",
11922 exp
, get_callee_fndecl (exp
));
11927 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11928 mem{cpy,pcpy,move,set} is available. */
11931 case BUILT_IN_MEMCPY_CHK
:
11932 fn
= built_in_decls
[BUILT_IN_MEMCPY
];
11934 case BUILT_IN_MEMPCPY_CHK
:
11935 fn
= built_in_decls
[BUILT_IN_MEMPCPY
];
11937 case BUILT_IN_MEMMOVE_CHK
:
11938 fn
= built_in_decls
[BUILT_IN_MEMMOVE
];
11940 case BUILT_IN_MEMSET_CHK
:
11941 fn
= built_in_decls
[BUILT_IN_MEMSET
];
11950 fn
= build_call_expr (fn
, 3, dest
, src
, len
);
11951 STRIP_TYPE_NOPS (fn
);
11952 while (TREE_CODE (fn
) == COMPOUND_EXPR
)
11954 expand_expr (TREE_OPERAND (fn
, 0), const0_rtx
, VOIDmode
,
11956 fn
= TREE_OPERAND (fn
, 1);
11958 if (TREE_CODE (fn
) == CALL_EXPR
)
11959 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
11960 return expand_expr (fn
, target
, mode
, EXPAND_NORMAL
);
11962 else if (fcode
== BUILT_IN_MEMSET_CHK
)
11966 unsigned int dest_align
11967 = get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
11969 /* If DEST is not a pointer type, call the normal function. */
11970 if (dest_align
== 0)
11973 /* If SRC and DEST are the same (and not volatile), do nothing. */
11974 if (operand_equal_p (src
, dest
, 0))
11978 if (fcode
!= BUILT_IN_MEMPCPY_CHK
)
11980 /* Evaluate and ignore LEN in case it has side-effects. */
11981 expand_expr (len
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
11982 return expand_expr (dest
, target
, mode
, EXPAND_NORMAL
);
11985 expr
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (dest
), dest
, len
);
11986 return expand_expr (expr
, target
, mode
, EXPAND_NORMAL
);
11989 /* __memmove_chk special case. */
11990 if (fcode
== BUILT_IN_MEMMOVE_CHK
)
11992 unsigned int src_align
11993 = get_pointer_alignment (src
, BIGGEST_ALIGNMENT
);
11995 if (src_align
== 0)
11998 /* If src is categorized for a readonly section we can use
11999 normal __memcpy_chk. */
12000 if (readonly_data_expr (src
))
12002 tree fn
= built_in_decls
[BUILT_IN_MEMCPY_CHK
];
12005 fn
= build_call_expr (fn
, 4, dest
, src
, len
, size
);
12006 STRIP_TYPE_NOPS (fn
);
12007 while (TREE_CODE (fn
) == COMPOUND_EXPR
)
12009 expand_expr (TREE_OPERAND (fn
, 0), const0_rtx
, VOIDmode
,
12011 fn
= TREE_OPERAND (fn
, 1);
12013 if (TREE_CODE (fn
) == CALL_EXPR
)
12014 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
12015 return expand_expr (fn
, target
, mode
, EXPAND_NORMAL
);
12022 /* Emit warning if a buffer overflow is detected at compile time. */
12025 maybe_emit_chk_warning (tree exp
, enum built_in_function fcode
)
12032 case BUILT_IN_STRCPY_CHK
:
12033 case BUILT_IN_STPCPY_CHK
:
12034 /* For __strcat_chk the warning will be emitted only if overflowing
12035 by at least strlen (dest) + 1 bytes. */
12036 case BUILT_IN_STRCAT_CHK
:
12037 len
= CALL_EXPR_ARG (exp
, 1);
12038 size
= CALL_EXPR_ARG (exp
, 2);
12041 case BUILT_IN_STRNCAT_CHK
:
12042 case BUILT_IN_STRNCPY_CHK
:
12043 len
= CALL_EXPR_ARG (exp
, 2);
12044 size
= CALL_EXPR_ARG (exp
, 3);
12046 case BUILT_IN_SNPRINTF_CHK
:
12047 case BUILT_IN_VSNPRINTF_CHK
:
12048 len
= CALL_EXPR_ARG (exp
, 1);
12049 size
= CALL_EXPR_ARG (exp
, 3);
12052 gcc_unreachable ();
12058 if (! host_integerp (size
, 1) || integer_all_onesp (size
))
12063 len
= c_strlen (len
, 1);
12064 if (! len
|| ! host_integerp (len
, 1) || tree_int_cst_lt (len
, size
))
12067 else if (fcode
== BUILT_IN_STRNCAT_CHK
)
12069 tree src
= CALL_EXPR_ARG (exp
, 1);
12070 if (! src
|| ! host_integerp (len
, 1) || tree_int_cst_lt (len
, size
))
12072 src
= c_strlen (src
, 1);
12073 if (! src
|| ! host_integerp (src
, 1))
12075 warning (0, "%Kcall to %D might overflow destination buffer",
12076 exp
, get_callee_fndecl (exp
));
12079 else if (tree_int_cst_lt (src
, size
))
12082 else if (! host_integerp (len
, 1) || ! tree_int_cst_lt (size
, len
))
12085 warning (0, "%Kcall to %D will always overflow destination buffer",
12086 exp
, get_callee_fndecl (exp
));
12089 /* Emit warning if a buffer overflow is detected at compile time
12090 in __sprintf_chk/__vsprintf_chk calls. */
12093 maybe_emit_sprintf_chk_warning (tree exp
, enum built_in_function fcode
)
12095 tree dest
, size
, len
, fmt
, flag
;
12096 const char *fmt_str
;
12097 int nargs
= call_expr_nargs (exp
);
12099 /* Verify the required arguments in the original call. */
12103 dest
= CALL_EXPR_ARG (exp
, 0);
12104 flag
= CALL_EXPR_ARG (exp
, 1);
12105 size
= CALL_EXPR_ARG (exp
, 2);
12106 fmt
= CALL_EXPR_ARG (exp
, 3);
12108 if (! host_integerp (size
, 1) || integer_all_onesp (size
))
12111 /* Check whether the format is a literal string constant. */
12112 fmt_str
= c_getstr (fmt
);
12113 if (fmt_str
== NULL
)
12116 if (!init_target_chars ())
12119 /* If the format doesn't contain % args or %%, we know its size. */
12120 if (strchr (fmt_str
, target_percent
) == 0)
12121 len
= build_int_cstu (size_type_node
, strlen (fmt_str
));
12122 /* If the format is "%s" and first ... argument is a string literal,
12124 else if (fcode
== BUILT_IN_SPRINTF_CHK
12125 && strcmp (fmt_str
, target_percent_s
) == 0)
12131 arg
= CALL_EXPR_ARG (exp
, 4);
12132 if (! POINTER_TYPE_P (TREE_TYPE (arg
)))
12135 len
= c_strlen (arg
, 1);
12136 if (!len
|| ! host_integerp (len
, 1))
12142 if (! tree_int_cst_lt (len
, size
))
12144 warning (0, "%Kcall to %D will always overflow destination buffer",
12145 exp
, get_callee_fndecl (exp
));
12149 /* Emit warning if a free is called with address of a variable. */
12152 maybe_emit_free_warning (tree exp
)
12154 tree arg
= CALL_EXPR_ARG (exp
, 0);
12157 if (TREE_CODE (arg
) != ADDR_EXPR
)
12160 arg
= get_base_address (TREE_OPERAND (arg
, 0));
12161 if (arg
== NULL
|| INDIRECT_REF_P (arg
))
12164 if (SSA_VAR_P (arg
))
12165 warning (0, "%Kattempt to free a non-heap object %qD", exp
, arg
);
12167 warning (0, "%Kattempt to free a non-heap object", exp
);
12170 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12174 fold_builtin_object_size (tree ptr
, tree ost
)
12176 tree ret
= NULL_TREE
;
12177 int object_size_type
;
12179 if (!validate_arg (ptr
, POINTER_TYPE
)
12180 || !validate_arg (ost
, INTEGER_TYPE
))
12185 if (TREE_CODE (ost
) != INTEGER_CST
12186 || tree_int_cst_sgn (ost
) < 0
12187 || compare_tree_int (ost
, 3) > 0)
12190 object_size_type
= tree_low_cst (ost
, 0);
12192 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12193 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12194 and (size_t) 0 for types 2 and 3. */
12195 if (TREE_SIDE_EFFECTS (ptr
))
12196 return build_int_cst_type (size_type_node
, object_size_type
< 2 ? -1 : 0);
12198 if (TREE_CODE (ptr
) == ADDR_EXPR
)
12199 ret
= build_int_cstu (size_type_node
,
12200 compute_builtin_object_size (ptr
, object_size_type
));
12202 else if (TREE_CODE (ptr
) == SSA_NAME
)
12204 unsigned HOST_WIDE_INT bytes
;
12206 /* If object size is not known yet, delay folding until
12207 later. Maybe subsequent passes will help determining
12209 bytes
= compute_builtin_object_size (ptr
, object_size_type
);
12210 if (bytes
!= (unsigned HOST_WIDE_INT
) (object_size_type
< 2
12212 ret
= build_int_cstu (size_type_node
, bytes
);
12217 unsigned HOST_WIDE_INT low
= TREE_INT_CST_LOW (ret
);
12218 HOST_WIDE_INT high
= TREE_INT_CST_HIGH (ret
);
12219 if (fit_double_type (low
, high
, &low
, &high
, TREE_TYPE (ret
)))
12226 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12227 DEST, SRC, LEN, and SIZE are the arguments to the call.
12228 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
12229 code of the builtin. If MAXLEN is not NULL, it is maximum length
12230 passed as third argument. */
12233 fold_builtin_memory_chk (tree fndecl
,
12234 tree dest
, tree src
, tree len
, tree size
,
12235 tree maxlen
, bool ignore
,
12236 enum built_in_function fcode
)
12240 if (!validate_arg (dest
, POINTER_TYPE
)
12241 || !validate_arg (src
,
12242 (fcode
== BUILT_IN_MEMSET_CHK
12243 ? INTEGER_TYPE
: POINTER_TYPE
))
12244 || !validate_arg (len
, INTEGER_TYPE
)
12245 || !validate_arg (size
, INTEGER_TYPE
))
12248 /* If SRC and DEST are the same (and not volatile), return DEST
12249 (resp. DEST+LEN for __mempcpy_chk). */
12250 if (fcode
!= BUILT_IN_MEMSET_CHK
&& operand_equal_p (src
, dest
, 0))
12252 if (fcode
!= BUILT_IN_MEMPCPY_CHK
)
12253 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl
)), dest
, len
);
12256 tree temp
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (dest
), dest
, len
);
12257 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl
)), temp
);
12261 if (! host_integerp (size
, 1))
12264 if (! integer_all_onesp (size
))
12266 if (! host_integerp (len
, 1))
12268 /* If LEN is not constant, try MAXLEN too.
12269 For MAXLEN only allow optimizing into non-_ocs function
12270 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12271 if (maxlen
== NULL_TREE
|| ! host_integerp (maxlen
, 1))
12273 if (fcode
== BUILT_IN_MEMPCPY_CHK
&& ignore
)
12275 /* (void) __mempcpy_chk () can be optimized into
12276 (void) __memcpy_chk (). */
12277 fn
= built_in_decls
[BUILT_IN_MEMCPY_CHK
];
12281 return build_call_expr (fn
, 4, dest
, src
, len
, size
);
12289 if (tree_int_cst_lt (size
, maxlen
))
12294 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12295 mem{cpy,pcpy,move,set} is available. */
12298 case BUILT_IN_MEMCPY_CHK
:
12299 fn
= built_in_decls
[BUILT_IN_MEMCPY
];
12301 case BUILT_IN_MEMPCPY_CHK
:
12302 fn
= built_in_decls
[BUILT_IN_MEMPCPY
];
12304 case BUILT_IN_MEMMOVE_CHK
:
12305 fn
= built_in_decls
[BUILT_IN_MEMMOVE
];
12307 case BUILT_IN_MEMSET_CHK
:
12308 fn
= built_in_decls
[BUILT_IN_MEMSET
];
12317 return build_call_expr (fn
, 3, dest
, src
, len
);
12320 /* Fold a call to the __st[rp]cpy_chk builtin.
12321 DEST, SRC, and SIZE are the arguments to the call.
12322 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12323 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12324 strings passed as second argument. */
12327 fold_builtin_stxcpy_chk (tree fndecl
, tree dest
, tree src
, tree size
,
12328 tree maxlen
, bool ignore
,
12329 enum built_in_function fcode
)
12333 if (!validate_arg (dest
, POINTER_TYPE
)
12334 || !validate_arg (src
, POINTER_TYPE
)
12335 || !validate_arg (size
, INTEGER_TYPE
))
12338 /* If SRC and DEST are the same (and not volatile), return DEST. */
12339 if (fcode
== BUILT_IN_STRCPY_CHK
&& operand_equal_p (src
, dest
, 0))
12340 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl
)), dest
);
12342 if (! host_integerp (size
, 1))
12345 if (! integer_all_onesp (size
))
12347 len
= c_strlen (src
, 1);
12348 if (! len
|| ! host_integerp (len
, 1))
12350 /* If LEN is not constant, try MAXLEN too.
12351 For MAXLEN only allow optimizing into non-_ocs function
12352 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12353 if (maxlen
== NULL_TREE
|| ! host_integerp (maxlen
, 1))
12355 if (fcode
== BUILT_IN_STPCPY_CHK
)
12360 /* If return value of __stpcpy_chk is ignored,
12361 optimize into __strcpy_chk. */
12362 fn
= built_in_decls
[BUILT_IN_STRCPY_CHK
];
12366 return build_call_expr (fn
, 3, dest
, src
, size
);
12369 if (! len
|| TREE_SIDE_EFFECTS (len
))
12372 /* If c_strlen returned something, but not a constant,
12373 transform __strcpy_chk into __memcpy_chk. */
12374 fn
= built_in_decls
[BUILT_IN_MEMCPY_CHK
];
12378 len
= size_binop (PLUS_EXPR
, len
, ssize_int (1));
12379 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl
)),
12380 build_call_expr (fn
, 4,
12381 dest
, src
, len
, size
));
12387 if (! tree_int_cst_lt (maxlen
, size
))
12391 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12392 fn
= built_in_decls
[fcode
== BUILT_IN_STPCPY_CHK
12393 ? BUILT_IN_STPCPY
: BUILT_IN_STRCPY
];
12397 return build_call_expr (fn
, 2, dest
, src
);
12400 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12401 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12402 length passed as third argument. */
12405 fold_builtin_strncpy_chk (tree dest
, tree src
, tree len
, tree size
,
12410 if (!validate_arg (dest
, POINTER_TYPE
)
12411 || !validate_arg (src
, POINTER_TYPE
)
12412 || !validate_arg (len
, INTEGER_TYPE
)
12413 || !validate_arg (size
, INTEGER_TYPE
))
12416 if (! host_integerp (size
, 1))
12419 if (! integer_all_onesp (size
))
12421 if (! host_integerp (len
, 1))
12423 /* If LEN is not constant, try MAXLEN too.
12424 For MAXLEN only allow optimizing into non-_ocs function
12425 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12426 if (maxlen
== NULL_TREE
|| ! host_integerp (maxlen
, 1))
12432 if (tree_int_cst_lt (size
, maxlen
))
12436 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12437 fn
= built_in_decls
[BUILT_IN_STRNCPY
];
12441 return build_call_expr (fn
, 3, dest
, src
, len
);
12444 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12445 are the arguments to the call. */
12448 fold_builtin_strcat_chk (tree fndecl
, tree dest
, tree src
, tree size
)
12453 if (!validate_arg (dest
, POINTER_TYPE
)
12454 || !validate_arg (src
, POINTER_TYPE
)
12455 || !validate_arg (size
, INTEGER_TYPE
))
12458 p
= c_getstr (src
);
12459 /* If the SRC parameter is "", return DEST. */
12460 if (p
&& *p
== '\0')
12461 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl
)), dest
, src
);
12463 if (! host_integerp (size
, 1) || ! integer_all_onesp (size
))
12466 /* If __builtin_strcat_chk is used, assume strcat is available. */
12467 fn
= built_in_decls
[BUILT_IN_STRCAT
];
12471 return build_call_expr (fn
, 2, dest
, src
);
12474 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12478 fold_builtin_strncat_chk (tree fndecl
,
12479 tree dest
, tree src
, tree len
, tree size
)
12484 if (!validate_arg (dest
, POINTER_TYPE
)
12485 || !validate_arg (src
, POINTER_TYPE
)
12486 || !validate_arg (size
, INTEGER_TYPE
)
12487 || !validate_arg (size
, INTEGER_TYPE
))
12490 p
= c_getstr (src
);
12491 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12492 if (p
&& *p
== '\0')
12493 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl
)), dest
, len
);
12494 else if (integer_zerop (len
))
12495 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl
)), dest
, src
);
12497 if (! host_integerp (size
, 1))
12500 if (! integer_all_onesp (size
))
12502 tree src_len
= c_strlen (src
, 1);
12504 && host_integerp (src_len
, 1)
12505 && host_integerp (len
, 1)
12506 && ! tree_int_cst_lt (len
, src_len
))
12508 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12509 fn
= built_in_decls
[BUILT_IN_STRCAT_CHK
];
12513 return build_call_expr (fn
, 3, dest
, src
, size
);
12518 /* If __builtin_strncat_chk is used, assume strncat is available. */
12519 fn
= built_in_decls
[BUILT_IN_STRNCAT
];
12523 return build_call_expr (fn
, 3, dest
, src
, len
);
12526 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12527 a normal call should be emitted rather than expanding the function
12528 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12531 fold_builtin_sprintf_chk (tree exp
, enum built_in_function fcode
)
12533 tree dest
, size
, len
, fn
, fmt
, flag
;
12534 const char *fmt_str
;
12535 int nargs
= call_expr_nargs (exp
);
12537 /* Verify the required arguments in the original call. */
12540 dest
= CALL_EXPR_ARG (exp
, 0);
12541 if (!validate_arg (dest
, POINTER_TYPE
))
12543 flag
= CALL_EXPR_ARG (exp
, 1);
12544 if (!validate_arg (flag
, INTEGER_TYPE
))
12546 size
= CALL_EXPR_ARG (exp
, 2);
12547 if (!validate_arg (size
, INTEGER_TYPE
))
12549 fmt
= CALL_EXPR_ARG (exp
, 3);
12550 if (!validate_arg (fmt
, POINTER_TYPE
))
12553 if (! host_integerp (size
, 1))
12558 if (!init_target_chars ())
12561 /* Check whether the format is a literal string constant. */
12562 fmt_str
= c_getstr (fmt
);
12563 if (fmt_str
!= NULL
)
12565 /* If the format doesn't contain % args or %%, we know the size. */
12566 if (strchr (fmt_str
, target_percent
) == 0)
12568 if (fcode
!= BUILT_IN_SPRINTF_CHK
|| nargs
== 4)
12569 len
= build_int_cstu (size_type_node
, strlen (fmt_str
));
12571 /* If the format is "%s" and first ... argument is a string literal,
12572 we know the size too. */
12573 else if (fcode
== BUILT_IN_SPRINTF_CHK
12574 && strcmp (fmt_str
, target_percent_s
) == 0)
12580 arg
= CALL_EXPR_ARG (exp
, 4);
12581 if (validate_arg (arg
, POINTER_TYPE
))
12583 len
= c_strlen (arg
, 1);
12584 if (! len
|| ! host_integerp (len
, 1))
12591 if (! integer_all_onesp (size
))
12593 if (! len
|| ! tree_int_cst_lt (len
, size
))
12597 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12598 or if format doesn't contain % chars or is "%s". */
12599 if (! integer_zerop (flag
))
12601 if (fmt_str
== NULL
)
12603 if (strchr (fmt_str
, target_percent
) != NULL
12604 && strcmp (fmt_str
, target_percent_s
))
12608 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12609 fn
= built_in_decls
[fcode
== BUILT_IN_VSPRINTF_CHK
12610 ? BUILT_IN_VSPRINTF
: BUILT_IN_SPRINTF
];
12614 return rewrite_call_expr (exp
, 4, fn
, 2, dest
, fmt
);
12617 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
12618 a normal call should be emitted rather than expanding the function
12619 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12620 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12621 passed as second argument. */
12624 fold_builtin_snprintf_chk (tree exp
, tree maxlen
,
12625 enum built_in_function fcode
)
12627 tree dest
, size
, len
, fn
, fmt
, flag
;
12628 const char *fmt_str
;
12630 /* Verify the required arguments in the original call. */
12631 if (call_expr_nargs (exp
) < 5)
12633 dest
= CALL_EXPR_ARG (exp
, 0);
12634 if (!validate_arg (dest
, POINTER_TYPE
))
12636 len
= CALL_EXPR_ARG (exp
, 1);
12637 if (!validate_arg (len
, INTEGER_TYPE
))
12639 flag
= CALL_EXPR_ARG (exp
, 2);
12640 if (!validate_arg (flag
, INTEGER_TYPE
))
12642 size
= CALL_EXPR_ARG (exp
, 3);
12643 if (!validate_arg (size
, INTEGER_TYPE
))
12645 fmt
= CALL_EXPR_ARG (exp
, 4);
12646 if (!validate_arg (fmt
, POINTER_TYPE
))
12649 if (! host_integerp (size
, 1))
12652 if (! integer_all_onesp (size
))
12654 if (! host_integerp (len
, 1))
12656 /* If LEN is not constant, try MAXLEN too.
12657 For MAXLEN only allow optimizing into non-_ocs function
12658 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12659 if (maxlen
== NULL_TREE
|| ! host_integerp (maxlen
, 1))
12665 if (tree_int_cst_lt (size
, maxlen
))
12669 if (!init_target_chars ())
12672 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12673 or if format doesn't contain % chars or is "%s". */
12674 if (! integer_zerop (flag
))
12676 fmt_str
= c_getstr (fmt
);
12677 if (fmt_str
== NULL
)
12679 if (strchr (fmt_str
, target_percent
) != NULL
12680 && strcmp (fmt_str
, target_percent_s
))
12684 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12686 fn
= built_in_decls
[fcode
== BUILT_IN_VSNPRINTF_CHK
12687 ? BUILT_IN_VSNPRINTF
: BUILT_IN_SNPRINTF
];
12691 return rewrite_call_expr (exp
, 5, fn
, 3, dest
, len
, fmt
);
12694 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12695 FMT and ARG are the arguments to the call; we don't fold cases with
12696 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12698 Return NULL_TREE if no simplification was possible, otherwise return the
12699 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12700 code of the function to be simplified. */
12703 fold_builtin_printf (tree fndecl
, tree fmt
, tree arg
, bool ignore
,
12704 enum built_in_function fcode
)
12706 tree fn_putchar
, fn_puts
, newarg
, call
= NULL_TREE
;
12707 const char *fmt_str
= NULL
;
12709 /* If the return value is used, don't do the transformation. */
12713 /* Verify the required arguments in the original call. */
12714 if (!validate_arg (fmt
, POINTER_TYPE
))
12717 /* Check whether the format is a literal string constant. */
12718 fmt_str
= c_getstr (fmt
);
12719 if (fmt_str
== NULL
)
12722 if (fcode
== BUILT_IN_PRINTF_UNLOCKED
)
12724 /* If we're using an unlocked function, assume the other
12725 unlocked functions exist explicitly. */
12726 fn_putchar
= built_in_decls
[BUILT_IN_PUTCHAR_UNLOCKED
];
12727 fn_puts
= built_in_decls
[BUILT_IN_PUTS_UNLOCKED
];
12731 fn_putchar
= implicit_built_in_decls
[BUILT_IN_PUTCHAR
];
12732 fn_puts
= implicit_built_in_decls
[BUILT_IN_PUTS
];
12735 if (!init_target_chars ())
12738 if (strcmp (fmt_str
, target_percent_s
) == 0
12739 || strchr (fmt_str
, target_percent
) == NULL
)
12743 if (strcmp (fmt_str
, target_percent_s
) == 0)
12745 if (fcode
== BUILT_IN_VPRINTF
|| fcode
== BUILT_IN_VPRINTF_CHK
)
12748 if (!arg
|| !validate_arg (arg
, POINTER_TYPE
))
12751 str
= c_getstr (arg
);
12757 /* The format specifier doesn't contain any '%' characters. */
12758 if (fcode
!= BUILT_IN_VPRINTF
&& fcode
!= BUILT_IN_VPRINTF_CHK
12764 /* If the string was "", printf does nothing. */
12765 if (str
[0] == '\0')
12766 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl
)), 0);
12768 /* If the string has length of 1, call putchar. */
12769 if (str
[1] == '\0')
12771 /* Given printf("c"), (where c is any one character,)
12772 convert "c"[0] to an int and pass that to the replacement
12774 newarg
= build_int_cst (NULL_TREE
, str
[0]);
12776 call
= build_call_expr (fn_putchar
, 1, newarg
);
12780 /* If the string was "string\n", call puts("string"). */
12781 size_t len
= strlen (str
);
12782 if ((unsigned char)str
[len
- 1] == target_newline
)
12784 /* Create a NUL-terminated string that's one char shorter
12785 than the original, stripping off the trailing '\n'. */
12786 char *newstr
= XALLOCAVEC (char, len
);
12787 memcpy (newstr
, str
, len
- 1);
12788 newstr
[len
- 1] = 0;
12790 newarg
= build_string_literal (len
, newstr
);
12792 call
= build_call_expr (fn_puts
, 1, newarg
);
12795 /* We'd like to arrange to call fputs(string,stdout) here,
12796 but we need stdout and don't have a way to get it yet. */
12801 /* The other optimizations can be done only on the non-va_list variants. */
12802 else if (fcode
== BUILT_IN_VPRINTF
|| fcode
== BUILT_IN_VPRINTF_CHK
)
12805 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12806 else if (strcmp (fmt_str
, target_percent_s_newline
) == 0)
12808 if (!arg
|| !validate_arg (arg
, POINTER_TYPE
))
12811 call
= build_call_expr (fn_puts
, 1, arg
);
12814 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12815 else if (strcmp (fmt_str
, target_percent_c
) == 0)
12817 if (!arg
|| !validate_arg (arg
, INTEGER_TYPE
))
12820 call
= build_call_expr (fn_putchar
, 1, arg
);
12826 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl
)), call
);
12829 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
12830 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12831 more than 3 arguments, and ARG may be null in the 2-argument case.
12833 Return NULL_TREE if no simplification was possible, otherwise return the
12834 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12835 code of the function to be simplified. */
12838 fold_builtin_fprintf (tree fndecl
, tree fp
, tree fmt
, tree arg
, bool ignore
,
12839 enum built_in_function fcode
)
12841 tree fn_fputc
, fn_fputs
, call
= NULL_TREE
;
12842 const char *fmt_str
= NULL
;
12844 /* If the return value is used, don't do the transformation. */
12848 /* Verify the required arguments in the original call. */
12849 if (!validate_arg (fp
, POINTER_TYPE
))
12851 if (!validate_arg (fmt
, POINTER_TYPE
))
12854 /* Check whether the format is a literal string constant. */
12855 fmt_str
= c_getstr (fmt
);
12856 if (fmt_str
== NULL
)
12859 if (fcode
== BUILT_IN_FPRINTF_UNLOCKED
)
12861 /* If we're using an unlocked function, assume the other
12862 unlocked functions exist explicitly. */
12863 fn_fputc
= built_in_decls
[BUILT_IN_FPUTC_UNLOCKED
];
12864 fn_fputs
= built_in_decls
[BUILT_IN_FPUTS_UNLOCKED
];
12868 fn_fputc
= implicit_built_in_decls
[BUILT_IN_FPUTC
];
12869 fn_fputs
= implicit_built_in_decls
[BUILT_IN_FPUTS
];
12872 if (!init_target_chars ())
12875 /* If the format doesn't contain % args or %%, use strcpy. */
12876 if (strchr (fmt_str
, target_percent
) == NULL
)
12878 if (fcode
!= BUILT_IN_VFPRINTF
&& fcode
!= BUILT_IN_VFPRINTF_CHK
12882 /* If the format specifier was "", fprintf does nothing. */
12883 if (fmt_str
[0] == '\0')
12885 /* If FP has side-effects, just wait until gimplification is
12887 if (TREE_SIDE_EFFECTS (fp
))
12890 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl
)), 0);
12893 /* When "string" doesn't contain %, replace all cases of
12894 fprintf (fp, string) with fputs (string, fp). The fputs
12895 builtin will take care of special cases like length == 1. */
12897 call
= build_call_expr (fn_fputs
, 2, fmt
, fp
);
12900 /* The other optimizations can be done only on the non-va_list variants. */
12901 else if (fcode
== BUILT_IN_VFPRINTF
|| fcode
== BUILT_IN_VFPRINTF_CHK
)
12904 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12905 else if (strcmp (fmt_str
, target_percent_s
) == 0)
12907 if (!arg
|| !validate_arg (arg
, POINTER_TYPE
))
12910 call
= build_call_expr (fn_fputs
, 2, arg
, fp
);
12913 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12914 else if (strcmp (fmt_str
, target_percent_c
) == 0)
12916 if (!arg
|| !validate_arg (arg
, INTEGER_TYPE
))
12919 call
= build_call_expr (fn_fputc
, 2, arg
, fp
);
12924 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl
)), call
);
12927 /* Initialize format string characters in the target charset. */
12930 init_target_chars (void)
12935 target_newline
= lang_hooks
.to_target_charset ('\n');
12936 target_percent
= lang_hooks
.to_target_charset ('%');
12937 target_c
= lang_hooks
.to_target_charset ('c');
12938 target_s
= lang_hooks
.to_target_charset ('s');
12939 if (target_newline
== 0 || target_percent
== 0 || target_c
== 0
12943 target_percent_c
[0] = target_percent
;
12944 target_percent_c
[1] = target_c
;
12945 target_percent_c
[2] = '\0';
12947 target_percent_s
[0] = target_percent
;
12948 target_percent_s
[1] = target_s
;
12949 target_percent_s
[2] = '\0';
12951 target_percent_s_newline
[0] = target_percent
;
12952 target_percent_s_newline
[1] = target_s
;
12953 target_percent_s_newline
[2] = target_newline
;
12954 target_percent_s_newline
[3] = '\0';
12961 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12962 and no overflow/underflow occurred. INEXACT is true if M was not
12963 exactly calculated. TYPE is the tree type for the result. This
12964 function assumes that you cleared the MPFR flags and then
12965 calculated M to see if anything subsequently set a flag prior to
12966 entering this function. Return NULL_TREE if any checks fail. */
12969 do_mpfr_ckconv (mpfr_srcptr m
, tree type
, int inexact
)
12971 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12972 overflow/underflow occurred. If -frounding-math, proceed iff the
12973 result of calling FUNC was exact. */
12974 if (mpfr_number_p (m
) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12975 && (!flag_rounding_math
|| !inexact
))
12977 REAL_VALUE_TYPE rr
;
12979 real_from_mpfr (&rr
, m
, type
, GMP_RNDN
);
12980 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12981 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12982 but the mpft_t is not, then we underflowed in the
12984 if (real_isfinite (&rr
)
12985 && (rr
.cl
== rvc_zero
) == (mpfr_zero_p (m
) != 0))
12987 REAL_VALUE_TYPE rmode
;
12989 real_convert (&rmode
, TYPE_MODE (type
), &rr
);
12990 /* Proceed iff the specified mode can hold the value. */
12991 if (real_identical (&rmode
, &rr
))
12992 return build_real (type
, rmode
);
12998 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12999 FUNC on it and return the resulting value as a tree with type TYPE.
13000 If MIN and/or MAX are not NULL, then the supplied ARG must be
13001 within those bounds. If INCLUSIVE is true, then MIN/MAX are
13002 acceptable values, otherwise they are not. The mpfr precision is
13003 set to the precision of TYPE. We assume that function FUNC returns
13004 zero if the result could be calculated exactly within the requested
13008 do_mpfr_arg1 (tree arg
, tree type
, int (*func
)(mpfr_ptr
, mpfr_srcptr
, mp_rnd_t
),
13009 const REAL_VALUE_TYPE
*min
, const REAL_VALUE_TYPE
*max
,
13012 tree result
= NULL_TREE
;
13016 /* To proceed, MPFR must exactly represent the target floating point
13017 format, which only happens when the target base equals two. */
13018 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
13019 && TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
13021 const REAL_VALUE_TYPE
*const ra
= &TREE_REAL_CST (arg
);
13023 if (real_isfinite (ra
)
13024 && (!min
|| real_compare (inclusive
? GE_EXPR
: GT_EXPR
, ra
, min
))
13025 && (!max
|| real_compare (inclusive
? LE_EXPR
: LT_EXPR
, ra
, max
)))
13027 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
13028 const int prec
= fmt
->p
;
13029 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
13033 mpfr_init2 (m
, prec
);
13034 mpfr_from_real (m
, ra
, GMP_RNDN
);
13035 mpfr_clear_flags ();
13036 inexact
= func (m
, m
, rnd
);
13037 result
= do_mpfr_ckconv (m
, type
, inexact
);
13045 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
13046 FUNC on it and return the resulting value as a tree with type TYPE.
13047 The mpfr precision is set to the precision of TYPE. We assume that
13048 function FUNC returns zero if the result could be calculated
13049 exactly within the requested precision. */
13052 do_mpfr_arg2 (tree arg1
, tree arg2
, tree type
,
13053 int (*func
)(mpfr_ptr
, mpfr_srcptr
, mpfr_srcptr
, mp_rnd_t
))
13055 tree result
= NULL_TREE
;
13060 /* To proceed, MPFR must exactly represent the target floating point
13061 format, which only happens when the target base equals two. */
13062 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
13063 && TREE_CODE (arg1
) == REAL_CST
&& !TREE_OVERFLOW (arg1
)
13064 && TREE_CODE (arg2
) == REAL_CST
&& !TREE_OVERFLOW (arg2
))
13066 const REAL_VALUE_TYPE
*const ra1
= &TREE_REAL_CST (arg1
);
13067 const REAL_VALUE_TYPE
*const ra2
= &TREE_REAL_CST (arg2
);
13069 if (real_isfinite (ra1
) && real_isfinite (ra2
))
13071 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
13072 const int prec
= fmt
->p
;
13073 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
13077 mpfr_inits2 (prec
, m1
, m2
, NULL
);
13078 mpfr_from_real (m1
, ra1
, GMP_RNDN
);
13079 mpfr_from_real (m2
, ra2
, GMP_RNDN
);
13080 mpfr_clear_flags ();
13081 inexact
= func (m1
, m1
, m2
, rnd
);
13082 result
= do_mpfr_ckconv (m1
, type
, inexact
);
13083 mpfr_clears (m1
, m2
, NULL
);
13090 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13091 FUNC on it and return the resulting value as a tree with type TYPE.
13092 The mpfr precision is set to the precision of TYPE. We assume that
13093 function FUNC returns zero if the result could be calculated
13094 exactly within the requested precision. */
13097 do_mpfr_arg3 (tree arg1
, tree arg2
, tree arg3
, tree type
,
13098 int (*func
)(mpfr_ptr
, mpfr_srcptr
, mpfr_srcptr
, mpfr_srcptr
, mp_rnd_t
))
13100 tree result
= NULL_TREE
;
13106 /* To proceed, MPFR must exactly represent the target floating point
13107 format, which only happens when the target base equals two. */
13108 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
13109 && TREE_CODE (arg1
) == REAL_CST
&& !TREE_OVERFLOW (arg1
)
13110 && TREE_CODE (arg2
) == REAL_CST
&& !TREE_OVERFLOW (arg2
)
13111 && TREE_CODE (arg3
) == REAL_CST
&& !TREE_OVERFLOW (arg3
))
13113 const REAL_VALUE_TYPE
*const ra1
= &TREE_REAL_CST (arg1
);
13114 const REAL_VALUE_TYPE
*const ra2
= &TREE_REAL_CST (arg2
);
13115 const REAL_VALUE_TYPE
*const ra3
= &TREE_REAL_CST (arg3
);
13117 if (real_isfinite (ra1
) && real_isfinite (ra2
) && real_isfinite (ra3
))
13119 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
13120 const int prec
= fmt
->p
;
13121 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
13125 mpfr_inits2 (prec
, m1
, m2
, m3
, NULL
);
13126 mpfr_from_real (m1
, ra1
, GMP_RNDN
);
13127 mpfr_from_real (m2
, ra2
, GMP_RNDN
);
13128 mpfr_from_real (m3
, ra3
, GMP_RNDN
);
13129 mpfr_clear_flags ();
13130 inexact
= func (m1
, m1
, m2
, m3
, rnd
);
13131 result
= do_mpfr_ckconv (m1
, type
, inexact
);
13132 mpfr_clears (m1
, m2
, m3
, NULL
);
13139 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13140 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13141 If ARG_SINP and ARG_COSP are NULL then the result is returned
13142 as a complex value.
13143 The type is taken from the type of ARG and is used for setting the
13144 precision of the calculation and results. */
13147 do_mpfr_sincos (tree arg
, tree arg_sinp
, tree arg_cosp
)
13149 tree
const type
= TREE_TYPE (arg
);
13150 tree result
= NULL_TREE
;
13154 /* To proceed, MPFR must exactly represent the target floating point
13155 format, which only happens when the target base equals two. */
13156 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
13157 && TREE_CODE (arg
) == REAL_CST
13158 && !TREE_OVERFLOW (arg
))
13160 const REAL_VALUE_TYPE
*const ra
= &TREE_REAL_CST (arg
);
13162 if (real_isfinite (ra
))
13164 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
13165 const int prec
= fmt
->p
;
13166 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
13167 tree result_s
, result_c
;
13171 mpfr_inits2 (prec
, m
, ms
, mc
, NULL
);
13172 mpfr_from_real (m
, ra
, GMP_RNDN
);
13173 mpfr_clear_flags ();
13174 inexact
= mpfr_sin_cos (ms
, mc
, m
, rnd
);
13175 result_s
= do_mpfr_ckconv (ms
, type
, inexact
);
13176 result_c
= do_mpfr_ckconv (mc
, type
, inexact
);
13177 mpfr_clears (m
, ms
, mc
, NULL
);
13178 if (result_s
&& result_c
)
13180 /* If we are to return in a complex value do so. */
13181 if (!arg_sinp
&& !arg_cosp
)
13182 return build_complex (build_complex_type (type
),
13183 result_c
, result_s
);
13185 /* Dereference the sin/cos pointer arguments. */
13186 arg_sinp
= build_fold_indirect_ref (arg_sinp
);
13187 arg_cosp
= build_fold_indirect_ref (arg_cosp
);
13188 /* Proceed if valid pointer type were passed in. */
13189 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp
)) == TYPE_MAIN_VARIANT (type
)
13190 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp
)) == TYPE_MAIN_VARIANT (type
))
13192 /* Set the values. */
13193 result_s
= fold_build2 (MODIFY_EXPR
, type
, arg_sinp
,
13195 TREE_SIDE_EFFECTS (result_s
) = 1;
13196 result_c
= fold_build2 (MODIFY_EXPR
, type
, arg_cosp
,
13198 TREE_SIDE_EFFECTS (result_c
) = 1;
13199 /* Combine the assignments into a compound expr. */
13200 result
= non_lvalue (fold_build2 (COMPOUND_EXPR
, type
,
13201 result_s
, result_c
));
13209 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13210 two-argument mpfr order N Bessel function FUNC on them and return
13211 the resulting value as a tree with type TYPE. The mpfr precision
13212 is set to the precision of TYPE. We assume that function FUNC
13213 returns zero if the result could be calculated exactly within the
13214 requested precision. */
13216 do_mpfr_bessel_n (tree arg1
, tree arg2
, tree type
,
13217 int (*func
)(mpfr_ptr
, long, mpfr_srcptr
, mp_rnd_t
),
13218 const REAL_VALUE_TYPE
*min
, bool inclusive
)
13220 tree result
= NULL_TREE
;
13225 /* To proceed, MPFR must exactly represent the target floating point
13226 format, which only happens when the target base equals two. */
13227 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
13228 && host_integerp (arg1
, 0)
13229 && TREE_CODE (arg2
) == REAL_CST
&& !TREE_OVERFLOW (arg2
))
13231 const HOST_WIDE_INT n
= tree_low_cst(arg1
, 0);
13232 const REAL_VALUE_TYPE
*const ra
= &TREE_REAL_CST (arg2
);
13235 && real_isfinite (ra
)
13236 && (!min
|| real_compare (inclusive
? GE_EXPR
: GT_EXPR
, ra
, min
)))
13238 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
13239 const int prec
= fmt
->p
;
13240 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
13244 mpfr_init2 (m
, prec
);
13245 mpfr_from_real (m
, ra
, GMP_RNDN
);
13246 mpfr_clear_flags ();
13247 inexact
= func (m
, n
, m
, rnd
);
13248 result
= do_mpfr_ckconv (m
, type
, inexact
);
13256 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13257 the pointer *(ARG_QUO) and return the result. The type is taken
13258 from the type of ARG0 and is used for setting the precision of the
13259 calculation and results. */
13262 do_mpfr_remquo (tree arg0
, tree arg1
, tree arg_quo
)
13264 tree
const type
= TREE_TYPE (arg0
);
13265 tree result
= NULL_TREE
;
13270 /* To proceed, MPFR must exactly represent the target floating point
13271 format, which only happens when the target base equals two. */
13272 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
13273 && TREE_CODE (arg0
) == REAL_CST
&& !TREE_OVERFLOW (arg0
)
13274 && TREE_CODE (arg1
) == REAL_CST
&& !TREE_OVERFLOW (arg1
))
13276 const REAL_VALUE_TYPE
*const ra0
= TREE_REAL_CST_PTR (arg0
);
13277 const REAL_VALUE_TYPE
*const ra1
= TREE_REAL_CST_PTR (arg1
);
13279 if (real_isfinite (ra0
) && real_isfinite (ra1
))
13281 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
13282 const int prec
= fmt
->p
;
13283 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
13288 mpfr_inits2 (prec
, m0
, m1
, NULL
);
13289 mpfr_from_real (m0
, ra0
, GMP_RNDN
);
13290 mpfr_from_real (m1
, ra1
, GMP_RNDN
);
13291 mpfr_clear_flags ();
13292 mpfr_remquo (m0
, &integer_quo
, m0
, m1
, rnd
);
13293 /* Remquo is independent of the rounding mode, so pass
13294 inexact=0 to do_mpfr_ckconv(). */
13295 result_rem
= do_mpfr_ckconv (m0
, type
, /*inexact=*/ 0);
13296 mpfr_clears (m0
, m1
, NULL
);
13299 /* MPFR calculates quo in the host's long so it may
13300 return more bits in quo than the target int can hold
13301 if sizeof(host long) > sizeof(target int). This can
13302 happen even for native compilers in LP64 mode. In
13303 these cases, modulo the quo value with the largest
13304 number that the target int can hold while leaving one
13305 bit for the sign. */
13306 if (sizeof (integer_quo
) * CHAR_BIT
> INT_TYPE_SIZE
)
13307 integer_quo
%= (long)(1UL << (INT_TYPE_SIZE
- 1));
13309 /* Dereference the quo pointer argument. */
13310 arg_quo
= build_fold_indirect_ref (arg_quo
);
13311 /* Proceed iff a valid pointer type was passed in. */
13312 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo
)) == integer_type_node
)
13314 /* Set the value. */
13315 tree result_quo
= fold_build2 (MODIFY_EXPR
,
13316 TREE_TYPE (arg_quo
), arg_quo
,
13317 build_int_cst (NULL
, integer_quo
));
13318 TREE_SIDE_EFFECTS (result_quo
) = 1;
13319 /* Combine the quo assignment with the rem. */
13320 result
= non_lvalue (fold_build2 (COMPOUND_EXPR
, type
,
13321 result_quo
, result_rem
));
13329 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13330 resulting value as a tree with type TYPE. The mpfr precision is
13331 set to the precision of TYPE. We assume that this mpfr function
13332 returns zero if the result could be calculated exactly within the
13333 requested precision. In addition, the integer pointer represented
13334 by ARG_SG will be dereferenced and set to the appropriate signgam
13338 do_mpfr_lgamma_r (tree arg
, tree arg_sg
, tree type
)
13340 tree result
= NULL_TREE
;
13344 /* To proceed, MPFR must exactly represent the target floating point
13345 format, which only happens when the target base equals two. Also
13346 verify ARG is a constant and that ARG_SG is an int pointer. */
13347 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
13348 && TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
)
13349 && TREE_CODE (TREE_TYPE (arg_sg
)) == POINTER_TYPE
13350 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg
))) == integer_type_node
)
13352 const REAL_VALUE_TYPE
*const ra
= TREE_REAL_CST_PTR (arg
);
13354 /* In addition to NaN and Inf, the argument cannot be zero or a
13355 negative integer. */
13356 if (real_isfinite (ra
)
13357 && ra
->cl
!= rvc_zero
13358 && !(real_isneg(ra
) && real_isinteger(ra
, TYPE_MODE (type
))))
13360 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
13361 const int prec
= fmt
->p
;
13362 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
13367 mpfr_init2 (m
, prec
);
13368 mpfr_from_real (m
, ra
, GMP_RNDN
);
13369 mpfr_clear_flags ();
13370 inexact
= mpfr_lgamma (m
, &sg
, m
, rnd
);
13371 result_lg
= do_mpfr_ckconv (m
, type
, inexact
);
13377 /* Dereference the arg_sg pointer argument. */
13378 arg_sg
= build_fold_indirect_ref (arg_sg
);
13379 /* Assign the signgam value into *arg_sg. */
13380 result_sg
= fold_build2 (MODIFY_EXPR
,
13381 TREE_TYPE (arg_sg
), arg_sg
,
13382 build_int_cst (NULL
, sg
));
13383 TREE_SIDE_EFFECTS (result_sg
) = 1;
13384 /* Combine the signgam assignment with the lgamma result. */
13385 result
= non_lvalue (fold_build2 (COMPOUND_EXPR
, type
,
13386 result_sg
, result_lg
));
13395 The functions below provide an alternate interface for folding
13396 builtin function calls presented as GIMPLE_CALL statements rather
13397 than as CALL_EXPRs. The folded result is still expressed as a
13398 tree. There is too much code duplication in the handling of
13399 varargs functions, and a more intrusive re-factoring would permit
13400 better sharing of code between the tree and statement-based
13401 versions of these functions. */
13403 /* Construct a new CALL_EXPR using the tail of the argument list of STMT
13404 along with N new arguments specified as the "..." parameters. SKIP
13405 is the number of arguments in STMT to be omitted. This function is used
13406 to do varargs-to-varargs transformations. */
13409 gimple_rewrite_call_expr (gimple stmt
, int skip
, tree fndecl
, int n
, ...)
13411 int oldnargs
= gimple_call_num_args (stmt
);
13412 int nargs
= oldnargs
- skip
+ n
;
13413 tree fntype
= TREE_TYPE (fndecl
);
13414 tree fn
= build1 (ADDR_EXPR
, build_pointer_type (fntype
), fndecl
);
13419 buffer
= XALLOCAVEC (tree
, nargs
);
13421 for (i
= 0; i
< n
; i
++)
13422 buffer
[i
] = va_arg (ap
, tree
);
13424 for (j
= skip
; j
< oldnargs
; j
++, i
++)
13425 buffer
[i
] = gimple_call_arg (stmt
, j
);
13427 return fold (build_call_array (TREE_TYPE (fntype
), fn
, nargs
, buffer
));
13430 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
13431 a normal call should be emitted rather than expanding the function
13432 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13435 gimple_fold_builtin_sprintf_chk (gimple stmt
, enum built_in_function fcode
)
13437 tree dest
, size
, len
, fn
, fmt
, flag
;
13438 const char *fmt_str
;
13439 int nargs
= gimple_call_num_args (stmt
);
13441 /* Verify the required arguments in the original call. */
13444 dest
= gimple_call_arg (stmt
, 0);
13445 if (!validate_arg (dest
, POINTER_TYPE
))
13447 flag
= gimple_call_arg (stmt
, 1);
13448 if (!validate_arg (flag
, INTEGER_TYPE
))
13450 size
= gimple_call_arg (stmt
, 2);
13451 if (!validate_arg (size
, INTEGER_TYPE
))
13453 fmt
= gimple_call_arg (stmt
, 3);
13454 if (!validate_arg (fmt
, POINTER_TYPE
))
13457 if (! host_integerp (size
, 1))
13462 if (!init_target_chars ())
13465 /* Check whether the format is a literal string constant. */
13466 fmt_str
= c_getstr (fmt
);
13467 if (fmt_str
!= NULL
)
13469 /* If the format doesn't contain % args or %%, we know the size. */
13470 if (strchr (fmt_str
, target_percent
) == 0)
13472 if (fcode
!= BUILT_IN_SPRINTF_CHK
|| nargs
== 4)
13473 len
= build_int_cstu (size_type_node
, strlen (fmt_str
));
13475 /* If the format is "%s" and first ... argument is a string literal,
13476 we know the size too. */
13477 else if (fcode
== BUILT_IN_SPRINTF_CHK
13478 && strcmp (fmt_str
, target_percent_s
) == 0)
13484 arg
= gimple_call_arg (stmt
, 4);
13485 if (validate_arg (arg
, POINTER_TYPE
))
13487 len
= c_strlen (arg
, 1);
13488 if (! len
|| ! host_integerp (len
, 1))
13495 if (! integer_all_onesp (size
))
13497 if (! len
|| ! tree_int_cst_lt (len
, size
))
13501 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13502 or if format doesn't contain % chars or is "%s". */
13503 if (! integer_zerop (flag
))
13505 if (fmt_str
== NULL
)
13507 if (strchr (fmt_str
, target_percent
) != NULL
13508 && strcmp (fmt_str
, target_percent_s
))
13512 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13513 fn
= built_in_decls
[fcode
== BUILT_IN_VSPRINTF_CHK
13514 ? BUILT_IN_VSPRINTF
: BUILT_IN_SPRINTF
];
13518 return gimple_rewrite_call_expr (stmt
, 4, fn
, 2, dest
, fmt
);
13521 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
13522 a normal call should be emitted rather than expanding the function
13523 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13524 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13525 passed as second argument. */
13528 gimple_fold_builtin_snprintf_chk (gimple stmt
, tree maxlen
,
13529 enum built_in_function fcode
)
13531 tree dest
, size
, len
, fn
, fmt
, flag
;
13532 const char *fmt_str
;
13534 /* Verify the required arguments in the original call. */
13535 if (gimple_call_num_args (stmt
) < 5)
13537 dest
= gimple_call_arg (stmt
, 0);
13538 if (!validate_arg (dest
, POINTER_TYPE
))
13540 len
= gimple_call_arg (stmt
, 1);
13541 if (!validate_arg (len
, INTEGER_TYPE
))
13543 flag
= gimple_call_arg (stmt
, 2);
13544 if (!validate_arg (flag
, INTEGER_TYPE
))
13546 size
= gimple_call_arg (stmt
, 3);
13547 if (!validate_arg (size
, INTEGER_TYPE
))
13549 fmt
= gimple_call_arg (stmt
, 4);
13550 if (!validate_arg (fmt
, POINTER_TYPE
))
13553 if (! host_integerp (size
, 1))
13556 if (! integer_all_onesp (size
))
13558 if (! host_integerp (len
, 1))
13560 /* If LEN is not constant, try MAXLEN too.
13561 For MAXLEN only allow optimizing into non-_ocs function
13562 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13563 if (maxlen
== NULL_TREE
|| ! host_integerp (maxlen
, 1))
13569 if (tree_int_cst_lt (size
, maxlen
))
13573 if (!init_target_chars ())
13576 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13577 or if format doesn't contain % chars or is "%s". */
13578 if (! integer_zerop (flag
))
13580 fmt_str
= c_getstr (fmt
);
13581 if (fmt_str
== NULL
)
13583 if (strchr (fmt_str
, target_percent
) != NULL
13584 && strcmp (fmt_str
, target_percent_s
))
13588 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13590 fn
= built_in_decls
[fcode
== BUILT_IN_VSNPRINTF_CHK
13591 ? BUILT_IN_VSNPRINTF
: BUILT_IN_SNPRINTF
];
13595 return gimple_rewrite_call_expr (stmt
, 5, fn
, 3, dest
, len
, fmt
);
13598 /* Builtins with folding operations that operate on "..." arguments
13599 need special handling; we need to store the arguments in a convenient
13600 data structure before attempting any folding. Fortunately there are
13601 only a few builtins that fall into this category. FNDECL is the
13602 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13603 result of the function call is ignored. */
13606 gimple_fold_builtin_varargs (tree fndecl
, gimple stmt
, bool ignore ATTRIBUTE_UNUSED
)
13608 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
13609 tree ret
= NULL_TREE
;
13613 case BUILT_IN_SPRINTF_CHK
:
13614 case BUILT_IN_VSPRINTF_CHK
:
13615 ret
= gimple_fold_builtin_sprintf_chk (stmt
, fcode
);
13618 case BUILT_IN_SNPRINTF_CHK
:
13619 case BUILT_IN_VSNPRINTF_CHK
:
13620 ret
= gimple_fold_builtin_snprintf_chk (stmt
, NULL_TREE
, fcode
);
13627 ret
= build1 (NOP_EXPR
, TREE_TYPE (ret
), ret
);
13628 TREE_NO_WARNING (ret
) = 1;
13634 /* A wrapper function for builtin folding that prevents warnings for
13635 "statement without effect" and the like, caused by removing the
13636 call node earlier than the warning is generated. */
13639 fold_call_stmt (gimple stmt
, bool ignore
)
13641 tree ret
= NULL_TREE
;
13642 tree fndecl
= gimple_call_fndecl (stmt
);
13644 && TREE_CODE (fndecl
) == FUNCTION_DECL
13645 && DECL_BUILT_IN (fndecl
)
13646 && !gimple_call_va_arg_pack_p (stmt
))
13648 int nargs
= gimple_call_num_args (stmt
);
13650 /* FIXME: Don't use a list in this interface. */
13651 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
13653 tree arglist
= NULL_TREE
;
13655 for (i
= nargs
- 1; i
>= 0; i
--)
13656 arglist
= tree_cons (NULL_TREE
, gimple_call_arg (stmt
, i
), arglist
);
13657 return targetm
.fold_builtin (fndecl
, arglist
, ignore
);
13661 if (nargs
<= MAX_ARGS_TO_FOLD_BUILTIN
)
13663 tree args
[MAX_ARGS_TO_FOLD_BUILTIN
];
13665 for (i
= 0; i
< nargs
; i
++)
13666 args
[i
] = gimple_call_arg (stmt
, i
);
13667 ret
= fold_builtin_n (fndecl
, args
, nargs
, ignore
);
13670 ret
= gimple_fold_builtin_varargs (fndecl
, stmt
, ignore
);
13673 /* Propagate location information from original call to
13674 expansion of builtin. Otherwise things like
13675 maybe_emit_chk_warning, that operate on the expansion
13676 of a builtin, will use the wrong location information. */
13677 if (gimple_has_location (stmt
))
13679 tree realret
= ret
;
13680 if (TREE_CODE (ret
) == NOP_EXPR
)
13681 realret
= TREE_OPERAND (ret
, 0);
13682 if (CAN_HAVE_LOCATION_P (realret
)
13683 && !EXPR_HAS_LOCATION (realret
))
13684 SET_EXPR_LOCATION (realret
, gimple_location (stmt
));