1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
6 This file is part of GCC.
GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
24 #include "coretypes.h"
33 #include "hard-reg-set.h"
36 #include "insn-config.h"
42 #include "typeclass.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic-core.h"
55 #ifndef SLOW_UNALIGNED_ACCESS
56 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
59 #ifndef PAD_VARARGS_DOWN
60 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
62 static tree
do_mpc_arg1 (tree
, tree
, int (*)(mpc_ptr
, mpc_srcptr
, mpc_rnd_t
));
64 struct target_builtins default_target_builtins
;
66 struct target_builtins
*this_target_builtins
= &default_target_builtins
;
69 /* Define the names of the builtin function types and codes. */
70 const char *const built_in_class_names
[4]
71 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
73 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
74 const char * built_in_names
[(int) END_BUILTINS
] =
76 #include "builtins.def"
80 /* Setup an array of _DECL trees, make sure each element is
81 initialized to NULL_TREE. */
82 tree built_in_decls
[(int) END_BUILTINS
];
83 /* Declarations used when constructing the builtin implicitly in the compiler.
84 It may be NULL_TREE when this is invalid (for instance runtime is not
85 required to implement the function call in all cases). */
86 tree implicit_built_in_decls
[(int) END_BUILTINS
];
88 static const char *c_getstr (tree
);
89 static rtx
c_readstr (const char *, enum machine_mode
);
90 static int target_char_cast (tree
, char *);
91 static rtx
get_memory_rtx (tree
, tree
);
92 static int apply_args_size (void);
93 static int apply_result_size (void);
94 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
95 static rtx
result_vector (int, rtx
);
97 static void expand_builtin_update_setjmp_buf (rtx
);
98 static void expand_builtin_prefetch (tree
);
99 static rtx
expand_builtin_apply_args (void);
100 static rtx
expand_builtin_apply_args_1 (void);
101 static rtx
expand_builtin_apply (rtx
, rtx
, rtx
);
102 static void expand_builtin_return (rtx
);
103 static enum type_class
type_to_class (tree
);
104 static rtx
expand_builtin_classify_type (tree
);
105 static void expand_errno_check (tree
, rtx
);
106 static rtx
expand_builtin_mathfn (tree
, rtx
, rtx
);
107 static rtx
expand_builtin_mathfn_2 (tree
, rtx
, rtx
);
108 static rtx
expand_builtin_mathfn_3 (tree
, rtx
, rtx
);
109 static rtx
expand_builtin_mathfn_ternary (tree
, rtx
, rtx
);
110 static rtx
expand_builtin_interclass_mathfn (tree
, rtx
);
111 static rtx
expand_builtin_sincos (tree
);
112 static rtx
expand_builtin_cexpi (tree
, rtx
);
113 static rtx
expand_builtin_int_roundingfn (tree
, rtx
);
114 static rtx
expand_builtin_int_roundingfn_2 (tree
, rtx
);
115 static rtx
expand_builtin_next_arg (void);
116 static rtx
expand_builtin_va_start (tree
);
117 static rtx
expand_builtin_va_end (tree
);
118 static rtx
expand_builtin_va_copy (tree
);
119 static rtx
expand_builtin_memcmp (tree
, rtx
, enum machine_mode
);
120 static rtx
expand_builtin_strcmp (tree
, rtx
);
121 static rtx
expand_builtin_strncmp (tree
, rtx
, enum machine_mode
);
122 static rtx
builtin_memcpy_read_str (void *, HOST_WIDE_INT
, enum machine_mode
);
123 static rtx
expand_builtin_memcpy (tree
, rtx
);
124 static rtx
expand_builtin_mempcpy (tree
, rtx
, enum machine_mode
);
125 static rtx
expand_builtin_mempcpy_args (tree
, tree
, tree
, rtx
,
126 enum machine_mode
, int);
127 static rtx
expand_builtin_strcpy (tree
, rtx
);
128 static rtx
expand_builtin_strcpy_args (tree
, tree
, rtx
);
129 static rtx
expand_builtin_stpcpy (tree
, rtx
, enum machine_mode
);
130 static rtx
expand_builtin_strncpy (tree
, rtx
);
131 static rtx
builtin_memset_gen_str (void *, HOST_WIDE_INT
, enum machine_mode
);
132 static rtx
expand_builtin_memset (tree
, rtx
, enum machine_mode
);
133 static rtx
expand_builtin_memset_args (tree
, tree
, tree
, rtx
, enum machine_mode
, tree
);
134 static rtx
expand_builtin_bzero (tree
);
135 static rtx
expand_builtin_strlen (tree
, rtx
, enum machine_mode
);
136 static rtx
expand_builtin_alloca (tree
, bool);
137 static rtx
expand_builtin_unop (enum machine_mode
, tree
, rtx
, rtx
, optab
);
138 static rtx
expand_builtin_frame_address (tree
, tree
);
139 static tree
stabilize_va_list_loc (location_t
, tree
, int);
140 static rtx
expand_builtin_expect (tree
, rtx
);
141 static tree
fold_builtin_constant_p (tree
);
142 static tree
fold_builtin_expect (location_t
, tree
, tree
);
143 static tree
fold_builtin_classify_type (tree
);
144 static tree
fold_builtin_strlen (location_t
, tree
, tree
);
145 static tree
fold_builtin_inf (location_t
, tree
, int);
146 static tree
fold_builtin_nan (tree
, tree
, int);
147 static tree
rewrite_call_expr (location_t
, tree
, int, tree
, int, ...);
148 static bool validate_arg (const_tree
, enum tree_code code
);
149 static bool integer_valued_real_p (tree
);
150 static tree
fold_trunc_transparent_mathfn (location_t
, tree
, tree
);
151 static bool readonly_data_expr (tree
);
152 static rtx
expand_builtin_fabs (tree
, rtx
, rtx
);
153 static rtx
expand_builtin_signbit (tree
, rtx
);
154 static tree
fold_builtin_sqrt (location_t
, tree
, tree
);
155 static tree
fold_builtin_cbrt (location_t
, tree
, tree
);
156 static tree
fold_builtin_pow (location_t
, tree
, tree
, tree
, tree
);
157 static tree
fold_builtin_powi (location_t
, tree
, tree
, tree
, tree
);
158 static tree
fold_builtin_cos (location_t
, tree
, tree
, tree
);
159 static tree
fold_builtin_cosh (location_t
, tree
, tree
, tree
);
160 static tree
fold_builtin_tan (tree
, tree
);
161 static tree
fold_builtin_trunc (location_t
, tree
, tree
);
162 static tree
fold_builtin_floor (location_t
, tree
, tree
);
163 static tree
fold_builtin_ceil (location_t
, tree
, tree
);
164 static tree
fold_builtin_round (location_t
, tree
, tree
);
165 static tree
fold_builtin_int_roundingfn (location_t
, tree
, tree
);
166 static tree
fold_builtin_bitop (tree
, tree
);
167 static tree
fold_builtin_memory_op (location_t
, tree
, tree
, tree
, tree
, bool, int);
168 static tree
fold_builtin_strchr (location_t
, tree
, tree
, tree
);
169 static tree
fold_builtin_memchr (location_t
, tree
, tree
, tree
, tree
);
170 static tree
fold_builtin_memcmp (location_t
, tree
, tree
, tree
);
171 static tree
fold_builtin_strcmp (location_t
, tree
, tree
);
172 static tree
fold_builtin_strncmp (location_t
, tree
, tree
, tree
);
173 static tree
fold_builtin_signbit (location_t
, tree
, tree
);
174 static tree
fold_builtin_copysign (location_t
, tree
, tree
, tree
, tree
);
175 static tree
fold_builtin_isascii (location_t
, tree
);
176 static tree
fold_builtin_toascii (location_t
, tree
);
177 static tree
fold_builtin_isdigit (location_t
, tree
);
178 static tree
fold_builtin_fabs (location_t
, tree
, tree
);
179 static tree
fold_builtin_abs (location_t
, tree
, tree
);
180 static tree
fold_builtin_unordered_cmp (location_t
, tree
, tree
, tree
, enum tree_code
,
182 static tree
fold_builtin_n (location_t
, tree
, tree
*, int, bool);
183 static tree
fold_builtin_0 (location_t
, tree
, bool);
184 static tree
fold_builtin_1 (location_t
, tree
, tree
, bool);
185 static tree
fold_builtin_2 (location_t
, tree
, tree
, tree
, bool);
186 static tree
fold_builtin_3 (location_t
, tree
, tree
, tree
, tree
, bool);
187 static tree
fold_builtin_4 (location_t
, tree
, tree
, tree
, tree
, tree
, bool);
188 static tree
fold_builtin_varargs (location_t
, tree
, tree
, bool);
190 static tree
fold_builtin_strpbrk (location_t
, tree
, tree
, tree
);
191 static tree
fold_builtin_strstr (location_t
, tree
, tree
, tree
);
192 static tree
fold_builtin_strrchr (location_t
, tree
, tree
, tree
);
193 static tree
fold_builtin_strcat (location_t
, tree
, tree
);
194 static tree
fold_builtin_strncat (location_t
, tree
, tree
, tree
);
195 static tree
fold_builtin_strspn (location_t
, tree
, tree
);
196 static tree
fold_builtin_strcspn (location_t
, tree
, tree
);
197 static tree
fold_builtin_sprintf (location_t
, tree
, tree
, tree
, int);
199 static rtx
expand_builtin_object_size (tree
);
200 static rtx
expand_builtin_memory_chk (tree
, rtx
, enum machine_mode
,
201 enum built_in_function
);
202 static void maybe_emit_chk_warning (tree
, enum built_in_function
);
203 static void maybe_emit_sprintf_chk_warning (tree
, enum built_in_function
);
204 static void maybe_emit_free_warning (tree
);
205 static tree
fold_builtin_object_size (tree
, tree
);
206 static tree
fold_builtin_strcat_chk (location_t
, tree
, tree
, tree
, tree
);
207 static tree
fold_builtin_strncat_chk (location_t
, tree
, tree
, tree
, tree
, tree
);
208 static tree
fold_builtin_sprintf_chk (location_t
, tree
, enum built_in_function
);
209 static tree
fold_builtin_printf (location_t
, tree
, tree
, tree
, bool, enum built_in_function
);
210 static tree
fold_builtin_fprintf (location_t
, tree
, tree
, tree
, tree
, bool,
211 enum built_in_function
);
212 static bool init_target_chars (void);
214 static unsigned HOST_WIDE_INT target_newline
;
215 static unsigned HOST_WIDE_INT target_percent
;
216 static unsigned HOST_WIDE_INT target_c
;
217 static unsigned HOST_WIDE_INT target_s
;
218 static char target_percent_c
[3];
219 static char target_percent_s
[3];
220 static char target_percent_s_newline
[4];
221 static tree
do_mpfr_arg1 (tree
, tree
, int (*)(mpfr_ptr
, mpfr_srcptr
, mp_rnd_t
),
222 const REAL_VALUE_TYPE
*, const REAL_VALUE_TYPE
*, bool);
223 static tree
do_mpfr_arg2 (tree
, tree
, tree
,
224 int (*)(mpfr_ptr
, mpfr_srcptr
, mpfr_srcptr
, mp_rnd_t
));
225 static tree
do_mpfr_arg3 (tree
, tree
, tree
, tree
,
226 int (*)(mpfr_ptr
, mpfr_srcptr
, mpfr_srcptr
, mpfr_srcptr
, mp_rnd_t
));
227 static tree
do_mpfr_sincos (tree
, tree
, tree
);
228 static tree
do_mpfr_bessel_n (tree
, tree
, tree
,
229 int (*)(mpfr_ptr
, long, mpfr_srcptr
, mp_rnd_t
),
230 const REAL_VALUE_TYPE
*, bool);
231 static tree
do_mpfr_remquo (tree
, tree
, tree
);
232 static tree
do_mpfr_lgamma_r (tree
, tree
, tree
);
/* Return true if NAME starts with __builtin_ or __sync_.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  return false;
}
247 /* Return true if DECL is a function symbol representing a built-in. */
250 is_builtin_fn (tree decl
)
252 return TREE_CODE (decl
) == FUNCTION_DECL
&& DECL_BUILT_IN (decl
);
256 /* Return true if NODE should be considered for inline expansion regardless
257 of the optimization level. This means whenever a function is invoked with
258 its "internal" name, which normally contains the prefix "__builtin". */
261 called_as_built_in (tree node
)
263 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
264 we want the name used to call the function, not the name it
266 const char *name
= IDENTIFIER_POINTER (DECL_NAME (node
));
267 return is_builtin_name (name
);
270 /* Return the alignment in bits of EXP, an object.
271 Don't return more than MAX_ALIGN no matter what. */
274 get_object_alignment (tree exp
, unsigned int max_align
)
276 HOST_WIDE_INT bitsize
, bitpos
;
278 enum machine_mode mode
;
279 int unsignedp
, volatilep
;
280 unsigned int align
, inner
;
282 /* Get the innermost object and the constant (bitpos) and possibly
283 variable (offset) offset of the access. */
284 exp
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
285 &mode
, &unsignedp
, &volatilep
, true);
287 /* Extract alignment information from the innermost object and
288 possibly adjust bitpos and offset. */
289 if (TREE_CODE (exp
) == CONST_DECL
)
290 exp
= DECL_INITIAL (exp
);
292 && TREE_CODE (exp
) != LABEL_DECL
)
293 align
= DECL_ALIGN (exp
);
294 else if (CONSTANT_CLASS_P (exp
))
296 align
= TYPE_ALIGN (TREE_TYPE (exp
));
297 #ifdef CONSTANT_ALIGNMENT
298 align
= (unsigned)CONSTANT_ALIGNMENT (exp
, align
);
301 else if (TREE_CODE (exp
) == VIEW_CONVERT_EXPR
)
302 align
= TYPE_ALIGN (TREE_TYPE (exp
));
303 else if (TREE_CODE (exp
) == INDIRECT_REF
)
304 align
= TYPE_ALIGN (TREE_TYPE (exp
));
305 else if (TREE_CODE (exp
) == MEM_REF
)
307 tree addr
= TREE_OPERAND (exp
, 0);
308 struct ptr_info_def
*pi
;
309 if (TREE_CODE (addr
) == BIT_AND_EXPR
310 && TREE_CODE (TREE_OPERAND (addr
, 1)) == INTEGER_CST
)
312 align
= (TREE_INT_CST_LOW (TREE_OPERAND (addr
, 1))
313 & -TREE_INT_CST_LOW (TREE_OPERAND (addr
, 1)));
314 align
*= BITS_PER_UNIT
;
315 addr
= TREE_OPERAND (addr
, 0);
318 align
= BITS_PER_UNIT
;
319 if (TREE_CODE (addr
) == SSA_NAME
320 && (pi
= SSA_NAME_PTR_INFO (addr
)))
322 bitpos
+= (pi
->misalign
* BITS_PER_UNIT
) & ~(align
- 1);
323 align
= MAX (pi
->align
* BITS_PER_UNIT
, align
);
325 else if (TREE_CODE (addr
) == ADDR_EXPR
)
326 align
= MAX (align
, get_object_alignment (TREE_OPERAND (addr
, 0),
328 bitpos
+= mem_ref_offset (exp
).low
* BITS_PER_UNIT
;
330 else if (TREE_CODE (exp
) == TARGET_MEM_REF
)
332 struct ptr_info_def
*pi
;
333 tree addr
= TMR_BASE (exp
);
334 if (TREE_CODE (addr
) == BIT_AND_EXPR
335 && TREE_CODE (TREE_OPERAND (addr
, 1)) == INTEGER_CST
)
337 align
= (TREE_INT_CST_LOW (TREE_OPERAND (addr
, 1))
338 & -TREE_INT_CST_LOW (TREE_OPERAND (addr
, 1)));
339 align
*= BITS_PER_UNIT
;
340 addr
= TREE_OPERAND (addr
, 0);
343 align
= BITS_PER_UNIT
;
344 if (TREE_CODE (addr
) == SSA_NAME
345 && (pi
= SSA_NAME_PTR_INFO (addr
)))
347 bitpos
+= (pi
->misalign
* BITS_PER_UNIT
) & ~(align
- 1);
348 align
= MAX (pi
->align
* BITS_PER_UNIT
, align
);
350 else if (TREE_CODE (addr
) == ADDR_EXPR
)
351 align
= MAX (align
, get_object_alignment (TREE_OPERAND (addr
, 0),
353 if (TMR_OFFSET (exp
))
354 bitpos
+= TREE_INT_CST_LOW (TMR_OFFSET (exp
)) * BITS_PER_UNIT
;
355 if (TMR_INDEX (exp
) && TMR_STEP (exp
))
357 unsigned HOST_WIDE_INT step
= TREE_INT_CST_LOW (TMR_STEP (exp
));
358 align
= MIN (align
, (step
& -step
) * BITS_PER_UNIT
);
360 else if (TMR_INDEX (exp
))
361 align
= BITS_PER_UNIT
;
362 if (TMR_INDEX2 (exp
))
363 align
= BITS_PER_UNIT
;
366 align
= BITS_PER_UNIT
;
368 /* If there is a non-constant offset part extract the maximum
369 alignment that can prevail. */
375 if (TREE_CODE (offset
) == PLUS_EXPR
)
377 next_offset
= TREE_OPERAND (offset
, 0);
378 offset
= TREE_OPERAND (offset
, 1);
382 if (host_integerp (offset
, 1))
384 /* Any overflow in calculating offset_bits won't change
387 = ((unsigned) tree_low_cst (offset
, 1) * BITS_PER_UNIT
);
390 inner
= MIN (inner
, (offset_bits
& -offset_bits
));
392 else if (TREE_CODE (offset
) == MULT_EXPR
393 && host_integerp (TREE_OPERAND (offset
, 1), 1))
395 /* Any overflow in calculating offset_factor won't change
397 unsigned offset_factor
398 = ((unsigned) tree_low_cst (TREE_OPERAND (offset
, 1), 1)
402 inner
= MIN (inner
, (offset_factor
& -offset_factor
));
406 inner
= MIN (inner
, BITS_PER_UNIT
);
409 offset
= next_offset
;
412 /* Alignment is innermost object alignment adjusted by the constant
413 and non-constant offset parts. */
414 align
= MIN (align
, inner
);
415 bitpos
= bitpos
& (align
- 1);
417 /* align and bitpos now specify known low bits of the pointer.
418 ptr & (align - 1) == bitpos. */
421 align
= (bitpos
& -bitpos
);
423 return MIN (align
, max_align
);
426 /* Returns true iff we can trust that alignment information has been
427 calculated properly. */
430 can_trust_pointer_alignment (void)
432 /* We rely on TER to compute accurate alignment information. */
433 return (optimize
&& flag_tree_ter
);
436 /* Return the alignment in bits of EXP, a pointer valued expression.
437 But don't return more than MAX_ALIGN no matter what.
438 The alignment returned is, by default, the alignment of the thing that
439 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
441 Otherwise, look at the expression to see if we can do better, i.e., if the
442 expression is actually pointing at an object whose alignment is tighter. */
445 get_pointer_alignment (tree exp
, unsigned int max_align
)
449 if (TREE_CODE (exp
) == ADDR_EXPR
)
450 return get_object_alignment (TREE_OPERAND (exp
, 0), max_align
);
451 else if (TREE_CODE (exp
) == SSA_NAME
452 && POINTER_TYPE_P (TREE_TYPE (exp
)))
454 struct ptr_info_def
*pi
= SSA_NAME_PTR_INFO (exp
);
457 return BITS_PER_UNIT
;
458 if (pi
->misalign
!= 0)
459 align
= (pi
->misalign
& -pi
->misalign
);
462 return MIN (max_align
, align
* BITS_PER_UNIT
);
465 return POINTER_TYPE_P (TREE_TYPE (exp
)) ? BITS_PER_UNIT
: 0;
468 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
469 way, because it could contain a zero byte in the middle.
470 TREE_STRING_LENGTH is the size of the character array, not the string.
472 ONLY_VALUE should be nonzero if the result is not going to be emitted
473 into the instruction stream and zero if it is going to be expanded.
474 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
475 is returned, otherwise NULL, since
476 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
477 evaluate the side-effects.
479 The value returned is of type `ssizetype'.
481 Unfortunately, string_constant can't access the values of const char
482 arrays with initializers, so neither can we do so here. */
485 c_strlen (tree src
, int only_value
)
488 HOST_WIDE_INT offset
;
494 if (TREE_CODE (src
) == COND_EXPR
495 && (only_value
|| !TREE_SIDE_EFFECTS (TREE_OPERAND (src
, 0))))
499 len1
= c_strlen (TREE_OPERAND (src
, 1), only_value
);
500 len2
= c_strlen (TREE_OPERAND (src
, 2), only_value
);
501 if (tree_int_cst_equal (len1
, len2
))
505 if (TREE_CODE (src
) == COMPOUND_EXPR
506 && (only_value
|| !TREE_SIDE_EFFECTS (TREE_OPERAND (src
, 0))))
507 return c_strlen (TREE_OPERAND (src
, 1), only_value
);
509 loc
= EXPR_LOC_OR_HERE (src
);
511 src
= string_constant (src
, &offset_node
);
515 max
= TREE_STRING_LENGTH (src
) - 1;
516 ptr
= TREE_STRING_POINTER (src
);
518 if (offset_node
&& TREE_CODE (offset_node
) != INTEGER_CST
)
520 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
521 compute the offset to the following null if we don't know where to
522 start searching for it. */
525 for (i
= 0; i
< max
; i
++)
529 /* We don't know the starting offset, but we do know that the string
530 has no internal zero bytes. We can assume that the offset falls
531 within the bounds of the string; otherwise, the programmer deserves
532 what he gets. Subtract the offset from the length of the string,
533 and return that. This would perhaps not be valid if we were dealing
534 with named arrays in addition to literal string constants. */
536 return size_diffop_loc (loc
, size_int (max
), offset_node
);
539 /* We have a known offset into the string. Start searching there for
540 a null character if we can represent it as a single HOST_WIDE_INT. */
541 if (offset_node
== 0)
543 else if (! host_integerp (offset_node
, 0))
546 offset
= tree_low_cst (offset_node
, 0);
548 /* If the offset is known to be out of bounds, warn, and call strlen at
550 if (offset
< 0 || offset
> max
)
552 /* Suppress multiple warnings for propagated constant strings. */
553 if (! TREE_NO_WARNING (src
))
555 warning_at (loc
, 0, "offset outside bounds of constant string");
556 TREE_NO_WARNING (src
) = 1;
561 /* Use strlen to search for the first zero byte. Since any strings
562 constructed with build_string will have nulls appended, we win even
563 if we get handed something like (char[4])"abcd".
565 Since OFFSET is our starting index into the string, no further
566 calculation is needed. */
567 return ssize_int (strlen (ptr
+ offset
));
570 /* Return a char pointer for a C string if it is a string constant
571 or sum of string constant and integer constant. */
578 src
= string_constant (src
, &offset_node
);
582 if (offset_node
== 0)
583 return TREE_STRING_POINTER (src
);
584 else if (!host_integerp (offset_node
, 1)
585 || compare_tree_int (offset_node
, TREE_STRING_LENGTH (src
) - 1) > 0)
588 return TREE_STRING_POINTER (src
) + tree_low_cst (offset_node
, 1);
591 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
592 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
595 c_readstr (const char *str
, enum machine_mode mode
)
601 gcc_assert (GET_MODE_CLASS (mode
) == MODE_INT
);
606 for (i
= 0; i
< GET_MODE_SIZE (mode
); i
++)
609 if (WORDS_BIG_ENDIAN
)
610 j
= GET_MODE_SIZE (mode
) - i
- 1;
611 if (BYTES_BIG_ENDIAN
!= WORDS_BIG_ENDIAN
612 && GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
613 j
= j
+ UNITS_PER_WORD
- 2 * (j
% UNITS_PER_WORD
) - 1;
615 gcc_assert (j
< 2 * HOST_BITS_PER_WIDE_INT
);
618 ch
= (unsigned char) str
[i
];
619 c
[j
/ HOST_BITS_PER_WIDE_INT
] |= ch
<< (j
% HOST_BITS_PER_WIDE_INT
);
621 return immed_double_const (c
[0], c
[1], mode
);
624 /* Cast a target constant CST to target CHAR and if that value fits into
625 host char type, return zero and put that value into variable pointed to by
629 target_char_cast (tree cst
, char *p
)
631 unsigned HOST_WIDE_INT val
, hostval
;
633 if (!host_integerp (cst
, 1)
634 || CHAR_TYPE_SIZE
> HOST_BITS_PER_WIDE_INT
)
637 val
= tree_low_cst (cst
, 1);
638 if (CHAR_TYPE_SIZE
< HOST_BITS_PER_WIDE_INT
)
639 val
&= (((unsigned HOST_WIDE_INT
) 1) << CHAR_TYPE_SIZE
) - 1;
642 if (HOST_BITS_PER_CHAR
< HOST_BITS_PER_WIDE_INT
)
643 hostval
&= (((unsigned HOST_WIDE_INT
) 1) << HOST_BITS_PER_CHAR
) - 1;
652 /* Similar to save_expr, but assumes that arbitrary code is not executed
653 in between the multiple evaluations. In particular, we assume that a
654 non-addressable local variable will not be modified. */
657 builtin_save_expr (tree exp
)
659 if (TREE_ADDRESSABLE (exp
) == 0
660 && (TREE_CODE (exp
) == PARM_DECL
661 || (TREE_CODE (exp
) == VAR_DECL
&& !TREE_STATIC (exp
))))
664 return save_expr (exp
);
667 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
668 times to get the address of either a higher stack frame, or a return
669 address located within it (depending on FNDECL_CODE). */
672 expand_builtin_return_addr (enum built_in_function fndecl_code
, int count
)
676 #ifdef INITIAL_FRAME_ADDRESS_RTX
677 rtx tem
= INITIAL_FRAME_ADDRESS_RTX
;
681 /* For a zero count with __builtin_return_address, we don't care what
682 frame address we return, because target-specific definitions will
683 override us. Therefore frame pointer elimination is OK, and using
684 the soft frame pointer is OK.
686 For a nonzero count, or a zero count with __builtin_frame_address,
687 we require a stable offset from the current frame pointer to the
688 previous one, so we must use the hard frame pointer, and
689 we must disable frame pointer elimination. */
690 if (count
== 0 && fndecl_code
== BUILT_IN_RETURN_ADDRESS
)
691 tem
= frame_pointer_rtx
;
694 tem
= hard_frame_pointer_rtx
;
696 /* Tell reload not to eliminate the frame pointer. */
697 crtl
->accesses_prior_frames
= 1;
701 /* Some machines need special handling before we can access
702 arbitrary frames. For example, on the SPARC, we must first flush
703 all register windows to the stack. */
704 #ifdef SETUP_FRAME_ADDRESSES
706 SETUP_FRAME_ADDRESSES ();
709 /* On the SPARC, the return address is not in the frame, it is in a
710 register. There is no way to access it off of the current frame
711 pointer, but it can be accessed off the previous frame pointer by
712 reading the value from the register window save area. */
713 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
714 if (fndecl_code
== BUILT_IN_RETURN_ADDRESS
)
718 /* Scan back COUNT frames to the specified frame. */
719 for (i
= 0; i
< count
; i
++)
721 /* Assume the dynamic chain pointer is in the word that the
722 frame address points to, unless otherwise specified. */
723 #ifdef DYNAMIC_CHAIN_ADDRESS
724 tem
= DYNAMIC_CHAIN_ADDRESS (tem
);
726 tem
= memory_address (Pmode
, tem
);
727 tem
= gen_frame_mem (Pmode
, tem
);
728 tem
= copy_to_reg (tem
);
731 /* For __builtin_frame_address, return what we've got. But, on
732 the SPARC for example, we may have to add a bias. */
733 if (fndecl_code
== BUILT_IN_FRAME_ADDRESS
)
734 #ifdef FRAME_ADDR_RTX
735 return FRAME_ADDR_RTX (tem
);
740 /* For __builtin_return_address, get the return address from that frame. */
741 #ifdef RETURN_ADDR_RTX
742 tem
= RETURN_ADDR_RTX (count
, tem
);
744 tem
= memory_address (Pmode
,
745 plus_constant (tem
, GET_MODE_SIZE (Pmode
)));
746 tem
= gen_frame_mem (Pmode
, tem
);
751 /* Alias set used for setjmp buffer. */
752 static alias_set_type setjmp_alias_set
= -1;
754 /* Construct the leading half of a __builtin_setjmp call. Control will
755 return to RECEIVER_LABEL. This is also called directly by the SJLJ
756 exception handling code. */
759 expand_builtin_setjmp_setup (rtx buf_addr
, rtx receiver_label
)
761 enum machine_mode sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
765 if (setjmp_alias_set
== -1)
766 setjmp_alias_set
= new_alias_set ();
768 buf_addr
= convert_memory_address (Pmode
, buf_addr
);
770 buf_addr
= force_reg (Pmode
, force_operand (buf_addr
, NULL_RTX
));
772 /* We store the frame pointer and the address of receiver_label in
773 the buffer and use the rest of it for the stack save area, which
774 is machine-dependent. */
776 mem
= gen_rtx_MEM (Pmode
, buf_addr
);
777 set_mem_alias_set (mem
, setjmp_alias_set
);
778 emit_move_insn (mem
, targetm
.builtin_setjmp_frame_value ());
780 mem
= gen_rtx_MEM (Pmode
, plus_constant (buf_addr
, GET_MODE_SIZE (Pmode
))),
781 set_mem_alias_set (mem
, setjmp_alias_set
);
783 emit_move_insn (validize_mem (mem
),
784 force_reg (Pmode
, gen_rtx_LABEL_REF (Pmode
, receiver_label
)));
786 stack_save
= gen_rtx_MEM (sa_mode
,
787 plus_constant (buf_addr
,
788 2 * GET_MODE_SIZE (Pmode
)));
789 set_mem_alias_set (stack_save
, setjmp_alias_set
);
790 emit_stack_save (SAVE_NONLOCAL
, &stack_save
, NULL_RTX
);
792 /* If there is further processing to do, do it. */
793 #ifdef HAVE_builtin_setjmp_setup
794 if (HAVE_builtin_setjmp_setup
)
795 emit_insn (gen_builtin_setjmp_setup (buf_addr
));
798 /* Tell optimize_save_area_alloca that extra work is going to
799 need to go on during alloca. */
800 cfun
->calls_setjmp
= 1;
802 /* We have a nonlocal label. */
803 cfun
->has_nonlocal_label
= 1;
806 /* Construct the trailing part of a __builtin_setjmp call. This is
807 also called directly by the SJLJ exception handling code. */
810 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED
)
814 /* Clobber the FP when we get here, so we have to make sure it's
815 marked as used by this function. */
816 emit_use (hard_frame_pointer_rtx
);
818 /* Mark the static chain as clobbered here so life information
819 doesn't get messed up for it. */
820 chain
= targetm
.calls
.static_chain (current_function_decl
, true);
821 if (chain
&& REG_P (chain
))
822 emit_clobber (chain
);
824 /* Now put in the code to restore the frame pointer, and argument
825 pointer, if needed. */
826 #ifdef HAVE_nonlocal_goto
827 if (! HAVE_nonlocal_goto
)
830 emit_move_insn (virtual_stack_vars_rtx
, hard_frame_pointer_rtx
);
831 /* This might change the hard frame pointer in ways that aren't
832 apparent to early optimization passes, so force a clobber. */
833 emit_clobber (hard_frame_pointer_rtx
);
836 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
837 if (fixed_regs
[ARG_POINTER_REGNUM
])
839 #ifdef ELIMINABLE_REGS
841 static const struct elims
{const int from
, to
;} elim_regs
[] = ELIMINABLE_REGS
;
843 for (i
= 0; i
< ARRAY_SIZE (elim_regs
); i
++)
844 if (elim_regs
[i
].from
== ARG_POINTER_REGNUM
845 && elim_regs
[i
].to
== HARD_FRAME_POINTER_REGNUM
)
848 if (i
== ARRAY_SIZE (elim_regs
))
851 /* Now restore our arg pointer from the address at which it
852 was saved in our stack frame. */
853 emit_move_insn (crtl
->args
.internal_arg_pointer
,
854 copy_to_reg (get_arg_pointer_save_area ()));
859 #ifdef HAVE_builtin_setjmp_receiver
860 if (HAVE_builtin_setjmp_receiver
)
861 emit_insn (gen_builtin_setjmp_receiver (receiver_label
));
864 #ifdef HAVE_nonlocal_goto_receiver
865 if (HAVE_nonlocal_goto_receiver
)
866 emit_insn (gen_nonlocal_goto_receiver ());
871 /* We must not allow the code we just generated to be reordered by
872 scheduling. Specifically, the update of the frame pointer must
873 happen immediately, not later. */
874 emit_insn (gen_blockage ());
877 /* __builtin_longjmp is passed a pointer to an array of five words (not
878 all will be used on all machines). It operates similarly to the C
879 library function of the same name, but is more efficient. Much of
880 the code below is copied from the handling of non-local gotos. */
883 expand_builtin_longjmp (rtx buf_addr
, rtx value
)
885 rtx fp
, lab
, stack
, insn
, last
;
886 enum machine_mode sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
888 /* DRAP is needed for stack realign if longjmp is expanded to current
890 if (SUPPORTS_STACK_ALIGNMENT
)
891 crtl
->need_drap
= true;
893 if (setjmp_alias_set
== -1)
894 setjmp_alias_set
= new_alias_set ();
896 buf_addr
= convert_memory_address (Pmode
, buf_addr
);
898 buf_addr
= force_reg (Pmode
, buf_addr
);
900 /* We require that the user must pass a second argument of 1, because
901 that is what builtin_setjmp will return. */
902 gcc_assert (value
== const1_rtx
);
904 last
= get_last_insn ();
905 #ifdef HAVE_builtin_longjmp
906 if (HAVE_builtin_longjmp
)
907 emit_insn (gen_builtin_longjmp (buf_addr
));
911 fp
= gen_rtx_MEM (Pmode
, buf_addr
);
912 lab
= gen_rtx_MEM (Pmode
, plus_constant (buf_addr
,
913 GET_MODE_SIZE (Pmode
)));
915 stack
= gen_rtx_MEM (sa_mode
, plus_constant (buf_addr
,
916 2 * GET_MODE_SIZE (Pmode
)));
917 set_mem_alias_set (fp
, setjmp_alias_set
);
918 set_mem_alias_set (lab
, setjmp_alias_set
);
919 set_mem_alias_set (stack
, setjmp_alias_set
);
921 /* Pick up FP, label, and SP from the block and jump. This code is
922 from expand_goto in stmt.c; see there for detailed comments. */
923 #ifdef HAVE_nonlocal_goto
924 if (HAVE_nonlocal_goto
)
925 /* We have to pass a value to the nonlocal_goto pattern that will
926 get copied into the static_chain pointer, but it does not matter
927 what that value is, because builtin_setjmp does not use it. */
928 emit_insn (gen_nonlocal_goto (value
, lab
, stack
, fp
));
932 lab
= copy_to_reg (lab
);
934 emit_clobber (gen_rtx_MEM (BLKmode
, gen_rtx_SCRATCH (VOIDmode
)));
935 emit_clobber (gen_rtx_MEM (BLKmode
, hard_frame_pointer_rtx
));
937 emit_move_insn (hard_frame_pointer_rtx
, fp
);
938 emit_stack_restore (SAVE_NONLOCAL
, stack
, NULL_RTX
);
940 emit_use (hard_frame_pointer_rtx
);
941 emit_use (stack_pointer_rtx
);
942 emit_indirect_jump (lab
);
946 /* Search backwards and mark the jump insn as a non-local goto.
947 Note that this precludes the use of __builtin_longjmp to a
948 __builtin_setjmp target in the same function. However, we've
949 already cautioned the user that these functions are for
950 internal exception handling use only. */
951 for (insn
= get_last_insn (); insn
; insn
= PREV_INSN (insn
))
953 gcc_assert (insn
!= last
);
957 add_reg_note (insn
, REG_NON_LOCAL_GOTO
, const0_rtx
);
960 else if (CALL_P (insn
))
965 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
966 and the address of the save area. */
969 expand_builtin_nonlocal_goto (tree exp
)
971 tree t_label
, t_save_area
;
972 rtx r_label
, r_save_area
, r_fp
, r_sp
, insn
;
974 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
977 t_label
= CALL_EXPR_ARG (exp
, 0);
978 t_save_area
= CALL_EXPR_ARG (exp
, 1);
980 r_label
= expand_normal (t_label
);
981 r_label
= convert_memory_address (Pmode
, r_label
);
982 r_save_area
= expand_normal (t_save_area
);
983 r_save_area
= convert_memory_address (Pmode
, r_save_area
);
984 /* Copy the address of the save location to a register just in case it was based
985 on the frame pointer. */
986 r_save_area
= copy_to_reg (r_save_area
);
987 r_fp
= gen_rtx_MEM (Pmode
, r_save_area
);
988 r_sp
= gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL
),
989 plus_constant (r_save_area
, GET_MODE_SIZE (Pmode
)));
991 crtl
->has_nonlocal_goto
= 1;
993 #ifdef HAVE_nonlocal_goto
994 /* ??? We no longer need to pass the static chain value, afaik. */
995 if (HAVE_nonlocal_goto
)
996 emit_insn (gen_nonlocal_goto (const0_rtx
, r_label
, r_sp
, r_fp
));
1000 r_label
= copy_to_reg (r_label
);
1002 emit_clobber (gen_rtx_MEM (BLKmode
, gen_rtx_SCRATCH (VOIDmode
)));
1003 emit_clobber (gen_rtx_MEM (BLKmode
, hard_frame_pointer_rtx
));
1005 /* Restore frame pointer for containing function.
1006 This sets the actual hard register used for the frame pointer
1007 to the location of the function's incoming static chain info.
1008 The non-local goto handler will then adjust it to contain the
1009 proper value and reload the argument pointer, if needed. */
1010 emit_move_insn (hard_frame_pointer_rtx
, r_fp
);
1011 emit_stack_restore (SAVE_NONLOCAL
, r_sp
, NULL_RTX
);
1013 /* USE of hard_frame_pointer_rtx added for consistency;
1014 not clear if really needed. */
1015 emit_use (hard_frame_pointer_rtx
);
1016 emit_use (stack_pointer_rtx
);
1018 /* If the architecture is using a GP register, we must
1019 conservatively assume that the target function makes use of it.
1020 The prologue of functions with nonlocal gotos must therefore
1021 initialize the GP register to the appropriate value, and we
1022 must then make sure that this value is live at the point
1023 of the jump. (Note that this doesn't necessarily apply
1024 to targets with a nonlocal_goto pattern; they are free
1025 to implement it in their own way. Note also that this is
1026 a no-op if the GP register is a global invariant.) */
1027 if ((unsigned) PIC_OFFSET_TABLE_REGNUM
!= INVALID_REGNUM
1028 && fixed_regs
[PIC_OFFSET_TABLE_REGNUM
])
1029 emit_use (pic_offset_table_rtx
);
1031 emit_indirect_jump (r_label
);
1034 /* Search backwards to the jump insn and mark it as a
1036 for (insn
= get_last_insn (); insn
; insn
= PREV_INSN (insn
))
1040 add_reg_note (insn
, REG_NON_LOCAL_GOTO
, const0_rtx
);
1043 else if (CALL_P (insn
))
1050 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1051 (not all will be used on all machines) that was passed to __builtin_setjmp.
1052 It updates the stack pointer in that block to correspond to the current
1056 expand_builtin_update_setjmp_buf (rtx buf_addr
)
1058 enum machine_mode sa_mode
= Pmode
;
1062 #ifdef HAVE_save_stack_nonlocal
1063 if (HAVE_save_stack_nonlocal
)
1064 sa_mode
= insn_data
[(int) CODE_FOR_save_stack_nonlocal
].operand
[0].mode
;
1066 #ifdef STACK_SAVEAREA_MODE
1067 sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
1071 = gen_rtx_MEM (sa_mode
,
1074 plus_constant (buf_addr
, 2 * GET_MODE_SIZE (Pmode
))));
1078 emit_insn (gen_setjmp ());
1081 emit_stack_save (SAVE_NONLOCAL
, &stack_save
, NULL_RTX
);
1084 /* Expand a call to __builtin_prefetch. For a target that does not support
1085 data prefetch, evaluate the memory address argument in case it has side
1089 expand_builtin_prefetch (tree exp
)
1091 tree arg0
, arg1
, arg2
;
1095 if (!validate_arglist (exp
, POINTER_TYPE
, 0))
1098 arg0
= CALL_EXPR_ARG (exp
, 0);
1100 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1101 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1103 nargs
= call_expr_nargs (exp
);
1105 arg1
= CALL_EXPR_ARG (exp
, 1);
1107 arg1
= integer_zero_node
;
1109 arg2
= CALL_EXPR_ARG (exp
, 2);
1111 arg2
= integer_three_node
;
1113 /* Argument 0 is an address. */
1114 op0
= expand_expr (arg0
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
1116 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1117 if (TREE_CODE (arg1
) != INTEGER_CST
)
1119 error ("second argument to %<__builtin_prefetch%> must be a constant");
1120 arg1
= integer_zero_node
;
1122 op1
= expand_normal (arg1
);
1123 /* Argument 1 must be either zero or one. */
1124 if (INTVAL (op1
) != 0 && INTVAL (op1
) != 1)
1126 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1131 /* Argument 2 (locality) must be a compile-time constant int. */
1132 if (TREE_CODE (arg2
) != INTEGER_CST
)
1134 error ("third argument to %<__builtin_prefetch%> must be a constant");
1135 arg2
= integer_zero_node
;
1137 op2
= expand_normal (arg2
);
1138 /* Argument 2 must be 0, 1, 2, or 3. */
1139 if (INTVAL (op2
) < 0 || INTVAL (op2
) > 3)
1141 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1145 #ifdef HAVE_prefetch
1148 if ((! (*insn_data
[(int) CODE_FOR_prefetch
].operand
[0].predicate
)
1150 insn_data
[(int) CODE_FOR_prefetch
].operand
[0].mode
))
1151 || (GET_MODE (op0
) != Pmode
))
1153 op0
= convert_memory_address (Pmode
, op0
);
1154 op0
= force_reg (Pmode
, op0
);
1156 emit_insn (gen_prefetch (op0
, op1
, op2
));
1160 /* Don't do anything with direct references to volatile memory, but
1161 generate code to handle other side effects. */
1162 if (!MEM_P (op0
) && side_effects_p (op0
))
1166 /* Get a MEM rtx for expression EXP which is the address of an operand
1167 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1168 the maximum length of the block of memory that might be accessed or
1172 get_memory_rtx (tree exp
, tree len
)
1174 tree orig_exp
= exp
;
1178 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1179 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1180 if (TREE_CODE (exp
) == SAVE_EXPR
&& !SAVE_EXPR_RESOLVED_P (exp
))
1181 exp
= TREE_OPERAND (exp
, 0);
1183 addr
= expand_expr (orig_exp
, NULL_RTX
, ptr_mode
, EXPAND_NORMAL
);
1184 mem
= gen_rtx_MEM (BLKmode
, memory_address (BLKmode
, addr
));
1186 /* Get an expression we can use to find the attributes to assign to MEM.
1187 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1188 we can. First remove any nops. */
1189 while (CONVERT_EXPR_P (exp
)
1190 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp
, 0))))
1191 exp
= TREE_OPERAND (exp
, 0);
1194 if (TREE_CODE (exp
) == POINTER_PLUS_EXPR
1195 && TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
1196 && host_integerp (TREE_OPERAND (exp
, 1), 0)
1197 && (off
= tree_low_cst (TREE_OPERAND (exp
, 1), 0)) > 0)
1198 exp
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
1199 else if (TREE_CODE (exp
) == ADDR_EXPR
)
1200 exp
= TREE_OPERAND (exp
, 0);
1201 else if (POINTER_TYPE_P (TREE_TYPE (exp
)))
1202 exp
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (exp
)), exp
);
1206 /* Honor attributes derived from exp, except for the alias set
1207 (as builtin stringops may alias with anything) and the size
1208 (as stringops may access multiple array elements). */
1211 set_mem_attributes (mem
, exp
, 0);
1214 mem
= adjust_automodify_address_nv (mem
, BLKmode
, NULL
, off
);
1216 /* Allow the string and memory builtins to overflow from one
1217 field into another, see http://gcc.gnu.org/PR23561.
1218 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1219 memory accessed by the string or memory builtin will fit
1220 within the field. */
1221 if (MEM_EXPR (mem
) && TREE_CODE (MEM_EXPR (mem
)) == COMPONENT_REF
)
1223 tree mem_expr
= MEM_EXPR (mem
);
1224 HOST_WIDE_INT offset
= -1, length
= -1;
1227 while (TREE_CODE (inner
) == ARRAY_REF
1228 || CONVERT_EXPR_P (inner
)
1229 || TREE_CODE (inner
) == VIEW_CONVERT_EXPR
1230 || TREE_CODE (inner
) == SAVE_EXPR
)
1231 inner
= TREE_OPERAND (inner
, 0);
1233 gcc_assert (TREE_CODE (inner
) == COMPONENT_REF
);
1235 if (MEM_OFFSET (mem
)
1236 && CONST_INT_P (MEM_OFFSET (mem
)))
1237 offset
= INTVAL (MEM_OFFSET (mem
));
1239 if (offset
>= 0 && len
&& host_integerp (len
, 0))
1240 length
= tree_low_cst (len
, 0);
1242 while (TREE_CODE (inner
) == COMPONENT_REF
)
1244 tree field
= TREE_OPERAND (inner
, 1);
1245 gcc_assert (TREE_CODE (mem_expr
) == COMPONENT_REF
);
1246 gcc_assert (field
== TREE_OPERAND (mem_expr
, 1));
1248 /* Bitfields are generally not byte-addressable. */
1249 gcc_assert (!DECL_BIT_FIELD (field
)
1250 || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field
), 1)
1251 % BITS_PER_UNIT
) == 0
1252 && host_integerp (DECL_SIZE (field
), 0)
1253 && (TREE_INT_CST_LOW (DECL_SIZE (field
))
1254 % BITS_PER_UNIT
) == 0));
1256 /* If we can prove that the memory starting at XEXP (mem, 0) and
1257 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
1258 can keep the COMPONENT_REF in MEM_EXPR. But be careful with
1259 fields without DECL_SIZE_UNIT like flexible array members. */
1261 && DECL_SIZE_UNIT (field
)
1262 && host_integerp (DECL_SIZE_UNIT (field
), 0))
1265 = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field
));
1268 && offset
+ length
<= size
)
1273 && host_integerp (DECL_FIELD_OFFSET (field
), 0))
1274 offset
+= TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field
))
1275 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field
), 1)
1283 mem_expr
= TREE_OPERAND (mem_expr
, 0);
1284 inner
= TREE_OPERAND (inner
, 0);
1287 if (mem_expr
== NULL
)
1289 if (mem_expr
!= MEM_EXPR (mem
))
1291 set_mem_expr (mem
, mem_expr
);
1292 set_mem_offset (mem
, offset
>= 0 ? GEN_INT (offset
) : NULL_RTX
);
1295 set_mem_alias_set (mem
, 0);
1296 set_mem_size (mem
, NULL_RTX
);
1302 /* Built-in functions to perform an untyped call and return. */
1304 #define apply_args_mode \
1305 (this_target_builtins->x_apply_args_mode)
1306 #define apply_result_mode \
1307 (this_target_builtins->x_apply_result_mode)
1309 /* Return the size required for the block returned by __builtin_apply_args,
1310 and initialize apply_args_mode. */
1313 apply_args_size (void)
1315 static int size
= -1;
1318 enum machine_mode mode
;
1320 /* The values computed by this function never change. */
1323 /* The first value is the incoming arg-pointer. */
1324 size
= GET_MODE_SIZE (Pmode
);
1326 /* The second value is the structure value address unless this is
1327 passed as an "invisible" first argument. */
1328 if (targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0))
1329 size
+= GET_MODE_SIZE (Pmode
);
1331 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1332 if (FUNCTION_ARG_REGNO_P (regno
))
1334 mode
= targetm
.calls
.get_raw_arg_mode (regno
);
1336 gcc_assert (mode
!= VOIDmode
);
1338 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1339 if (size
% align
!= 0)
1340 size
= CEIL (size
, align
) * align
;
1341 size
+= GET_MODE_SIZE (mode
);
1342 apply_args_mode
[regno
] = mode
;
1346 apply_args_mode
[regno
] = VOIDmode
;
1352 /* Return the size required for the block returned by __builtin_apply,
1353 and initialize apply_result_mode. */
1356 apply_result_size (void)
1358 static int size
= -1;
1360 enum machine_mode mode
;
1362 /* The values computed by this function never change. */
1367 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1368 if (targetm
.calls
.function_value_regno_p (regno
))
1370 mode
= targetm
.calls
.get_raw_result_mode (regno
);
1372 gcc_assert (mode
!= VOIDmode
);
1374 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1375 if (size
% align
!= 0)
1376 size
= CEIL (size
, align
) * align
;
1377 size
+= GET_MODE_SIZE (mode
);
1378 apply_result_mode
[regno
] = mode
;
1381 apply_result_mode
[regno
] = VOIDmode
;
1383 /* Allow targets that use untyped_call and untyped_return to override
1384 the size so that machine-specific information can be stored here. */
1385 #ifdef APPLY_RESULT_SIZE
1386 size
= APPLY_RESULT_SIZE
;
1392 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1393 /* Create a vector describing the result block RESULT. If SAVEP is true,
1394 the result block is used to save the values; otherwise it is used to
1395 restore the values. */
1398 result_vector (int savep
, rtx result
)
1400 int regno
, size
, align
, nelts
;
1401 enum machine_mode mode
;
1403 rtx
*savevec
= XALLOCAVEC (rtx
, FIRST_PSEUDO_REGISTER
);
1406 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1407 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
1409 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1410 if (size
% align
!= 0)
1411 size
= CEIL (size
, align
) * align
;
1412 reg
= gen_rtx_REG (mode
, savep
? regno
: INCOMING_REGNO (regno
));
1413 mem
= adjust_address (result
, mode
, size
);
1414 savevec
[nelts
++] = (savep
1415 ? gen_rtx_SET (VOIDmode
, mem
, reg
)
1416 : gen_rtx_SET (VOIDmode
, reg
, mem
));
1417 size
+= GET_MODE_SIZE (mode
);
1419 return gen_rtx_PARALLEL (VOIDmode
, gen_rtvec_v (nelts
, savevec
));
1421 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1423 /* Save the state required to perform an untyped call with the same
1424 arguments as were passed to the current function. */
1427 expand_builtin_apply_args_1 (void)
1430 int size
, align
, regno
;
1431 enum machine_mode mode
;
1432 rtx struct_incoming_value
= targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 1);
1434 /* Create a block where the arg-pointer, structure value address,
1435 and argument registers can be saved. */
1436 registers
= assign_stack_local (BLKmode
, apply_args_size (), -1);
1438 /* Walk past the arg-pointer and structure value address. */
1439 size
= GET_MODE_SIZE (Pmode
);
1440 if (targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0))
1441 size
+= GET_MODE_SIZE (Pmode
);
1443 /* Save each register used in calling a function to the block. */
1444 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1445 if ((mode
= apply_args_mode
[regno
]) != VOIDmode
)
1447 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1448 if (size
% align
!= 0)
1449 size
= CEIL (size
, align
) * align
;
1451 tem
= gen_rtx_REG (mode
, INCOMING_REGNO (regno
));
1453 emit_move_insn (adjust_address (registers
, mode
, size
), tem
);
1454 size
+= GET_MODE_SIZE (mode
);
1457 /* Save the arg pointer to the block. */
1458 tem
= copy_to_reg (crtl
->args
.internal_arg_pointer
);
1459 #ifdef STACK_GROWS_DOWNWARD
1460 /* We need the pointer as the caller actually passed them to us, not
1461 as we might have pretended they were passed. Make sure it's a valid
1462 operand, as emit_move_insn isn't expected to handle a PLUS. */
1464 = force_operand (plus_constant (tem
, crtl
->args
.pretend_args_size
),
1467 emit_move_insn (adjust_address (registers
, Pmode
, 0), tem
);
1469 size
= GET_MODE_SIZE (Pmode
);
1471 /* Save the structure value address unless this is passed as an
1472 "invisible" first argument. */
1473 if (struct_incoming_value
)
1475 emit_move_insn (adjust_address (registers
, Pmode
, size
),
1476 copy_to_reg (struct_incoming_value
));
1477 size
+= GET_MODE_SIZE (Pmode
);
1480 /* Return the address of the block. */
1481 return copy_addr_to_reg (XEXP (registers
, 0));
1484 /* __builtin_apply_args returns block of memory allocated on
1485 the stack into which is stored the arg pointer, structure
1486 value address, static chain, and all the registers that might
1487 possibly be used in performing a function call. The code is
1488 moved to the start of the function so the incoming values are
1492 expand_builtin_apply_args (void)
1494 /* Don't do __builtin_apply_args more than once in a function.
1495 Save the result of the first call and reuse it. */
1496 if (apply_args_value
!= 0)
1497 return apply_args_value
;
1499 /* When this function is called, it means that registers must be
1500 saved on entry to this function. So we migrate the
1501 call to the first insn of this function. */
1506 temp
= expand_builtin_apply_args_1 ();
1510 apply_args_value
= temp
;
1512 /* Put the insns after the NOTE that starts the function.
1513 If this is inside a start_sequence, make the outer-level insn
1514 chain current, so the code is placed at the start of the
1515 function. If internal_arg_pointer is a non-virtual pseudo,
1516 it needs to be placed after the function that initializes
1518 push_topmost_sequence ();
1519 if (REG_P (crtl
->args
.internal_arg_pointer
)
1520 && REGNO (crtl
->args
.internal_arg_pointer
) > LAST_VIRTUAL_REGISTER
)
1521 emit_insn_before (seq
, parm_birth_insn
);
1523 emit_insn_before (seq
, NEXT_INSN (entry_of_function ()));
1524 pop_topmost_sequence ();
1529 /* Perform an untyped call and save the state required to perform an
1530 untyped return of whatever value was returned by the given function. */
1533 expand_builtin_apply (rtx function
, rtx arguments
, rtx argsize
)
1535 int size
, align
, regno
;
1536 enum machine_mode mode
;
1537 rtx incoming_args
, result
, reg
, dest
, src
, call_insn
;
1538 rtx old_stack_level
= 0;
1539 rtx call_fusage
= 0;
1540 rtx struct_value
= targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0);
1542 arguments
= convert_memory_address (Pmode
, arguments
);
1544 /* Create a block where the return registers can be saved. */
1545 result
= assign_stack_local (BLKmode
, apply_result_size (), -1);
1547 /* Fetch the arg pointer from the ARGUMENTS block. */
1548 incoming_args
= gen_reg_rtx (Pmode
);
1549 emit_move_insn (incoming_args
, gen_rtx_MEM (Pmode
, arguments
));
1550 #ifndef STACK_GROWS_DOWNWARD
1551 incoming_args
= expand_simple_binop (Pmode
, MINUS
, incoming_args
, argsize
,
1552 incoming_args
, 0, OPTAB_LIB_WIDEN
);
1555 /* Push a new argument block and copy the arguments. Do not allow
1556 the (potential) memcpy call below to interfere with our stack
1558 do_pending_stack_adjust ();
1561 /* Save the stack with nonlocal if available. */
1562 #ifdef HAVE_save_stack_nonlocal
1563 if (HAVE_save_stack_nonlocal
)
1564 emit_stack_save (SAVE_NONLOCAL
, &old_stack_level
, NULL_RTX
);
1567 emit_stack_save (SAVE_BLOCK
, &old_stack_level
, NULL_RTX
);
1569 /* Allocate a block of memory onto the stack and copy the memory
1570 arguments to the outgoing arguments address. We can pass TRUE
1571 as the 4th argument because we just saved the stack pointer
1572 and will restore it right after the call. */
1573 allocate_dynamic_stack_space (argsize
, 0, BIGGEST_ALIGNMENT
, true);
1575 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1576 may have already set current_function_calls_alloca to true.
1577 current_function_calls_alloca won't be set if argsize is zero,
1578 so we have to guarantee need_drap is true here. */
1579 if (SUPPORTS_STACK_ALIGNMENT
)
1580 crtl
->need_drap
= true;
1582 dest
= virtual_outgoing_args_rtx
;
1583 #ifndef STACK_GROWS_DOWNWARD
1584 if (CONST_INT_P (argsize
))
1585 dest
= plus_constant (dest
, -INTVAL (argsize
));
1587 dest
= gen_rtx_PLUS (Pmode
, dest
, negate_rtx (Pmode
, argsize
));
1589 dest
= gen_rtx_MEM (BLKmode
, dest
);
1590 set_mem_align (dest
, PARM_BOUNDARY
);
1591 src
= gen_rtx_MEM (BLKmode
, incoming_args
);
1592 set_mem_align (src
, PARM_BOUNDARY
);
1593 emit_block_move (dest
, src
, argsize
, BLOCK_OP_NORMAL
);
1595 /* Refer to the argument block. */
1597 arguments
= gen_rtx_MEM (BLKmode
, arguments
);
1598 set_mem_align (arguments
, PARM_BOUNDARY
);
1600 /* Walk past the arg-pointer and structure value address. */
1601 size
= GET_MODE_SIZE (Pmode
);
1603 size
+= GET_MODE_SIZE (Pmode
);
1605 /* Restore each of the registers previously saved. Make USE insns
1606 for each of these registers for use in making the call. */
1607 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1608 if ((mode
= apply_args_mode
[regno
]) != VOIDmode
)
1610 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1611 if (size
% align
!= 0)
1612 size
= CEIL (size
, align
) * align
;
1613 reg
= gen_rtx_REG (mode
, regno
);
1614 emit_move_insn (reg
, adjust_address (arguments
, mode
, size
));
1615 use_reg (&call_fusage
, reg
);
1616 size
+= GET_MODE_SIZE (mode
);
1619 /* Restore the structure value address unless this is passed as an
1620 "invisible" first argument. */
1621 size
= GET_MODE_SIZE (Pmode
);
1624 rtx value
= gen_reg_rtx (Pmode
);
1625 emit_move_insn (value
, adjust_address (arguments
, Pmode
, size
));
1626 emit_move_insn (struct_value
, value
);
1627 if (REG_P (struct_value
))
1628 use_reg (&call_fusage
, struct_value
);
1629 size
+= GET_MODE_SIZE (Pmode
);
1632 /* All arguments and registers used for the call are set up by now! */
1633 function
= prepare_call_address (NULL
, function
, NULL
, &call_fusage
, 0, 0);
1635 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1636 and we don't want to load it into a register as an optimization,
1637 because prepare_call_address already did it if it should be done. */
1638 if (GET_CODE (function
) != SYMBOL_REF
)
1639 function
= memory_address (FUNCTION_MODE
, function
);
1641 /* Generate the actual call instruction and save the return value. */
1642 #ifdef HAVE_untyped_call
1643 if (HAVE_untyped_call
)
1644 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE
, function
),
1645 result
, result_vector (1, result
)));
1648 #ifdef HAVE_call_value
1649 if (HAVE_call_value
)
1653 /* Locate the unique return register. It is not possible to
1654 express a call that sets more than one return register using
1655 call_value; use untyped_call for that. In fact, untyped_call
1656 only needs to save the return registers in the given block. */
1657 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1658 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
1660 gcc_assert (!valreg
); /* HAVE_untyped_call required. */
1662 valreg
= gen_rtx_REG (mode
, regno
);
1665 emit_call_insn (GEN_CALL_VALUE (valreg
,
1666 gen_rtx_MEM (FUNCTION_MODE
, function
),
1667 const0_rtx
, NULL_RTX
, const0_rtx
));
1669 emit_move_insn (adjust_address (result
, GET_MODE (valreg
), 0), valreg
);
1675 /* Find the CALL insn we just emitted, and attach the register usage
1677 call_insn
= last_call_insn ();
1678 add_function_usage_to (call_insn
, call_fusage
);
1680 /* Restore the stack. */
1681 #ifdef HAVE_save_stack_nonlocal
1682 if (HAVE_save_stack_nonlocal
)
1683 emit_stack_restore (SAVE_NONLOCAL
, old_stack_level
, NULL_RTX
);
1686 emit_stack_restore (SAVE_BLOCK
, old_stack_level
, NULL_RTX
);
1690 /* Return the address of the result block. */
1691 result
= copy_addr_to_reg (XEXP (result
, 0));
1692 return convert_memory_address (ptr_mode
, result
);
1695 /* Perform an untyped return. */
1698 expand_builtin_return (rtx result
)
1700 int size
, align
, regno
;
1701 enum machine_mode mode
;
1703 rtx call_fusage
= 0;
1705 result
= convert_memory_address (Pmode
, result
);
1707 apply_result_size ();
1708 result
= gen_rtx_MEM (BLKmode
, result
);
1710 #ifdef HAVE_untyped_return
1711 if (HAVE_untyped_return
)
1713 emit_jump_insn (gen_untyped_return (result
, result_vector (0, result
)));
1719 /* Restore the return value and note that each value is used. */
1721 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1722 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
1724 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1725 if (size
% align
!= 0)
1726 size
= CEIL (size
, align
) * align
;
1727 reg
= gen_rtx_REG (mode
, INCOMING_REGNO (regno
));
1728 emit_move_insn (reg
, adjust_address (result
, mode
, size
));
1730 push_to_sequence (call_fusage
);
1732 call_fusage
= get_insns ();
1734 size
+= GET_MODE_SIZE (mode
);
1737 /* Put the USE insns before the return. */
1738 emit_insn (call_fusage
);
1740 /* Return whatever values was restored by jumping directly to the end
1742 expand_naked_return ();
1745 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1747 static enum type_class
1748 type_to_class (tree type
)
1750 switch (TREE_CODE (type
))
1752 case VOID_TYPE
: return void_type_class
;
1753 case INTEGER_TYPE
: return integer_type_class
;
1754 case ENUMERAL_TYPE
: return enumeral_type_class
;
1755 case BOOLEAN_TYPE
: return boolean_type_class
;
1756 case POINTER_TYPE
: return pointer_type_class
;
1757 case REFERENCE_TYPE
: return reference_type_class
;
1758 case OFFSET_TYPE
: return offset_type_class
;
1759 case REAL_TYPE
: return real_type_class
;
1760 case COMPLEX_TYPE
: return complex_type_class
;
1761 case FUNCTION_TYPE
: return function_type_class
;
1762 case METHOD_TYPE
: return method_type_class
;
1763 case RECORD_TYPE
: return record_type_class
;
1765 case QUAL_UNION_TYPE
: return union_type_class
;
1766 case ARRAY_TYPE
: return (TYPE_STRING_FLAG (type
)
1767 ? string_type_class
: array_type_class
);
1768 case LANG_TYPE
: return lang_type_class
;
1769 default: return no_type_class
;
1773 /* Expand a call EXP to __builtin_classify_type. */
1776 expand_builtin_classify_type (tree exp
)
1778 if (call_expr_nargs (exp
))
1779 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp
, 0))));
1780 return GEN_INT (no_type_class
);
1783 /* This helper macro, meant to be used in mathfn_built_in below,
1784 determines which among a set of three builtin math functions is
1785 appropriate for a given type mode. The `F' and `L' cases are
1786 automatically generated from the `double' case. */
1787 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1788 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1789 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1790 fcodel = BUILT_IN_MATHFN##L ; break;
1791 /* Similar to above, but appends _R after any F/L suffix. */
1792 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1793 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1794 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1795 fcodel = BUILT_IN_MATHFN##L_R ; break;
1797 /* Return mathematic function equivalent to FN but operating directly
1798 on TYPE, if available. If IMPLICIT is true find the function in
1799 implicit_built_in_decls[], otherwise use built_in_decls[]. If we
1800 can't do the conversion, return zero. */
1803 mathfn_built_in_1 (tree type
, enum built_in_function fn
, bool implicit
)
1805 tree
const *const fn_arr
1806 = implicit
? implicit_built_in_decls
: built_in_decls
;
1807 enum built_in_function fcode
, fcodef
, fcodel
;
1811 CASE_MATHFN (BUILT_IN_ACOS
)
1812 CASE_MATHFN (BUILT_IN_ACOSH
)
1813 CASE_MATHFN (BUILT_IN_ASIN
)
1814 CASE_MATHFN (BUILT_IN_ASINH
)
1815 CASE_MATHFN (BUILT_IN_ATAN
)
1816 CASE_MATHFN (BUILT_IN_ATAN2
)
1817 CASE_MATHFN (BUILT_IN_ATANH
)
1818 CASE_MATHFN (BUILT_IN_CBRT
)
1819 CASE_MATHFN (BUILT_IN_CEIL
)
1820 CASE_MATHFN (BUILT_IN_CEXPI
)
1821 CASE_MATHFN (BUILT_IN_COPYSIGN
)
1822 CASE_MATHFN (BUILT_IN_COS
)
1823 CASE_MATHFN (BUILT_IN_COSH
)
1824 CASE_MATHFN (BUILT_IN_DREM
)
1825 CASE_MATHFN (BUILT_IN_ERF
)
1826 CASE_MATHFN (BUILT_IN_ERFC
)
1827 CASE_MATHFN (BUILT_IN_EXP
)
1828 CASE_MATHFN (BUILT_IN_EXP10
)
1829 CASE_MATHFN (BUILT_IN_EXP2
)
1830 CASE_MATHFN (BUILT_IN_EXPM1
)
1831 CASE_MATHFN (BUILT_IN_FABS
)
1832 CASE_MATHFN (BUILT_IN_FDIM
)
1833 CASE_MATHFN (BUILT_IN_FLOOR
)
1834 CASE_MATHFN (BUILT_IN_FMA
)
1835 CASE_MATHFN (BUILT_IN_FMAX
)
1836 CASE_MATHFN (BUILT_IN_FMIN
)
1837 CASE_MATHFN (BUILT_IN_FMOD
)
1838 CASE_MATHFN (BUILT_IN_FREXP
)
1839 CASE_MATHFN (BUILT_IN_GAMMA
)
1840 CASE_MATHFN_REENT (BUILT_IN_GAMMA
) /* GAMMA_R */
1841 CASE_MATHFN (BUILT_IN_HUGE_VAL
)
1842 CASE_MATHFN (BUILT_IN_HYPOT
)
1843 CASE_MATHFN (BUILT_IN_ILOGB
)
1844 CASE_MATHFN (BUILT_IN_INF
)
1845 CASE_MATHFN (BUILT_IN_ISINF
)
1846 CASE_MATHFN (BUILT_IN_J0
)
1847 CASE_MATHFN (BUILT_IN_J1
)
1848 CASE_MATHFN (BUILT_IN_JN
)
1849 CASE_MATHFN (BUILT_IN_LCEIL
)
1850 CASE_MATHFN (BUILT_IN_LDEXP
)
1851 CASE_MATHFN (BUILT_IN_LFLOOR
)
1852 CASE_MATHFN (BUILT_IN_LGAMMA
)
1853 CASE_MATHFN_REENT (BUILT_IN_LGAMMA
) /* LGAMMA_R */
1854 CASE_MATHFN (BUILT_IN_LLCEIL
)
1855 CASE_MATHFN (BUILT_IN_LLFLOOR
)
1856 CASE_MATHFN (BUILT_IN_LLRINT
)
1857 CASE_MATHFN (BUILT_IN_LLROUND
)
1858 CASE_MATHFN (BUILT_IN_LOG
)
1859 CASE_MATHFN (BUILT_IN_LOG10
)
1860 CASE_MATHFN (BUILT_IN_LOG1P
)
1861 CASE_MATHFN (BUILT_IN_LOG2
)
1862 CASE_MATHFN (BUILT_IN_LOGB
)
1863 CASE_MATHFN (BUILT_IN_LRINT
)
1864 CASE_MATHFN (BUILT_IN_LROUND
)
1865 CASE_MATHFN (BUILT_IN_MODF
)
1866 CASE_MATHFN (BUILT_IN_NAN
)
1867 CASE_MATHFN (BUILT_IN_NANS
)
1868 CASE_MATHFN (BUILT_IN_NEARBYINT
)
1869 CASE_MATHFN (BUILT_IN_NEXTAFTER
)
1870 CASE_MATHFN (BUILT_IN_NEXTTOWARD
)
1871 CASE_MATHFN (BUILT_IN_POW
)
1872 CASE_MATHFN (BUILT_IN_POWI
)
1873 CASE_MATHFN (BUILT_IN_POW10
)
1874 CASE_MATHFN (BUILT_IN_REMAINDER
)
1875 CASE_MATHFN (BUILT_IN_REMQUO
)
1876 CASE_MATHFN (BUILT_IN_RINT
)
1877 CASE_MATHFN (BUILT_IN_ROUND
)
1878 CASE_MATHFN (BUILT_IN_SCALB
)
1879 CASE_MATHFN (BUILT_IN_SCALBLN
)
1880 CASE_MATHFN (BUILT_IN_SCALBN
)
1881 CASE_MATHFN (BUILT_IN_SIGNBIT
)
1882 CASE_MATHFN (BUILT_IN_SIGNIFICAND
)
1883 CASE_MATHFN (BUILT_IN_SIN
)
1884 CASE_MATHFN (BUILT_IN_SINCOS
)
1885 CASE_MATHFN (BUILT_IN_SINH
)
1886 CASE_MATHFN (BUILT_IN_SQRT
)
1887 CASE_MATHFN (BUILT_IN_TAN
)
1888 CASE_MATHFN (BUILT_IN_TANH
)
1889 CASE_MATHFN (BUILT_IN_TGAMMA
)
1890 CASE_MATHFN (BUILT_IN_TRUNC
)
1891 CASE_MATHFN (BUILT_IN_Y0
)
1892 CASE_MATHFN (BUILT_IN_Y1
)
1893 CASE_MATHFN (BUILT_IN_YN
)
1899 if (TYPE_MAIN_VARIANT (type
) == double_type_node
)
1900 return fn_arr
[fcode
];
1901 else if (TYPE_MAIN_VARIANT (type
) == float_type_node
)
1902 return fn_arr
[fcodef
];
1903 else if (TYPE_MAIN_VARIANT (type
) == long_double_type_node
)
1904 return fn_arr
[fcodel
];
1909 /* Like mathfn_built_in_1(), but always use the implicit array. */
1912 mathfn_built_in (tree type
, enum built_in_function fn
)
1914 return mathfn_built_in_1 (type
, fn
, /*implicit=*/ 1);
1917 /* If errno must be maintained, expand the RTL to check if the result,
1918 TARGET, of a built-in function call, EXP, is NaN, and if so set
1922 expand_errno_check (tree exp
, rtx target
)
1924 rtx lab
= gen_label_rtx ();
1926 /* Test the result; if it is NaN, set errno=EDOM because
1927 the argument was not in the domain. */
1928 do_compare_rtx_and_jump (target
, target
, EQ
, 0, GET_MODE (target
),
1929 NULL_RTX
, NULL_RTX
, lab
,
1930 /* The jump is very likely. */
1931 REG_BR_PROB_BASE
- (REG_BR_PROB_BASE
/ 2000 - 1));
1934 /* If this built-in doesn't throw an exception, set errno directly. */
1935 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp
), 0)))
1937 #ifdef GEN_ERRNO_RTX
1938 rtx errno_rtx
= GEN_ERRNO_RTX
;
1941 = gen_rtx_MEM (word_mode
, gen_rtx_SYMBOL_REF (Pmode
, "errno"));
1943 emit_move_insn (errno_rtx
, GEN_INT (TARGET_EDOM
));
1949 /* Make sure the library call isn't expanded as a tail call. */
1950 CALL_EXPR_TAILCALL (exp
) = 0;
1952 /* We can't set errno=EDOM directly; let the library call do it.
1953 Pop the arguments right away in case the call gets deleted. */
1955 expand_call (exp
, target
, 0);
1960 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1961 Return NULL_RTX if a normal call should be emitted rather than expanding
1962 the function in-line. EXP is the expression that is a call to the builtin
1963 function; if convenient, the result should be placed in TARGET.
1964 SUBTARGET may be used as the target for computing one of EXP's operands. */
1967 expand_builtin_mathfn (tree exp
, rtx target
, rtx subtarget
)
1969 optab builtin_optab
;
1971 tree fndecl
= get_callee_fndecl (exp
);
1972 enum machine_mode mode
;
1973 bool errno_set
= false;
1976 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
1979 arg
= CALL_EXPR_ARG (exp
, 0);
1981 switch (DECL_FUNCTION_CODE (fndecl
))
1983 CASE_FLT_FN (BUILT_IN_SQRT
):
1984 errno_set
= ! tree_expr_nonnegative_p (arg
);
1985 builtin_optab
= sqrt_optab
;
1987 CASE_FLT_FN (BUILT_IN_EXP
):
1988 errno_set
= true; builtin_optab
= exp_optab
; break;
1989 CASE_FLT_FN (BUILT_IN_EXP10
):
1990 CASE_FLT_FN (BUILT_IN_POW10
):
1991 errno_set
= true; builtin_optab
= exp10_optab
; break;
1992 CASE_FLT_FN (BUILT_IN_EXP2
):
1993 errno_set
= true; builtin_optab
= exp2_optab
; break;
1994 CASE_FLT_FN (BUILT_IN_EXPM1
):
1995 errno_set
= true; builtin_optab
= expm1_optab
; break;
1996 CASE_FLT_FN (BUILT_IN_LOGB
):
1997 errno_set
= true; builtin_optab
= logb_optab
; break;
1998 CASE_FLT_FN (BUILT_IN_LOG
):
1999 errno_set
= true; builtin_optab
= log_optab
; break;
2000 CASE_FLT_FN (BUILT_IN_LOG10
):
2001 errno_set
= true; builtin_optab
= log10_optab
; break;
2002 CASE_FLT_FN (BUILT_IN_LOG2
):
2003 errno_set
= true; builtin_optab
= log2_optab
; break;
2004 CASE_FLT_FN (BUILT_IN_LOG1P
):
2005 errno_set
= true; builtin_optab
= log1p_optab
; break;
2006 CASE_FLT_FN (BUILT_IN_ASIN
):
2007 builtin_optab
= asin_optab
; break;
2008 CASE_FLT_FN (BUILT_IN_ACOS
):
2009 builtin_optab
= acos_optab
; break;
2010 CASE_FLT_FN (BUILT_IN_TAN
):
2011 builtin_optab
= tan_optab
; break;
2012 CASE_FLT_FN (BUILT_IN_ATAN
):
2013 builtin_optab
= atan_optab
; break;
2014 CASE_FLT_FN (BUILT_IN_FLOOR
):
2015 builtin_optab
= floor_optab
; break;
2016 CASE_FLT_FN (BUILT_IN_CEIL
):
2017 builtin_optab
= ceil_optab
; break;
2018 CASE_FLT_FN (BUILT_IN_TRUNC
):
2019 builtin_optab
= btrunc_optab
; break;
2020 CASE_FLT_FN (BUILT_IN_ROUND
):
2021 builtin_optab
= round_optab
; break;
2022 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
2023 builtin_optab
= nearbyint_optab
;
2024 if (flag_trapping_math
)
2026 /* Else fallthrough and expand as rint. */
2027 CASE_FLT_FN (BUILT_IN_RINT
):
2028 builtin_optab
= rint_optab
; break;
2029 CASE_FLT_FN (BUILT_IN_SIGNIFICAND
):
2030 builtin_optab
= significand_optab
; break;
2035 /* Make a suitable register to place result in. */
2036 mode
= TYPE_MODE (TREE_TYPE (exp
));
2038 if (! flag_errno_math
|| ! HONOR_NANS (mode
))
2041 /* Before working hard, check whether the instruction is available. */
2042 if (optab_handler (builtin_optab
, mode
) != CODE_FOR_nothing
2043 && (!errno_set
|| !optimize_insn_for_size_p ()))
2045 target
= gen_reg_rtx (mode
);
2047 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2048 need to expand the argument again. This way, we will not perform
2049 side-effects more the once. */
2050 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2052 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2056 /* Compute into TARGET.
2057 Set TARGET to wherever the result comes back. */
2058 target
= expand_unop (mode
, builtin_optab
, op0
, target
, 0);
2063 expand_errno_check (exp
, target
);
2065 /* Output the entire sequence. */
2066 insns
= get_insns ();
2072 /* If we were unable to expand via the builtin, stop the sequence
2073 (without outputting the insns) and call to the library function
2074 with the stabilized argument list. */
2078 return expand_call (exp
, target
, target
== const0_rtx
);
2081 /* Expand a call to the builtin binary math functions (pow and atan2).
2082 Return NULL_RTX if a normal call should be emitted rather than expanding the
2083 function in-line. EXP is the expression that is a call to the builtin
2084 function; if convenient, the result should be placed in TARGET.
2085 SUBTARGET may be used as the target for computing one of EXP's
2089 expand_builtin_mathfn_2 (tree exp
, rtx target
, rtx subtarget
)
2091 optab builtin_optab
;
2092 rtx op0
, op1
, insns
;
2093 int op1_type
= REAL_TYPE
;
2094 tree fndecl
= get_callee_fndecl (exp
);
2096 enum machine_mode mode
;
2097 bool errno_set
= true;
2099 switch (DECL_FUNCTION_CODE (fndecl
))
2101 CASE_FLT_FN (BUILT_IN_SCALBN
):
2102 CASE_FLT_FN (BUILT_IN_SCALBLN
):
2103 CASE_FLT_FN (BUILT_IN_LDEXP
):
2104 op1_type
= INTEGER_TYPE
;
2109 if (!validate_arglist (exp
, REAL_TYPE
, op1_type
, VOID_TYPE
))
2112 arg0
= CALL_EXPR_ARG (exp
, 0);
2113 arg1
= CALL_EXPR_ARG (exp
, 1);
2115 switch (DECL_FUNCTION_CODE (fndecl
))
2117 CASE_FLT_FN (BUILT_IN_POW
):
2118 builtin_optab
= pow_optab
; break;
2119 CASE_FLT_FN (BUILT_IN_ATAN2
):
2120 builtin_optab
= atan2_optab
; break;
2121 CASE_FLT_FN (BUILT_IN_SCALB
):
2122 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp
)))->b
!= 2)
2124 builtin_optab
= scalb_optab
; break;
2125 CASE_FLT_FN (BUILT_IN_SCALBN
):
2126 CASE_FLT_FN (BUILT_IN_SCALBLN
):
2127 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp
)))->b
!= 2)
2129 /* Fall through... */
2130 CASE_FLT_FN (BUILT_IN_LDEXP
):
2131 builtin_optab
= ldexp_optab
; break;
2132 CASE_FLT_FN (BUILT_IN_FMOD
):
2133 builtin_optab
= fmod_optab
; break;
2134 CASE_FLT_FN (BUILT_IN_REMAINDER
):
2135 CASE_FLT_FN (BUILT_IN_DREM
):
2136 builtin_optab
= remainder_optab
; break;
2141 /* Make a suitable register to place result in. */
2142 mode
= TYPE_MODE (TREE_TYPE (exp
));
2144 /* Before working hard, check whether the instruction is available. */
2145 if (optab_handler (builtin_optab
, mode
) == CODE_FOR_nothing
)
2148 target
= gen_reg_rtx (mode
);
2150 if (! flag_errno_math
|| ! HONOR_NANS (mode
))
2153 if (errno_set
&& optimize_insn_for_size_p ())
2156 /* Always stabilize the argument list. */
2157 CALL_EXPR_ARG (exp
, 0) = arg0
= builtin_save_expr (arg0
);
2158 CALL_EXPR_ARG (exp
, 1) = arg1
= builtin_save_expr (arg1
);
2160 op0
= expand_expr (arg0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2161 op1
= expand_normal (arg1
);
2165 /* Compute into TARGET.
2166 Set TARGET to wherever the result comes back. */
2167 target
= expand_binop (mode
, builtin_optab
, op0
, op1
,
2168 target
, 0, OPTAB_DIRECT
);
2170 /* If we were unable to expand via the builtin, stop the sequence
2171 (without outputting the insns) and call to the library function
2172 with the stabilized argument list. */
2176 return expand_call (exp
, target
, target
== const0_rtx
);
2180 expand_errno_check (exp
, target
);
2182 /* Output the entire sequence. */
2183 insns
= get_insns ();
2190 /* Expand a call to the builtin trinary math functions (fma).
2191 Return NULL_RTX if a normal call should be emitted rather than expanding the
2192 function in-line. EXP is the expression that is a call to the builtin
2193 function; if convenient, the result should be placed in TARGET.
2194 SUBTARGET may be used as the target for computing one of EXP's
2198 expand_builtin_mathfn_ternary (tree exp
, rtx target
, rtx subtarget
)
2200 optab builtin_optab
;
2201 rtx op0
, op1
, op2
, insns
;
2202 tree fndecl
= get_callee_fndecl (exp
);
2203 tree arg0
, arg1
, arg2
;
2204 enum machine_mode mode
;
2206 if (!validate_arglist (exp
, REAL_TYPE
, REAL_TYPE
, REAL_TYPE
, VOID_TYPE
))
2209 arg0
= CALL_EXPR_ARG (exp
, 0);
2210 arg1
= CALL_EXPR_ARG (exp
, 1);
2211 arg2
= CALL_EXPR_ARG (exp
, 2);
2213 switch (DECL_FUNCTION_CODE (fndecl
))
2215 CASE_FLT_FN (BUILT_IN_FMA
):
2216 builtin_optab
= fma_optab
; break;
2221 /* Make a suitable register to place result in. */
2222 mode
= TYPE_MODE (TREE_TYPE (exp
));
2224 /* Before working hard, check whether the instruction is available. */
2225 if (optab_handler (builtin_optab
, mode
) == CODE_FOR_nothing
)
2228 target
= gen_reg_rtx (mode
);
2230 /* Always stabilize the argument list. */
2231 CALL_EXPR_ARG (exp
, 0) = arg0
= builtin_save_expr (arg0
);
2232 CALL_EXPR_ARG (exp
, 1) = arg1
= builtin_save_expr (arg1
);
2233 CALL_EXPR_ARG (exp
, 2) = arg2
= builtin_save_expr (arg2
);
2235 op0
= expand_expr (arg0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2236 op1
= expand_normal (arg1
);
2237 op2
= expand_normal (arg2
);
2241 /* Compute into TARGET.
2242 Set TARGET to wherever the result comes back. */
2243 target
= expand_ternary_op (mode
, builtin_optab
, op0
, op1
, op2
,
2246 /* If we were unable to expand via the builtin, stop the sequence
2247 (without outputting the insns) and call to the library function
2248 with the stabilized argument list. */
2252 return expand_call (exp
, target
, target
== const0_rtx
);
2255 /* Output the entire sequence. */
2256 insns
= get_insns ();
2263 /* Expand a call to the builtin sin and cos math functions.
2264 Return NULL_RTX if a normal call should be emitted rather than expanding the
2265 function in-line. EXP is the expression that is a call to the builtin
2266 function; if convenient, the result should be placed in TARGET.
2267 SUBTARGET may be used as the target for computing one of EXP's
2271 expand_builtin_mathfn_3 (tree exp
, rtx target
, rtx subtarget
)
2273 optab builtin_optab
;
2275 tree fndecl
= get_callee_fndecl (exp
);
2276 enum machine_mode mode
;
2279 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2282 arg
= CALL_EXPR_ARG (exp
, 0);
2284 switch (DECL_FUNCTION_CODE (fndecl
))
2286 CASE_FLT_FN (BUILT_IN_SIN
):
2287 CASE_FLT_FN (BUILT_IN_COS
):
2288 builtin_optab
= sincos_optab
; break;
2293 /* Make a suitable register to place result in. */
2294 mode
= TYPE_MODE (TREE_TYPE (exp
));
2296 /* Check if sincos insn is available, otherwise fallback
2297 to sin or cos insn. */
2298 if (optab_handler (builtin_optab
, mode
) == CODE_FOR_nothing
)
2299 switch (DECL_FUNCTION_CODE (fndecl
))
2301 CASE_FLT_FN (BUILT_IN_SIN
):
2302 builtin_optab
= sin_optab
; break;
2303 CASE_FLT_FN (BUILT_IN_COS
):
2304 builtin_optab
= cos_optab
; break;
2309 /* Before working hard, check whether the instruction is available. */
2310 if (optab_handler (builtin_optab
, mode
) != CODE_FOR_nothing
)
2312 target
= gen_reg_rtx (mode
);
2314 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2315 need to expand the argument again. This way, we will not perform
2316 side-effects more the once. */
2317 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2319 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2323 /* Compute into TARGET.
2324 Set TARGET to wherever the result comes back. */
2325 if (builtin_optab
== sincos_optab
)
2329 switch (DECL_FUNCTION_CODE (fndecl
))
2331 CASE_FLT_FN (BUILT_IN_SIN
):
2332 result
= expand_twoval_unop (builtin_optab
, op0
, 0, target
, 0);
2334 CASE_FLT_FN (BUILT_IN_COS
):
2335 result
= expand_twoval_unop (builtin_optab
, op0
, target
, 0, 0);
2340 gcc_assert (result
);
2344 target
= expand_unop (mode
, builtin_optab
, op0
, target
, 0);
2349 /* Output the entire sequence. */
2350 insns
= get_insns ();
2356 /* If we were unable to expand via the builtin, stop the sequence
2357 (without outputting the insns) and call to the library function
2358 with the stabilized argument list. */
2362 target
= expand_call (exp
, target
, target
== const0_rtx
);
2367 /* Given an interclass math builtin decl FNDECL and it's argument ARG
2368 return an RTL instruction code that implements the functionality.
2369 If that isn't possible or available return CODE_FOR_nothing. */
2371 static enum insn_code
2372 interclass_mathfn_icode (tree arg
, tree fndecl
)
2374 bool errno_set
= false;
2375 optab builtin_optab
= 0;
2376 enum machine_mode mode
;
2378 switch (DECL_FUNCTION_CODE (fndecl
))
2380 CASE_FLT_FN (BUILT_IN_ILOGB
):
2381 errno_set
= true; builtin_optab
= ilogb_optab
; break;
2382 CASE_FLT_FN (BUILT_IN_ISINF
):
2383 builtin_optab
= isinf_optab
; break;
2384 case BUILT_IN_ISNORMAL
:
2385 case BUILT_IN_ISFINITE
:
2386 CASE_FLT_FN (BUILT_IN_FINITE
):
2387 case BUILT_IN_FINITED32
:
2388 case BUILT_IN_FINITED64
:
2389 case BUILT_IN_FINITED128
:
2390 case BUILT_IN_ISINFD32
:
2391 case BUILT_IN_ISINFD64
:
2392 case BUILT_IN_ISINFD128
:
2393 /* These builtins have no optabs (yet). */
2399 /* There's no easy way to detect the case we need to set EDOM. */
2400 if (flag_errno_math
&& errno_set
)
2401 return CODE_FOR_nothing
;
2403 /* Optab mode depends on the mode of the input argument. */
2404 mode
= TYPE_MODE (TREE_TYPE (arg
));
2407 return optab_handler (builtin_optab
, mode
);
2408 return CODE_FOR_nothing
;
2411 /* Expand a call to one of the builtin math functions that operate on
2412 floating point argument and output an integer result (ilogb, isinf,
2414 Return 0 if a normal call should be emitted rather than expanding the
2415 function in-line. EXP is the expression that is a call to the builtin
2416 function; if convenient, the result should be placed in TARGET. */
2419 expand_builtin_interclass_mathfn (tree exp
, rtx target
)
2421 enum insn_code icode
= CODE_FOR_nothing
;
2423 tree fndecl
= get_callee_fndecl (exp
);
2424 enum machine_mode mode
;
2427 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2430 arg
= CALL_EXPR_ARG (exp
, 0);
2431 icode
= interclass_mathfn_icode (arg
, fndecl
);
2432 mode
= TYPE_MODE (TREE_TYPE (arg
));
2434 if (icode
!= CODE_FOR_nothing
)
2436 rtx last
= get_last_insn ();
2437 tree orig_arg
= arg
;
2438 /* Make a suitable register to place result in. */
2440 || GET_MODE (target
) != TYPE_MODE (TREE_TYPE (exp
))
2441 || !insn_data
[icode
].operand
[0].predicate (target
, GET_MODE (target
)))
2442 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
2444 gcc_assert (insn_data
[icode
].operand
[0].predicate
2445 (target
, GET_MODE (target
)));
2447 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2448 need to expand the argument again. This way, we will not perform
2449 side-effects more the once. */
2450 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2452 op0
= expand_expr (arg
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
2454 if (mode
!= GET_MODE (op0
))
2455 op0
= convert_to_mode (mode
, op0
, 0);
2457 /* Compute into TARGET.
2458 Set TARGET to wherever the result comes back. */
2459 if (maybe_emit_unop_insn (icode
, target
, op0
, UNKNOWN
))
2461 delete_insns_since (last
);
2462 CALL_EXPR_ARG (exp
, 0) = orig_arg
;
2468 /* Expand a call to the builtin sincos math function.
2469 Return NULL_RTX if a normal call should be emitted rather than expanding the
2470 function in-line. EXP is the expression that is a call to the builtin
2474 expand_builtin_sincos (tree exp
)
2476 rtx op0
, op1
, op2
, target1
, target2
;
2477 enum machine_mode mode
;
2478 tree arg
, sinp
, cosp
;
2480 location_t loc
= EXPR_LOCATION (exp
);
2481 tree alias_type
, alias_off
;
2483 if (!validate_arglist (exp
, REAL_TYPE
,
2484 POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
2487 arg
= CALL_EXPR_ARG (exp
, 0);
2488 sinp
= CALL_EXPR_ARG (exp
, 1);
2489 cosp
= CALL_EXPR_ARG (exp
, 2);
2491 /* Make a suitable register to place result in. */
2492 mode
= TYPE_MODE (TREE_TYPE (arg
));
2494 /* Check if sincos insn is available, otherwise emit the call. */
2495 if (optab_handler (sincos_optab
, mode
) == CODE_FOR_nothing
)
2498 target1
= gen_reg_rtx (mode
);
2499 target2
= gen_reg_rtx (mode
);
2501 op0
= expand_normal (arg
);
2502 alias_type
= build_pointer_type_for_mode (TREE_TYPE (arg
), ptr_mode
, true);
2503 alias_off
= build_int_cst (alias_type
, 0);
2504 op1
= expand_normal (fold_build2_loc (loc
, MEM_REF
, TREE_TYPE (arg
),
2506 op2
= expand_normal (fold_build2_loc (loc
, MEM_REF
, TREE_TYPE (arg
),
2509 /* Compute into target1 and target2.
2510 Set TARGET to wherever the result comes back. */
2511 result
= expand_twoval_unop (sincos_optab
, op0
, target2
, target1
, 0);
2512 gcc_assert (result
);
2514 /* Move target1 and target2 to the memory locations indicated
2516 emit_move_insn (op1
, target1
);
2517 emit_move_insn (op2
, target2
);
2522 /* Expand a call to the internal cexpi builtin to the sincos math function.
2523 EXP is the expression that is a call to the builtin function; if convenient,
2524 the result should be placed in TARGET. */
2527 expand_builtin_cexpi (tree exp
, rtx target
)
2529 tree fndecl
= get_callee_fndecl (exp
);
2531 enum machine_mode mode
;
2533 location_t loc
= EXPR_LOCATION (exp
);
2535 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2538 arg
= CALL_EXPR_ARG (exp
, 0);
2539 type
= TREE_TYPE (arg
);
2540 mode
= TYPE_MODE (TREE_TYPE (arg
));
2542 /* Try expanding via a sincos optab, fall back to emitting a libcall
2543 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2544 is only generated from sincos, cexp or if we have either of them. */
2545 if (optab_handler (sincos_optab
, mode
) != CODE_FOR_nothing
)
2547 op1
= gen_reg_rtx (mode
);
2548 op2
= gen_reg_rtx (mode
);
2550 op0
= expand_expr (arg
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
2552 /* Compute into op1 and op2. */
2553 expand_twoval_unop (sincos_optab
, op0
, op2
, op1
, 0);
2555 else if (TARGET_HAS_SINCOS
)
2557 tree call
, fn
= NULL_TREE
;
2561 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2562 fn
= built_in_decls
[BUILT_IN_SINCOSF
];
2563 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2564 fn
= built_in_decls
[BUILT_IN_SINCOS
];
2565 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2566 fn
= built_in_decls
[BUILT_IN_SINCOSL
];
2570 op1
= assign_temp (TREE_TYPE (arg
), 0, 1, 1);
2571 op2
= assign_temp (TREE_TYPE (arg
), 0, 1, 1);
2572 op1a
= copy_to_mode_reg (Pmode
, XEXP (op1
, 0));
2573 op2a
= copy_to_mode_reg (Pmode
, XEXP (op2
, 0));
2574 top1
= make_tree (build_pointer_type (TREE_TYPE (arg
)), op1a
);
2575 top2
= make_tree (build_pointer_type (TREE_TYPE (arg
)), op2a
);
2577 /* Make sure not to fold the sincos call again. */
2578 call
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
2579 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn
)),
2580 call
, 3, arg
, top1
, top2
));
2584 tree call
, fn
= NULL_TREE
, narg
;
2585 tree ctype
= build_complex_type (type
);
2587 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2588 fn
= built_in_decls
[BUILT_IN_CEXPF
];
2589 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2590 fn
= built_in_decls
[BUILT_IN_CEXP
];
2591 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2592 fn
= built_in_decls
[BUILT_IN_CEXPL
];
2596 /* If we don't have a decl for cexp create one. This is the
2597 friendliest fallback if the user calls __builtin_cexpi
2598 without full target C99 function support. */
2599 if (fn
== NULL_TREE
)
2602 const char *name
= NULL
;
2604 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2606 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2608 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2611 fntype
= build_function_type_list (ctype
, ctype
, NULL_TREE
);
2612 fn
= build_fn_decl (name
, fntype
);
2615 narg
= fold_build2_loc (loc
, COMPLEX_EXPR
, ctype
,
2616 build_real (type
, dconst0
), arg
);
2618 /* Make sure not to fold the cexp call again. */
2619 call
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
2620 return expand_expr (build_call_nary (ctype
, call
, 1, narg
),
2621 target
, VOIDmode
, EXPAND_NORMAL
);
2624 /* Now build the proper return type. */
2625 return expand_expr (build2 (COMPLEX_EXPR
, build_complex_type (type
),
2626 make_tree (TREE_TYPE (arg
), op2
),
2627 make_tree (TREE_TYPE (arg
), op1
)),
2628 target
, VOIDmode
, EXPAND_NORMAL
);
2631 /* Conveniently construct a function call expression. FNDECL names the
2632 function to be called, N is the number of arguments, and the "..."
2633 parameters are the argument expressions. Unlike build_call_exr
2634 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2637 build_call_nofold_loc (location_t loc
, tree fndecl
, int n
, ...)
2640 tree fntype
= TREE_TYPE (fndecl
);
2641 tree fn
= build1 (ADDR_EXPR
, build_pointer_type (fntype
), fndecl
);
2644 fn
= build_call_valist (TREE_TYPE (fntype
), fn
, n
, ap
);
2646 SET_EXPR_LOCATION (fn
, loc
);
2650 /* Expand a call to one of the builtin rounding functions gcc defines
2651 as an extension (lfloor and lceil). As these are gcc extensions we
2652 do not need to worry about setting errno to EDOM.
2653 If expanding via optab fails, lower expression to (int)(floor(x)).
2654 EXP is the expression that is a call to the builtin function;
2655 if convenient, the result should be placed in TARGET. */
2658 expand_builtin_int_roundingfn (tree exp
, rtx target
)
2660 convert_optab builtin_optab
;
2661 rtx op0
, insns
, tmp
;
2662 tree fndecl
= get_callee_fndecl (exp
);
2663 enum built_in_function fallback_fn
;
2664 tree fallback_fndecl
;
2665 enum machine_mode mode
;
2668 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2671 arg
= CALL_EXPR_ARG (exp
, 0);
2673 switch (DECL_FUNCTION_CODE (fndecl
))
2675 CASE_FLT_FN (BUILT_IN_LCEIL
):
2676 CASE_FLT_FN (BUILT_IN_LLCEIL
):
2677 builtin_optab
= lceil_optab
;
2678 fallback_fn
= BUILT_IN_CEIL
;
2681 CASE_FLT_FN (BUILT_IN_LFLOOR
):
2682 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
2683 builtin_optab
= lfloor_optab
;
2684 fallback_fn
= BUILT_IN_FLOOR
;
2691 /* Make a suitable register to place result in. */
2692 mode
= TYPE_MODE (TREE_TYPE (exp
));
2694 target
= gen_reg_rtx (mode
);
2696 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2697 need to expand the argument again. This way, we will not perform
2698 side-effects more the once. */
2699 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2701 op0
= expand_expr (arg
, NULL
, VOIDmode
, EXPAND_NORMAL
);
2705 /* Compute into TARGET. */
2706 if (expand_sfix_optab (target
, op0
, builtin_optab
))
2708 /* Output the entire sequence. */
2709 insns
= get_insns ();
2715 /* If we were unable to expand via the builtin, stop the sequence
2716 (without outputting the insns). */
2719 /* Fall back to floating point rounding optab. */
2720 fallback_fndecl
= mathfn_built_in (TREE_TYPE (arg
), fallback_fn
);
2722 /* For non-C99 targets we may end up without a fallback fndecl here
2723 if the user called __builtin_lfloor directly. In this case emit
2724 a call to the floor/ceil variants nevertheless. This should result
2725 in the best user experience for not full C99 targets. */
2726 if (fallback_fndecl
== NULL_TREE
)
2729 const char *name
= NULL
;
2731 switch (DECL_FUNCTION_CODE (fndecl
))
2733 case BUILT_IN_LCEIL
:
2734 case BUILT_IN_LLCEIL
:
2737 case BUILT_IN_LCEILF
:
2738 case BUILT_IN_LLCEILF
:
2741 case BUILT_IN_LCEILL
:
2742 case BUILT_IN_LLCEILL
:
2745 case BUILT_IN_LFLOOR
:
2746 case BUILT_IN_LLFLOOR
:
2749 case BUILT_IN_LFLOORF
:
2750 case BUILT_IN_LLFLOORF
:
2753 case BUILT_IN_LFLOORL
:
2754 case BUILT_IN_LLFLOORL
:
2761 fntype
= build_function_type_list (TREE_TYPE (arg
),
2762 TREE_TYPE (arg
), NULL_TREE
);
2763 fallback_fndecl
= build_fn_decl (name
, fntype
);
2766 exp
= build_call_nofold_loc (EXPR_LOCATION (exp
), fallback_fndecl
, 1, arg
);
2768 tmp
= expand_normal (exp
);
2770 /* Truncate the result of floating point optab to integer
2771 via expand_fix (). */
2772 target
= gen_reg_rtx (mode
);
2773 expand_fix (target
, tmp
, 0);
2778 /* Expand a call to one of the builtin math functions doing integer
2780 Return 0 if a normal call should be emitted rather than expanding the
2781 function in-line. EXP is the expression that is a call to the builtin
2782 function; if convenient, the result should be placed in TARGET. */
2785 expand_builtin_int_roundingfn_2 (tree exp
, rtx target
)
2787 convert_optab builtin_optab
;
2789 tree fndecl
= get_callee_fndecl (exp
);
2791 enum machine_mode mode
;
2793 /* There's no easy way to detect the case we need to set EDOM. */
2794 if (flag_errno_math
)
2797 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2800 arg
= CALL_EXPR_ARG (exp
, 0);
2802 switch (DECL_FUNCTION_CODE (fndecl
))
2804 CASE_FLT_FN (BUILT_IN_LRINT
):
2805 CASE_FLT_FN (BUILT_IN_LLRINT
):
2806 builtin_optab
= lrint_optab
; break;
2807 CASE_FLT_FN (BUILT_IN_LROUND
):
2808 CASE_FLT_FN (BUILT_IN_LLROUND
):
2809 builtin_optab
= lround_optab
; break;
2814 /* Make a suitable register to place result in. */
2815 mode
= TYPE_MODE (TREE_TYPE (exp
));
2817 target
= gen_reg_rtx (mode
);
2819 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2820 need to expand the argument again. This way, we will not perform
2821 side-effects more the once. */
2822 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2824 op0
= expand_expr (arg
, NULL
, VOIDmode
, EXPAND_NORMAL
);
2828 if (expand_sfix_optab (target
, op0
, builtin_optab
))
2830 /* Output the entire sequence. */
2831 insns
= get_insns ();
2837 /* If we were unable to expand via the builtin, stop the sequence
2838 (without outputting the insns) and call to the library function
2839 with the stabilized argument list. */
2842 target
= expand_call (exp
, target
, target
== const0_rtx
);
2847 /* To evaluate powi(x,n), the floating point value x raised to the
2848 constant integer exponent n, we use a hybrid algorithm that
2849 combines the "window method" with look-up tables. For an
2850 introduction to exponentiation algorithms and "addition chains",
2851 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2852 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2853 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2854 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2856 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2857 multiplications to inline before calling the system library's pow
2858 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2859 so this default never requires calling pow, powf or powl. */
#ifndef POWI_MAX_MULTS
#define POWI_MAX_MULTS  (2*HOST_BITS_PER_WIDE_INT-2)
#endif

/* The size of the "optimal power tree" lookup table.  All
   exponents less than this value are simply looked up in the
   powi_table below.  This threshold is also used to size the
   cache of pseudo registers that hold intermediate results.  */
#define POWI_TABLE_SIZE 256

/* The size, in bits of the window, used in the "window method"
   exponentiation algorithm.  This is equivalent to a radix of
   (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method".  */
#define POWI_WINDOW_SIZE 3
2876 /* The following table is an efficient representation of an
2877 "optimal power tree". For each value, i, the corresponding
2878 value, j, in the table states than an optimal evaluation
2879 sequence for calculating pow(x,i) can be found by evaluating
2880 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2881 100 integers is given in Knuth's "Seminumerical algorithms". */
2883 static const unsigned char powi_table
[POWI_TABLE_SIZE
] =
2885 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2886 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2887 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2888 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2889 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2890 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2891 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2892 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2893 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2894 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2895 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2896 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2897 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2898 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2899 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2900 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2901 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2902 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2903 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2904 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2905 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2906 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2907 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2908 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2909 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2910 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2911 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2912 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2913 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2914 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2915 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2916 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2920 /* Return the number of multiplications required to calculate
2921 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2922 subroutine of powi_cost. CACHE is an array indicating
2923 which exponents have already been calculated. */
2926 powi_lookup_cost (unsigned HOST_WIDE_INT n
, bool *cache
)
2928 /* If we've already calculated this exponent, then this evaluation
2929 doesn't require any additional multiplications. */
2934 return powi_lookup_cost (n
- powi_table
[n
], cache
)
2935 + powi_lookup_cost (powi_table
[n
], cache
) + 1;
2938 /* Return the number of multiplications required to calculate
2939 powi(x,n) for an arbitrary x, given the exponent N. This
2940 function needs to be kept in sync with expand_powi below. */
2943 powi_cost (HOST_WIDE_INT n
)
2945 bool cache
[POWI_TABLE_SIZE
];
2946 unsigned HOST_WIDE_INT digit
;
2947 unsigned HOST_WIDE_INT val
;
2953 /* Ignore the reciprocal when calculating the cost. */
2954 val
= (n
< 0) ? -n
: n
;
2956 /* Initialize the exponent cache. */
2957 memset (cache
, 0, POWI_TABLE_SIZE
* sizeof (bool));
2962 while (val
>= POWI_TABLE_SIZE
)
2966 digit
= val
& ((1 << POWI_WINDOW_SIZE
) - 1);
2967 result
+= powi_lookup_cost (digit
, cache
)
2968 + POWI_WINDOW_SIZE
+ 1;
2969 val
>>= POWI_WINDOW_SIZE
;
2978 return result
+ powi_lookup_cost (val
, cache
);
2981 /* Recursive subroutine of expand_powi. This function takes the array,
2982 CACHE, of already calculated exponents and an exponent N and returns
2983 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2986 expand_powi_1 (enum machine_mode mode
, unsigned HOST_WIDE_INT n
, rtx
*cache
)
2988 unsigned HOST_WIDE_INT digit
;
2992 if (n
< POWI_TABLE_SIZE
)
2997 target
= gen_reg_rtx (mode
);
3000 op0
= expand_powi_1 (mode
, n
- powi_table
[n
], cache
);
3001 op1
= expand_powi_1 (mode
, powi_table
[n
], cache
);
3005 target
= gen_reg_rtx (mode
);
3006 digit
= n
& ((1 << POWI_WINDOW_SIZE
) - 1);
3007 op0
= expand_powi_1 (mode
, n
- digit
, cache
);
3008 op1
= expand_powi_1 (mode
, digit
, cache
);
3012 target
= gen_reg_rtx (mode
);
3013 op0
= expand_powi_1 (mode
, n
>> 1, cache
);
3017 result
= expand_mult (mode
, op0
, op1
, target
, 0);
3018 if (result
!= target
)
3019 emit_move_insn (target
, result
);
3023 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
3024 floating point operand in mode MODE, and N is the exponent. This
3025 function needs to be kept in sync with powi_cost above. */
3028 expand_powi (rtx x
, enum machine_mode mode
, HOST_WIDE_INT n
)
3030 rtx cache
[POWI_TABLE_SIZE
];
3034 return CONST1_RTX (mode
);
3036 memset (cache
, 0, sizeof (cache
));
3039 result
= expand_powi_1 (mode
, (n
< 0) ? -n
: n
, cache
);
3041 /* If the original exponent was negative, reciprocate the result. */
3043 result
= expand_binop (mode
, sdiv_optab
, CONST1_RTX (mode
),
3044 result
, NULL_RTX
, 0, OPTAB_LIB_WIDEN
);
3049 /* Fold a builtin function call to pow, powf, or powl into a series of sqrts or
3050 cbrts. Return NULL_RTX if no simplification can be made or expand the tree
3051 if we can simplify it. */
3053 expand_builtin_pow_root (location_t loc
, tree arg0
, tree arg1
, tree type
,
3056 if (TREE_CODE (arg1
) == REAL_CST
3057 && !TREE_OVERFLOW (arg1
)
3058 && flag_unsafe_math_optimizations
)
3060 enum machine_mode mode
= TYPE_MODE (type
);
3061 tree sqrtfn
= mathfn_built_in (type
, BUILT_IN_SQRT
);
3062 tree cbrtfn
= mathfn_built_in (type
, BUILT_IN_CBRT
);
3063 REAL_VALUE_TYPE c
= TREE_REAL_CST (arg1
);
3064 tree op
= NULL_TREE
;
3068 /* Optimize pow (x, 0.5) into sqrt. */
3069 if (REAL_VALUES_EQUAL (c
, dconsthalf
))
3070 op
= build_call_nofold_loc (loc
, sqrtfn
, 1, arg0
);
3074 REAL_VALUE_TYPE dconst1_4
= dconst1
;
3075 REAL_VALUE_TYPE dconst3_4
;
3076 SET_REAL_EXP (&dconst1_4
, REAL_EXP (&dconst1_4
) - 2);
3078 real_from_integer (&dconst3_4
, VOIDmode
, 3, 0, 0);
3079 SET_REAL_EXP (&dconst3_4
, REAL_EXP (&dconst3_4
) - 2);
3081 /* Optimize pow (x, 0.25) into sqrt (sqrt (x)). Assume on most
3082 machines that a builtin sqrt instruction is smaller than a
3083 call to pow with 0.25, so do this optimization even if
3085 if (REAL_VALUES_EQUAL (c
, dconst1_4
))
3087 op
= build_call_nofold_loc (loc
, sqrtfn
, 1, arg0
);
3088 op
= build_call_nofold_loc (loc
, sqrtfn
, 1, op
);
3091 /* Optimize pow (x, 0.75) = sqrt (x) * sqrt (sqrt (x)) unless we
3092 are optimizing for space. */
3093 else if (optimize_insn_for_speed_p ()
3094 && !TREE_SIDE_EFFECTS (arg0
)
3095 && REAL_VALUES_EQUAL (c
, dconst3_4
))
3097 tree sqrt1
= build_call_expr_loc (loc
, sqrtfn
, 1, arg0
);
3098 tree sqrt2
= builtin_save_expr (sqrt1
);
3099 tree sqrt3
= build_call_expr_loc (loc
, sqrtfn
, 1, sqrt1
);
3100 op
= fold_build2_loc (loc
, MULT_EXPR
, type
, sqrt2
, sqrt3
);
3105 /* Check whether we can do cbrt insstead of pow (x, 1./3.) and
3106 cbrt/sqrts instead of pow (x, 1./6.). */
3108 && (tree_expr_nonnegative_p (arg0
) || !HONOR_NANS (mode
)))
3110 /* First try 1/3. */
3111 REAL_VALUE_TYPE dconst1_3
3112 = real_value_truncate (mode
, dconst_third ());
3114 if (REAL_VALUES_EQUAL (c
, dconst1_3
))
3115 op
= build_call_nofold_loc (loc
, cbrtfn
, 1, arg0
);
3118 else if (optimize_insn_for_speed_p ())
3120 REAL_VALUE_TYPE dconst1_6
= dconst1_3
;
3121 SET_REAL_EXP (&dconst1_6
, REAL_EXP (&dconst1_6
) - 1);
3123 if (REAL_VALUES_EQUAL (c
, dconst1_6
))
3125 op
= build_call_nofold_loc (loc
, sqrtfn
, 1, arg0
);
3126 op
= build_call_nofold_loc (loc
, cbrtfn
, 1, op
);
3132 return expand_expr (op
, subtarget
, mode
, EXPAND_NORMAL
);
3138 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
3139 a normal call should be emitted rather than expanding the function
3140 in-line. EXP is the expression that is a call to the builtin
3141 function; if convenient, the result should be placed in TARGET. */
3144 expand_builtin_pow (tree exp
, rtx target
, rtx subtarget
)
3148 tree type
= TREE_TYPE (exp
);
3149 REAL_VALUE_TYPE cint
, c
, c2
;
3152 enum machine_mode mode
= TYPE_MODE (type
);
3154 if (! validate_arglist (exp
, REAL_TYPE
, REAL_TYPE
, VOID_TYPE
))
3157 arg0
= CALL_EXPR_ARG (exp
, 0);
3158 arg1
= CALL_EXPR_ARG (exp
, 1);
3160 if (TREE_CODE (arg1
) != REAL_CST
3161 || TREE_OVERFLOW (arg1
))
3162 return expand_builtin_mathfn_2 (exp
, target
, subtarget
);
3164 /* Handle constant exponents. */
3166 /* For integer valued exponents we can expand to an optimal multiplication
3167 sequence using expand_powi. */
3168 c
= TREE_REAL_CST (arg1
);
3169 n
= real_to_integer (&c
);
3170 real_from_integer (&cint
, VOIDmode
, n
, n
< 0 ? -1 : 0, 0);
3171 if (real_identical (&c
, &cint
)
3172 && ((n
>= -1 && n
<= 2)
3173 || (flag_unsafe_math_optimizations
3174 && optimize_insn_for_speed_p ()
3175 && powi_cost (n
) <= POWI_MAX_MULTS
)))
3177 op
= expand_expr (arg0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
3180 op
= force_reg (mode
, op
);
3181 op
= expand_powi (op
, mode
, n
);
3186 narg0
= builtin_save_expr (arg0
);
3188 /* If the exponent is not integer valued, check if it is half of an integer.
3189 In this case we can expand to sqrt (x) * x**(n/2). */
3190 fn
= mathfn_built_in (type
, BUILT_IN_SQRT
);
3191 if (fn
!= NULL_TREE
)
3193 real_arithmetic (&c2
, MULT_EXPR
, &c
, &dconst2
);
3194 n
= real_to_integer (&c2
);
3195 real_from_integer (&cint
, VOIDmode
, n
, n
< 0 ? -1 : 0, 0);
3196 if (real_identical (&c2
, &cint
)
3197 && ((flag_unsafe_math_optimizations
3198 && optimize_insn_for_speed_p ()
3199 && powi_cost (n
/2) <= POWI_MAX_MULTS
)
3200 /* Even the c == 0.5 case cannot be done unconditionally
3201 when we need to preserve signed zeros, as
3202 pow (-0, 0.5) is +0, while sqrt(-0) is -0. */
3203 || (!HONOR_SIGNED_ZEROS (mode
) && n
== 1)
3204 /* For c == 1.5 we can assume that x * sqrt (x) is always
3205 smaller than pow (x, 1.5) if sqrt will not be expanded
3208 && optab_handler (sqrt_optab
, mode
) != CODE_FOR_nothing
)))
3210 tree call_expr
= build_call_nofold_loc (EXPR_LOCATION (exp
), fn
, 1,
3212 /* Use expand_expr in case the newly built call expression
3213 was folded to a non-call. */
3214 op
= expand_expr (call_expr
, subtarget
, mode
, EXPAND_NORMAL
);
3217 op2
= expand_expr (narg0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
3218 op2
= force_reg (mode
, op2
);
3219 op2
= expand_powi (op2
, mode
, abs (n
/ 2));
3220 op
= expand_simple_binop (mode
, MULT
, op
, op2
, NULL_RTX
,
3221 0, OPTAB_LIB_WIDEN
);
3222 /* If the original exponent was negative, reciprocate the
3225 op
= expand_binop (mode
, sdiv_optab
, CONST1_RTX (mode
),
3226 op
, NULL_RTX
, 0, OPTAB_LIB_WIDEN
);
3232 /* Check whether we can do a series of sqrt or cbrt's instead of the pow
3234 op
= expand_builtin_pow_root (EXPR_LOCATION (exp
), arg0
, arg1
, type
,
3239 /* Try if the exponent is a third of an integer. In this case
3240 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
3241 different from pow (x, 1./3.) due to rounding and behavior
3242 with negative x we need to constrain this transformation to
3243 unsafe math and positive x or finite math. */
3244 fn
= mathfn_built_in (type
, BUILT_IN_CBRT
);
3246 && flag_unsafe_math_optimizations
3247 && (tree_expr_nonnegative_p (arg0
)
3248 || !HONOR_NANS (mode
)))
3250 REAL_VALUE_TYPE dconst3
;
3251 real_from_integer (&dconst3
, VOIDmode
, 3, 0, 0);
3252 real_arithmetic (&c2
, MULT_EXPR
, &c
, &dconst3
);
3253 real_round (&c2
, mode
, &c2
);
3254 n
= real_to_integer (&c2
);
3255 real_from_integer (&cint
, VOIDmode
, n
, n
< 0 ? -1 : 0, 0);
3256 real_arithmetic (&c2
, RDIV_EXPR
, &cint
, &dconst3
);
3257 real_convert (&c2
, mode
, &c2
);
3258 if (real_identical (&c2
, &c
)
3259 && ((optimize_insn_for_speed_p ()
3260 && powi_cost (n
/3) <= POWI_MAX_MULTS
)
3263 tree call_expr
= build_call_nofold_loc (EXPR_LOCATION (exp
), fn
, 1,
3265 op
= expand_builtin (call_expr
, NULL_RTX
, subtarget
, mode
, 0);
3266 if (abs (n
) % 3 == 2)
3267 op
= expand_simple_binop (mode
, MULT
, op
, op
, op
,
3268 0, OPTAB_LIB_WIDEN
);
3271 op2
= expand_expr (narg0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
3272 op2
= force_reg (mode
, op2
);
3273 op2
= expand_powi (op2
, mode
, abs (n
/ 3));
3274 op
= expand_simple_binop (mode
, MULT
, op
, op2
, NULL_RTX
,
3275 0, OPTAB_LIB_WIDEN
);
3276 /* If the original exponent was negative, reciprocate the
3279 op
= expand_binop (mode
, sdiv_optab
, CONST1_RTX (mode
),
3280 op
, NULL_RTX
, 0, OPTAB_LIB_WIDEN
);
3286 /* Fall back to optab expansion. */
3287 return expand_builtin_mathfn_2 (exp
, target
, subtarget
);
3290 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3291 a normal call should be emitted rather than expanding the function
3292 in-line. EXP is the expression that is a call to the builtin
3293 function; if convenient, the result should be placed in TARGET. */
3296 expand_builtin_powi (tree exp
, rtx target
)
3300 enum machine_mode mode
;
3301 enum machine_mode mode2
;
3303 if (! validate_arglist (exp
, REAL_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3306 arg0
= CALL_EXPR_ARG (exp
, 0);
3307 arg1
= CALL_EXPR_ARG (exp
, 1);
3308 mode
= TYPE_MODE (TREE_TYPE (exp
));
3310 /* Handle constant power. */
3312 if (TREE_CODE (arg1
) == INTEGER_CST
3313 && !TREE_OVERFLOW (arg1
))
3315 HOST_WIDE_INT n
= TREE_INT_CST_LOW (arg1
);
3317 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3318 Otherwise, check the number of multiplications required. */
3319 if ((TREE_INT_CST_HIGH (arg1
) == 0
3320 || TREE_INT_CST_HIGH (arg1
) == -1)
3321 && ((n
>= -1 && n
<= 2)
3322 || (optimize_insn_for_speed_p ()
3323 && powi_cost (n
) <= POWI_MAX_MULTS
)))
3325 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
3326 op0
= force_reg (mode
, op0
);
3327 return expand_powi (op0
, mode
, n
);
3331 /* Emit a libcall to libgcc. */
3333 /* Mode of the 2nd argument must match that of an int. */
3334 mode2
= mode_for_size (INT_TYPE_SIZE
, MODE_INT
, 0);
3336 if (target
== NULL_RTX
)
3337 target
= gen_reg_rtx (mode
);
3339 op0
= expand_expr (arg0
, NULL_RTX
, mode
, EXPAND_NORMAL
);
3340 if (GET_MODE (op0
) != mode
)
3341 op0
= convert_to_mode (mode
, op0
, 0);
3342 op1
= expand_expr (arg1
, NULL_RTX
, mode2
, EXPAND_NORMAL
);
3343 if (GET_MODE (op1
) != mode2
)
3344 op1
= convert_to_mode (mode2
, op1
, 0);
3346 target
= emit_library_call_value (optab_libfunc (powi_optab
, mode
),
3347 target
, LCT_CONST
, mode
, 2,
3348 op0
, mode
, op1
, mode2
);
3353 /* Expand expression EXP which is a call to the strlen builtin. Return
3354 NULL_RTX if we failed the caller should emit a normal call, otherwise
3355 try to get the result in TARGET, if convenient. */
3358 expand_builtin_strlen (tree exp
, rtx target
,
3359 enum machine_mode target_mode
)
3361 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
3367 tree src
= CALL_EXPR_ARG (exp
, 0);
3368 rtx result
, src_reg
, char_rtx
, before_strlen
;
3369 enum machine_mode insn_mode
= target_mode
, char_mode
;
3370 enum insn_code icode
= CODE_FOR_nothing
;
3373 /* If the length can be computed at compile-time, return it. */
3374 len
= c_strlen (src
, 0);
3376 return expand_expr (len
, target
, target_mode
, EXPAND_NORMAL
);
3378 /* If the length can be computed at compile-time and is constant
3379 integer, but there are side-effects in src, evaluate
3380 src for side-effects, then return len.
3381 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3382 can be optimized into: i++; x = 3; */
3383 len
= c_strlen (src
, 1);
3384 if (len
&& TREE_CODE (len
) == INTEGER_CST
)
3386 expand_expr (src
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3387 return expand_expr (len
, target
, target_mode
, EXPAND_NORMAL
);
3390 align
= get_pointer_alignment (src
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
3392 /* If SRC is not a pointer type, don't do this operation inline. */
3396 /* Bail out if we can't compute strlen in the right mode. */
3397 while (insn_mode
!= VOIDmode
)
3399 icode
= optab_handler (strlen_optab
, insn_mode
);
3400 if (icode
!= CODE_FOR_nothing
)
3403 insn_mode
= GET_MODE_WIDER_MODE (insn_mode
);
3405 if (insn_mode
== VOIDmode
)
3408 /* Make a place to write the result of the instruction. */
3412 && GET_MODE (result
) == insn_mode
3413 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
3414 result
= gen_reg_rtx (insn_mode
);
3416 /* Make a place to hold the source address. We will not expand
3417 the actual source until we are sure that the expansion will
3418 not fail -- there are trees that cannot be expanded twice. */
3419 src_reg
= gen_reg_rtx (Pmode
);
3421 /* Mark the beginning of the strlen sequence so we can emit the
3422 source operand later. */
3423 before_strlen
= get_last_insn ();
3425 char_rtx
= const0_rtx
;
3426 char_mode
= insn_data
[(int) icode
].operand
[2].mode
;
3427 if (! (*insn_data
[(int) icode
].operand
[2].predicate
) (char_rtx
,
3429 char_rtx
= copy_to_mode_reg (char_mode
, char_rtx
);
3431 pat
= GEN_FCN (icode
) (result
, gen_rtx_MEM (BLKmode
, src_reg
),
3432 char_rtx
, GEN_INT (align
));
3437 /* Now that we are assured of success, expand the source. */
3439 pat
= expand_expr (src
, src_reg
, ptr_mode
, EXPAND_NORMAL
);
3441 emit_move_insn (src_reg
, pat
);
3446 emit_insn_after (pat
, before_strlen
);
3448 emit_insn_before (pat
, get_insns ());
3450 /* Return the value in the proper mode for this function. */
3451 if (GET_MODE (result
) == target_mode
)
3453 else if (target
!= 0)
3454 convert_move (target
, result
, 0);
3456 target
= convert_to_mode (target_mode
, result
, 0);
3462 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3463 bytes from constant string DATA + OFFSET and return it as target
3467 builtin_memcpy_read_str (void *data
, HOST_WIDE_INT offset
,
3468 enum machine_mode mode
)
3470 const char *str
= (const char *) data
;
3472 gcc_assert (offset
>= 0
3473 && ((unsigned HOST_WIDE_INT
) offset
+ GET_MODE_SIZE (mode
)
3474 <= strlen (str
) + 1));
3476 return c_readstr (str
+ offset
, mode
);
3479 /* Expand a call EXP to the memcpy builtin.
3480 Return NULL_RTX if we failed, the caller should emit a normal call,
3481 otherwise try to get the result in TARGET, if convenient (and in
3482 mode MODE if that's convenient). */
3485 expand_builtin_memcpy (tree exp
, rtx target
)
3487 if (!validate_arglist (exp
,
3488 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3492 tree dest
= CALL_EXPR_ARG (exp
, 0);
3493 tree src
= CALL_EXPR_ARG (exp
, 1);
3494 tree len
= CALL_EXPR_ARG (exp
, 2);
3495 const char *src_str
;
3496 unsigned int src_align
= get_pointer_alignment (src
, BIGGEST_ALIGNMENT
);
3497 unsigned int dest_align
3498 = get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
3499 rtx dest_mem
, src_mem
, dest_addr
, len_rtx
;
3500 HOST_WIDE_INT expected_size
= -1;
3501 unsigned int expected_align
= 0;
3503 /* If DEST is not a pointer type, call the normal function. */
3504 if (dest_align
== 0)
3507 /* If either SRC is not a pointer type, don't do this
3508 operation in-line. */
3512 if (currently_expanding_gimple_stmt
)
3513 stringop_block_profile (currently_expanding_gimple_stmt
,
3514 &expected_align
, &expected_size
);
3516 if (expected_align
< dest_align
)
3517 expected_align
= dest_align
;
3518 dest_mem
= get_memory_rtx (dest
, len
);
3519 set_mem_align (dest_mem
, dest_align
);
3520 len_rtx
= expand_normal (len
);
3521 src_str
= c_getstr (src
);
3523 /* If SRC is a string constant and block move would be done
3524 by pieces, we can avoid loading the string from memory
3525 and only stored the computed constants. */
3527 && CONST_INT_P (len_rtx
)
3528 && (unsigned HOST_WIDE_INT
) INTVAL (len_rtx
) <= strlen (src_str
) + 1
3529 && can_store_by_pieces (INTVAL (len_rtx
), builtin_memcpy_read_str
,
3530 CONST_CAST (char *, src_str
),
3533 dest_mem
= store_by_pieces (dest_mem
, INTVAL (len_rtx
),
3534 builtin_memcpy_read_str
,
3535 CONST_CAST (char *, src_str
),
3536 dest_align
, false, 0);
3537 dest_mem
= force_operand (XEXP (dest_mem
, 0), target
);
3538 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3542 src_mem
= get_memory_rtx (src
, len
);
3543 set_mem_align (src_mem
, src_align
);
3545 /* Copy word part most expediently. */
3546 dest_addr
= emit_block_move_hints (dest_mem
, src_mem
, len_rtx
,
3547 CALL_EXPR_TAILCALL (exp
)
3548 ? BLOCK_OP_TAILCALL
: BLOCK_OP_NORMAL
,
3549 expected_align
, expected_size
);
3553 dest_addr
= force_operand (XEXP (dest_mem
, 0), target
);
3554 dest_addr
= convert_memory_address (ptr_mode
, dest_addr
);
3560 /* Expand a call EXP to the mempcpy builtin.
3561 Return NULL_RTX if we failed; the caller should emit a normal call,
3562 otherwise try to get the result in TARGET, if convenient (and in
3563 mode MODE if that's convenient). If ENDP is 0 return the
3564 destination pointer, if ENDP is 1 return the end pointer ala
3565 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3569 expand_builtin_mempcpy (tree exp
, rtx target
, enum machine_mode mode
)
3571 if (!validate_arglist (exp
,
3572 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3576 tree dest
= CALL_EXPR_ARG (exp
, 0);
3577 tree src
= CALL_EXPR_ARG (exp
, 1);
3578 tree len
= CALL_EXPR_ARG (exp
, 2);
3579 return expand_builtin_mempcpy_args (dest
, src
, len
,
3580 target
, mode
, /*endp=*/ 1);
3584 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3585 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3586 so that this can also be called without constructing an actual CALL_EXPR.
3587 The other arguments and return value are the same as for
3588 expand_builtin_mempcpy. */
3591 expand_builtin_mempcpy_args (tree dest
, tree src
, tree len
,
3592 rtx target
, enum machine_mode mode
, int endp
)
3594 /* If return value is ignored, transform mempcpy into memcpy. */
3595 if (target
== const0_rtx
&& implicit_built_in_decls
[BUILT_IN_MEMCPY
])
3597 tree fn
= implicit_built_in_decls
[BUILT_IN_MEMCPY
];
3598 tree result
= build_call_nofold_loc (UNKNOWN_LOCATION
, fn
, 3,
3600 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3604 const char *src_str
;
3605 unsigned int src_align
= get_pointer_alignment (src
, BIGGEST_ALIGNMENT
);
3606 unsigned int dest_align
3607 = get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
3608 rtx dest_mem
, src_mem
, len_rtx
;
3610 /* If either SRC or DEST is not a pointer type, don't do this
3611 operation in-line. */
3612 if (dest_align
== 0 || src_align
== 0)
3615 /* If LEN is not constant, call the normal function. */
3616 if (! host_integerp (len
, 1))
3619 len_rtx
= expand_normal (len
);
3620 src_str
= c_getstr (src
);
3622 /* If SRC is a string constant and block move would be done
3623 by pieces, we can avoid loading the string from memory
3624 and only stored the computed constants. */
3626 && CONST_INT_P (len_rtx
)
3627 && (unsigned HOST_WIDE_INT
) INTVAL (len_rtx
) <= strlen (src_str
) + 1
3628 && can_store_by_pieces (INTVAL (len_rtx
), builtin_memcpy_read_str
,
3629 CONST_CAST (char *, src_str
),
3632 dest_mem
= get_memory_rtx (dest
, len
);
3633 set_mem_align (dest_mem
, dest_align
);
3634 dest_mem
= store_by_pieces (dest_mem
, INTVAL (len_rtx
),
3635 builtin_memcpy_read_str
,
3636 CONST_CAST (char *, src_str
),
3637 dest_align
, false, endp
);
3638 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3639 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3643 if (CONST_INT_P (len_rtx
)
3644 && can_move_by_pieces (INTVAL (len_rtx
),
3645 MIN (dest_align
, src_align
)))
3647 dest_mem
= get_memory_rtx (dest
, len
);
3648 set_mem_align (dest_mem
, dest_align
);
3649 src_mem
= get_memory_rtx (src
, len
);
3650 set_mem_align (src_mem
, src_align
);
3651 dest_mem
= move_by_pieces (dest_mem
, src_mem
, INTVAL (len_rtx
),
3652 MIN (dest_align
, src_align
), endp
);
3653 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3654 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3663 # define HAVE_movstr 0
3664 # define CODE_FOR_movstr CODE_FOR_nothing
3667 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3668 we failed, the caller should emit a normal call, otherwise try to
3669 get the result in TARGET, if convenient. If ENDP is 0 return the
3670 destination pointer, if ENDP is 1 return the end pointer ala
3671 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3675 expand_movstr (tree dest
, tree src
, rtx target
, int endp
)
3681 const struct insn_data_d
* data
;
3686 dest_mem
= get_memory_rtx (dest
, NULL
);
3687 src_mem
= get_memory_rtx (src
, NULL
);
3688 data
= insn_data
+ CODE_FOR_movstr
;
3691 target
= force_reg (Pmode
, XEXP (dest_mem
, 0));
3692 dest_mem
= replace_equiv_address (dest_mem
, target
);
3693 end
= gen_reg_rtx (Pmode
);
3698 || target
== const0_rtx
3699 || ! (*data
->operand
[0].predicate
) (target
, Pmode
))
3701 end
= gen_reg_rtx (Pmode
);
3702 if (target
!= const0_rtx
)
3709 if (data
->operand
[0].mode
!= VOIDmode
)
3710 end
= gen_lowpart (data
->operand
[0].mode
, end
);
3712 insn
= data
->genfun (end
, dest_mem
, src_mem
);
3718 /* movstr is supposed to set end to the address of the NUL
3719 terminator. If the caller requested a mempcpy-like return value,
3721 if (endp
== 1 && target
!= const0_rtx
)
3723 rtx tem
= plus_constant (gen_lowpart (GET_MODE (target
), end
), 1);
3724 emit_move_insn (target
, force_operand (tem
, NULL_RTX
));
3730 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3731 NULL_RTX if we failed the caller should emit a normal call, otherwise
3732 try to get the result in TARGET, if convenient (and in mode MODE if that's
3736 expand_builtin_strcpy (tree exp
, rtx target
)
3738 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3740 tree dest
= CALL_EXPR_ARG (exp
, 0);
3741 tree src
= CALL_EXPR_ARG (exp
, 1);
3742 return expand_builtin_strcpy_args (dest
, src
, target
);
3747 /* Helper function to do the actual work for expand_builtin_strcpy. The
3748 arguments to the builtin_strcpy call DEST and SRC are broken out
3749 so that this can also be called without constructing an actual CALL_EXPR.
3750 The other arguments and return value are the same as for
3751 expand_builtin_strcpy. */
3754 expand_builtin_strcpy_args (tree dest
, tree src
, rtx target
)
3756 return expand_movstr (dest
, src
, target
, /*endp=*/0);
3759 /* Expand a call EXP to the stpcpy builtin.
3760 Return NULL_RTX if we failed the caller should emit a normal call,
3761 otherwise try to get the result in TARGET, if convenient (and in
3762 mode MODE if that's convenient). */
3765 expand_builtin_stpcpy (tree exp
, rtx target
, enum machine_mode mode
)
3768 location_t loc
= EXPR_LOCATION (exp
);
3770 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3773 dst
= CALL_EXPR_ARG (exp
, 0);
3774 src
= CALL_EXPR_ARG (exp
, 1);
3776 /* If return value is ignored, transform stpcpy into strcpy. */
3777 if (target
== const0_rtx
&& implicit_built_in_decls
[BUILT_IN_STRCPY
])
3779 tree fn
= implicit_built_in_decls
[BUILT_IN_STRCPY
];
3780 tree result
= build_call_nofold_loc (loc
, fn
, 2, dst
, src
);
3781 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3788 /* Ensure we get an actual string whose length can be evaluated at
3789 compile-time, not an expression containing a string. This is
3790 because the latter will potentially produce pessimized code
3791 when used to produce the return value. */
3792 if (! c_getstr (src
) || ! (len
= c_strlen (src
, 0)))
3793 return expand_movstr (dst
, src
, target
, /*endp=*/2);
3795 lenp1
= size_binop_loc (loc
, PLUS_EXPR
, len
, ssize_int (1));
3796 ret
= expand_builtin_mempcpy_args (dst
, src
, lenp1
,
3797 target
, mode
, /*endp=*/2);
3802 if (TREE_CODE (len
) == INTEGER_CST
)
3804 rtx len_rtx
= expand_normal (len
);
3806 if (CONST_INT_P (len_rtx
))
3808 ret
= expand_builtin_strcpy_args (dst
, src
, target
);
3814 if (mode
!= VOIDmode
)
3815 target
= gen_reg_rtx (mode
);
3817 target
= gen_reg_rtx (GET_MODE (ret
));
3819 if (GET_MODE (target
) != GET_MODE (ret
))
3820 ret
= gen_lowpart (GET_MODE (target
), ret
);
3822 ret
= plus_constant (ret
, INTVAL (len_rtx
));
3823 ret
= emit_move_insn (target
, force_operand (ret
, NULL_RTX
));
3831 return expand_movstr (dst
, src
, target
, /*endp=*/2);
3835 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3836 bytes from constant string DATA + OFFSET and return it as target
3840 builtin_strncpy_read_str (void *data
, HOST_WIDE_INT offset
,
3841 enum machine_mode mode
)
3843 const char *str
= (const char *) data
;
3845 if ((unsigned HOST_WIDE_INT
) offset
> strlen (str
))
3848 return c_readstr (str
+ offset
, mode
);
3851 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3852 NULL_RTX if we failed the caller should emit a normal call. */
3855 expand_builtin_strncpy (tree exp
, rtx target
)
3857 location_t loc
= EXPR_LOCATION (exp
);
3859 if (validate_arglist (exp
,
3860 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3862 tree dest
= CALL_EXPR_ARG (exp
, 0);
3863 tree src
= CALL_EXPR_ARG (exp
, 1);
3864 tree len
= CALL_EXPR_ARG (exp
, 2);
3865 tree slen
= c_strlen (src
, 1);
3867 /* We must be passed a constant len and src parameter. */
3868 if (!host_integerp (len
, 1) || !slen
|| !host_integerp (slen
, 1))
3871 slen
= size_binop_loc (loc
, PLUS_EXPR
, slen
, ssize_int (1));
3873 /* We're required to pad with trailing zeros if the requested
3874 len is greater than strlen(s2)+1. In that case try to
3875 use store_by_pieces, if it fails, punt. */
3876 if (tree_int_cst_lt (slen
, len
))
3878 unsigned int dest_align
3879 = get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
3880 const char *p
= c_getstr (src
);
3883 if (!p
|| dest_align
== 0 || !host_integerp (len
, 1)
3884 || !can_store_by_pieces (tree_low_cst (len
, 1),
3885 builtin_strncpy_read_str
,
3886 CONST_CAST (char *, p
),
3890 dest_mem
= get_memory_rtx (dest
, len
);
3891 store_by_pieces (dest_mem
, tree_low_cst (len
, 1),
3892 builtin_strncpy_read_str
,
3893 CONST_CAST (char *, p
), dest_align
, false, 0);
3894 dest_mem
= force_operand (XEXP (dest_mem
, 0), target
);
3895 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3902 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3903 bytes from constant string DATA + OFFSET and return it as target
3907 builtin_memset_read_str (void *data
, HOST_WIDE_INT offset ATTRIBUTE_UNUSED
,
3908 enum machine_mode mode
)
3910 const char *c
= (const char *) data
;
3911 char *p
= XALLOCAVEC (char, GET_MODE_SIZE (mode
));
3913 memset (p
, *c
, GET_MODE_SIZE (mode
));
3915 return c_readstr (p
, mode
);
3918 /* Callback routine for store_by_pieces. Return the RTL of a register
3919 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3920 char value given in the RTL register data. For example, if mode is
3921 4 bytes wide, return the RTL for 0x01010101*data. */
3924 builtin_memset_gen_str (void *data
, HOST_WIDE_INT offset ATTRIBUTE_UNUSED
,
3925 enum machine_mode mode
)
3931 size
= GET_MODE_SIZE (mode
);
3935 p
= XALLOCAVEC (char, size
);
3936 memset (p
, 1, size
);
3937 coeff
= c_readstr (p
, mode
);
3939 target
= convert_to_mode (mode
, (rtx
) data
, 1);
3940 target
= expand_mult (mode
, target
, coeff
, NULL_RTX
, 1);
3941 return force_reg (mode
, target
);
3944 /* Expand expression EXP, which is a call to the memset builtin. Return
3945 NULL_RTX if we failed the caller should emit a normal call, otherwise
3946 try to get the result in TARGET, if convenient (and in mode MODE if that's
3950 expand_builtin_memset (tree exp
, rtx target
, enum machine_mode mode
)
3952 if (!validate_arglist (exp
,
3953 POINTER_TYPE
, INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3957 tree dest
= CALL_EXPR_ARG (exp
, 0);
3958 tree val
= CALL_EXPR_ARG (exp
, 1);
3959 tree len
= CALL_EXPR_ARG (exp
, 2);
3960 return expand_builtin_memset_args (dest
, val
, len
, target
, mode
, exp
);
3964 /* Helper function to do the actual work for expand_builtin_memset. The
3965 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3966 so that this can also be called without constructing an actual CALL_EXPR.
3967 The other arguments and return value are the same as for
3968 expand_builtin_memset. */
3971 expand_builtin_memset_args (tree dest
, tree val
, tree len
,
3972 rtx target
, enum machine_mode mode
, tree orig_exp
)
3975 enum built_in_function fcode
;
3977 unsigned int dest_align
;
3978 rtx dest_mem
, dest_addr
, len_rtx
;
3979 HOST_WIDE_INT expected_size
= -1;
3980 unsigned int expected_align
= 0;
3982 dest_align
= get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
3984 /* If DEST is not a pointer type, don't do this operation in-line. */
3985 if (dest_align
== 0)
3988 if (currently_expanding_gimple_stmt
)
3989 stringop_block_profile (currently_expanding_gimple_stmt
,
3990 &expected_align
, &expected_size
);
3992 if (expected_align
< dest_align
)
3993 expected_align
= dest_align
;
3995 /* If the LEN parameter is zero, return DEST. */
3996 if (integer_zerop (len
))
3998 /* Evaluate and ignore VAL in case it has side-effects. */
3999 expand_expr (val
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4000 return expand_expr (dest
, target
, mode
, EXPAND_NORMAL
);
4003 /* Stabilize the arguments in case we fail. */
4004 dest
= builtin_save_expr (dest
);
4005 val
= builtin_save_expr (val
);
4006 len
= builtin_save_expr (len
);
4008 len_rtx
= expand_normal (len
);
4009 dest_mem
= get_memory_rtx (dest
, len
);
4011 if (TREE_CODE (val
) != INTEGER_CST
)
4015 val_rtx
= expand_normal (val
);
4016 val_rtx
= convert_to_mode (TYPE_MODE (unsigned_char_type_node
),
4019 /* Assume that we can memset by pieces if we can store
4020 * the coefficients by pieces (in the required modes).
4021 * We can't pass builtin_memset_gen_str as that emits RTL. */
4023 if (host_integerp (len
, 1)
4024 && can_store_by_pieces (tree_low_cst (len
, 1),
4025 builtin_memset_read_str
, &c
, dest_align
,
4028 val_rtx
= force_reg (TYPE_MODE (unsigned_char_type_node
),
4030 store_by_pieces (dest_mem
, tree_low_cst (len
, 1),
4031 builtin_memset_gen_str
, val_rtx
, dest_align
,
4034 else if (!set_storage_via_setmem (dest_mem
, len_rtx
, val_rtx
,
4035 dest_align
, expected_align
,
4039 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
4040 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
4044 if (target_char_cast (val
, &c
))
4049 if (host_integerp (len
, 1)
4050 && can_store_by_pieces (tree_low_cst (len
, 1),
4051 builtin_memset_read_str
, &c
, dest_align
,
4053 store_by_pieces (dest_mem
, tree_low_cst (len
, 1),
4054 builtin_memset_read_str
, &c
, dest_align
, true, 0);
4055 else if (!set_storage_via_setmem (dest_mem
, len_rtx
, GEN_INT (c
),
4056 dest_align
, expected_align
,
4060 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
4061 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
4065 set_mem_align (dest_mem
, dest_align
);
4066 dest_addr
= clear_storage_hints (dest_mem
, len_rtx
,
4067 CALL_EXPR_TAILCALL (orig_exp
)
4068 ? BLOCK_OP_TAILCALL
: BLOCK_OP_NORMAL
,
4069 expected_align
, expected_size
);
4073 dest_addr
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
4074 dest_addr
= convert_memory_address (ptr_mode
, dest_addr
);
4080 fndecl
= get_callee_fndecl (orig_exp
);
4081 fcode
= DECL_FUNCTION_CODE (fndecl
);
4082 if (fcode
== BUILT_IN_MEMSET
)
4083 fn
= build_call_nofold_loc (EXPR_LOCATION (orig_exp
), fndecl
, 3,
4085 else if (fcode
== BUILT_IN_BZERO
)
4086 fn
= build_call_nofold_loc (EXPR_LOCATION (orig_exp
), fndecl
, 2,
4090 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
4091 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (orig_exp
);
4092 return expand_call (fn
, target
, target
== const0_rtx
);
4095 /* Expand expression EXP, which is a call to the bzero builtin. Return
4096 NULL_RTX if we failed the caller should emit a normal call. */
4099 expand_builtin_bzero (tree exp
)
4102 location_t loc
= EXPR_LOCATION (exp
);
4104 if (!validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
4107 dest
= CALL_EXPR_ARG (exp
, 0);
4108 size
= CALL_EXPR_ARG (exp
, 1);
4110 /* New argument list transforming bzero(ptr x, int y) to
4111 memset(ptr x, int 0, size_t y). This is done this way
4112 so that if it isn't expanded inline, we fallback to
4113 calling bzero instead of memset. */
4115 return expand_builtin_memset_args (dest
, integer_zero_node
,
4116 fold_convert_loc (loc
, sizetype
, size
),
4117 const0_rtx
, VOIDmode
, exp
);
4120 /* Expand expression EXP, which is a call to the memcmp built-in function.
4121 Return NULL_RTX if we failed and the
4122 caller should emit a normal call, otherwise try to get the result in
4123 TARGET, if convenient (and in mode MODE, if that's convenient). */
4126 expand_builtin_memcmp (tree exp
, ATTRIBUTE_UNUSED rtx target
,
4127 ATTRIBUTE_UNUSED
enum machine_mode mode
)
4129 location_t loc ATTRIBUTE_UNUSED
= EXPR_LOCATION (exp
);
4131 if (!validate_arglist (exp
,
4132 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
4135 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
4137 rtx arg1_rtx
, arg2_rtx
, arg3_rtx
;
4140 tree arg1
= CALL_EXPR_ARG (exp
, 0);
4141 tree arg2
= CALL_EXPR_ARG (exp
, 1);
4142 tree len
= CALL_EXPR_ARG (exp
, 2);
4144 unsigned int arg1_align
4145 = get_pointer_alignment (arg1
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
4146 unsigned int arg2_align
4147 = get_pointer_alignment (arg2
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
4148 enum machine_mode insn_mode
;
4150 #ifdef HAVE_cmpmemsi
4152 insn_mode
= insn_data
[(int) CODE_FOR_cmpmemsi
].operand
[0].mode
;
4155 #ifdef HAVE_cmpstrnsi
4157 insn_mode
= insn_data
[(int) CODE_FOR_cmpstrnsi
].operand
[0].mode
;
4162 /* If we don't have POINTER_TYPE, call the function. */
4163 if (arg1_align
== 0 || arg2_align
== 0)
4166 /* Make a place to write the result of the instruction. */
4169 && REG_P (result
) && GET_MODE (result
) == insn_mode
4170 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
4171 result
= gen_reg_rtx (insn_mode
);
4173 arg1_rtx
= get_memory_rtx (arg1
, len
);
4174 arg2_rtx
= get_memory_rtx (arg2
, len
);
4175 arg3_rtx
= expand_normal (fold_convert_loc (loc
, sizetype
, len
));
4177 /* Set MEM_SIZE as appropriate. */
4178 if (CONST_INT_P (arg3_rtx
))
4180 set_mem_size (arg1_rtx
, arg3_rtx
);
4181 set_mem_size (arg2_rtx
, arg3_rtx
);
4184 #ifdef HAVE_cmpmemsi
4186 insn
= gen_cmpmemsi (result
, arg1_rtx
, arg2_rtx
, arg3_rtx
,
4187 GEN_INT (MIN (arg1_align
, arg2_align
)));
4190 #ifdef HAVE_cmpstrnsi
4192 insn
= gen_cmpstrnsi (result
, arg1_rtx
, arg2_rtx
, arg3_rtx
,
4193 GEN_INT (MIN (arg1_align
, arg2_align
)));
4201 emit_library_call_value (memcmp_libfunc
, result
, LCT_PURE
,
4202 TYPE_MODE (integer_type_node
), 3,
4203 XEXP (arg1_rtx
, 0), Pmode
,
4204 XEXP (arg2_rtx
, 0), Pmode
,
4205 convert_to_mode (TYPE_MODE (sizetype
), arg3_rtx
,
4206 TYPE_UNSIGNED (sizetype
)),
4207 TYPE_MODE (sizetype
));
4209 /* Return the value in the proper mode for this function. */
4210 mode
= TYPE_MODE (TREE_TYPE (exp
));
4211 if (GET_MODE (result
) == mode
)
4213 else if (target
!= 0)
4215 convert_move (target
, result
, 0);
4219 return convert_to_mode (mode
, result
, 0);
4226 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4227 if we failed the caller should emit a normal call, otherwise try to get
4228 the result in TARGET, if convenient. */
4231 expand_builtin_strcmp (tree exp
, ATTRIBUTE_UNUSED rtx target
)
4233 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
4236 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4237 if (direct_optab_handler (cmpstr_optab
, SImode
) != CODE_FOR_nothing
4238 || direct_optab_handler (cmpstrn_optab
, SImode
) != CODE_FOR_nothing
)
4240 rtx arg1_rtx
, arg2_rtx
;
4241 rtx result
, insn
= NULL_RTX
;
4243 tree arg1
= CALL_EXPR_ARG (exp
, 0);
4244 tree arg2
= CALL_EXPR_ARG (exp
, 1);
4246 unsigned int arg1_align
4247 = get_pointer_alignment (arg1
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
4248 unsigned int arg2_align
4249 = get_pointer_alignment (arg2
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
4251 /* If we don't have POINTER_TYPE, call the function. */
4252 if (arg1_align
== 0 || arg2_align
== 0)
4255 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4256 arg1
= builtin_save_expr (arg1
);
4257 arg2
= builtin_save_expr (arg2
);
4259 arg1_rtx
= get_memory_rtx (arg1
, NULL
);
4260 arg2_rtx
= get_memory_rtx (arg2
, NULL
);
4262 #ifdef HAVE_cmpstrsi
4263 /* Try to call cmpstrsi. */
4266 enum machine_mode insn_mode
4267 = insn_data
[(int) CODE_FOR_cmpstrsi
].operand
[0].mode
;
4269 /* Make a place to write the result of the instruction. */
4272 && REG_P (result
) && GET_MODE (result
) == insn_mode
4273 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
4274 result
= gen_reg_rtx (insn_mode
);
4276 insn
= gen_cmpstrsi (result
, arg1_rtx
, arg2_rtx
,
4277 GEN_INT (MIN (arg1_align
, arg2_align
)));
4280 #ifdef HAVE_cmpstrnsi
4281 /* Try to determine at least one length and call cmpstrnsi. */
4282 if (!insn
&& HAVE_cmpstrnsi
)
4287 enum machine_mode insn_mode
4288 = insn_data
[(int) CODE_FOR_cmpstrnsi
].operand
[0].mode
;
4289 tree len1
= c_strlen (arg1
, 1);
4290 tree len2
= c_strlen (arg2
, 1);
4293 len1
= size_binop (PLUS_EXPR
, ssize_int (1), len1
);
4295 len2
= size_binop (PLUS_EXPR
, ssize_int (1), len2
);
4297 /* If we don't have a constant length for the first, use the length
4298 of the second, if we know it. We don't require a constant for
4299 this case; some cost analysis could be done if both are available
4300 but neither is constant. For now, assume they're equally cheap,
4301 unless one has side effects. If both strings have constant lengths,
4308 else if (TREE_SIDE_EFFECTS (len1
))
4310 else if (TREE_SIDE_EFFECTS (len2
))
4312 else if (TREE_CODE (len1
) != INTEGER_CST
)
4314 else if (TREE_CODE (len2
) != INTEGER_CST
)
4316 else if (tree_int_cst_lt (len1
, len2
))
4321 /* If both arguments have side effects, we cannot optimize. */
4322 if (!len
|| TREE_SIDE_EFFECTS (len
))
4325 arg3_rtx
= expand_normal (len
);
4327 /* Make a place to write the result of the instruction. */
4330 && REG_P (result
) && GET_MODE (result
) == insn_mode
4331 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
4332 result
= gen_reg_rtx (insn_mode
);
4334 insn
= gen_cmpstrnsi (result
, arg1_rtx
, arg2_rtx
, arg3_rtx
,
4335 GEN_INT (MIN (arg1_align
, arg2_align
)));
4341 enum machine_mode mode
;
4344 /* Return the value in the proper mode for this function. */
4345 mode
= TYPE_MODE (TREE_TYPE (exp
));
4346 if (GET_MODE (result
) == mode
)
4349 return convert_to_mode (mode
, result
, 0);
4350 convert_move (target
, result
, 0);
4354 /* Expand the library call ourselves using a stabilized argument
4355 list to avoid re-evaluating the function's arguments twice. */
4356 #ifdef HAVE_cmpstrnsi
4359 fndecl
= get_callee_fndecl (exp
);
4360 fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fndecl
, 2, arg1
, arg2
);
4361 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
4362 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
4363 return expand_call (fn
, target
, target
== const0_rtx
);
4369 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4370 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4371 the result in TARGET, if convenient. */
4374 expand_builtin_strncmp (tree exp
, ATTRIBUTE_UNUSED rtx target
,
4375 ATTRIBUTE_UNUSED
enum machine_mode mode
)
4377 location_t loc ATTRIBUTE_UNUSED
= EXPR_LOCATION (exp
);
4379 if (!validate_arglist (exp
,
4380 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
4383 /* If c_strlen can determine an expression for one of the string
4384 lengths, and it doesn't have side effects, then emit cmpstrnsi
4385 using length MIN(strlen(string)+1, arg3). */
4386 #ifdef HAVE_cmpstrnsi
4389 tree len
, len1
, len2
;
4390 rtx arg1_rtx
, arg2_rtx
, arg3_rtx
;
4393 tree arg1
= CALL_EXPR_ARG (exp
, 0);
4394 tree arg2
= CALL_EXPR_ARG (exp
, 1);
4395 tree arg3
= CALL_EXPR_ARG (exp
, 2);
4397 unsigned int arg1_align
4398 = get_pointer_alignment (arg1
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
4399 unsigned int arg2_align
4400 = get_pointer_alignment (arg2
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
4401 enum machine_mode insn_mode
4402 = insn_data
[(int) CODE_FOR_cmpstrnsi
].operand
[0].mode
;
4404 len1
= c_strlen (arg1
, 1);
4405 len2
= c_strlen (arg2
, 1);
4408 len1
= size_binop_loc (loc
, PLUS_EXPR
, ssize_int (1), len1
);
4410 len2
= size_binop_loc (loc
, PLUS_EXPR
, ssize_int (1), len2
);
4412 /* If we don't have a constant length for the first, use the length
4413 of the second, if we know it. We don't require a constant for
4414 this case; some cost analysis could be done if both are available
4415 but neither is constant. For now, assume they're equally cheap,
4416 unless one has side effects. If both strings have constant lengths,
4423 else if (TREE_SIDE_EFFECTS (len1
))
4425 else if (TREE_SIDE_EFFECTS (len2
))
4427 else if (TREE_CODE (len1
) != INTEGER_CST
)
4429 else if (TREE_CODE (len2
) != INTEGER_CST
)
4431 else if (tree_int_cst_lt (len1
, len2
))
4436 /* If both arguments have side effects, we cannot optimize. */
4437 if (!len
|| TREE_SIDE_EFFECTS (len
))
4440 /* The actual new length parameter is MIN(len,arg3). */
4441 len
= fold_build2_loc (loc
, MIN_EXPR
, TREE_TYPE (len
), len
,
4442 fold_convert_loc (loc
, TREE_TYPE (len
), arg3
));
4444 /* If we don't have POINTER_TYPE, call the function. */
4445 if (arg1_align
== 0 || arg2_align
== 0)
4448 /* Make a place to write the result of the instruction. */
4451 && REG_P (result
) && GET_MODE (result
) == insn_mode
4452 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
4453 result
= gen_reg_rtx (insn_mode
);
4455 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4456 arg1
= builtin_save_expr (arg1
);
4457 arg2
= builtin_save_expr (arg2
);
4458 len
= builtin_save_expr (len
);
4460 arg1_rtx
= get_memory_rtx (arg1
, len
);
4461 arg2_rtx
= get_memory_rtx (arg2
, len
);
4462 arg3_rtx
= expand_normal (len
);
4463 insn
= gen_cmpstrnsi (result
, arg1_rtx
, arg2_rtx
, arg3_rtx
,
4464 GEN_INT (MIN (arg1_align
, arg2_align
)));
4469 /* Return the value in the proper mode for this function. */
4470 mode
= TYPE_MODE (TREE_TYPE (exp
));
4471 if (GET_MODE (result
) == mode
)
4474 return convert_to_mode (mode
, result
, 0);
4475 convert_move (target
, result
, 0);
4479 /* Expand the library call ourselves using a stabilized argument
4480 list to avoid re-evaluating the function's arguments twice. */
4481 fndecl
= get_callee_fndecl (exp
);
4482 fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fndecl
, 3,
4484 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
4485 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
4486 return expand_call (fn
, target
, target
== const0_rtx
);
4492 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4493 if that's convenient. */
4496 expand_builtin_saveregs (void)
4500 /* Don't do __builtin_saveregs more than once in a function.
4501 Save the result of the first call and reuse it. */
4502 if (saveregs_value
!= 0)
4503 return saveregs_value
;
4505 /* When this function is called, it means that registers must be
4506 saved on entry to this function. So we migrate the call to the
4507 first insn of this function. */
4511 /* Do whatever the machine needs done in this case. */
4512 val
= targetm
.calls
.expand_builtin_saveregs ();
4517 saveregs_value
= val
;
4519 /* Put the insns after the NOTE that starts the function. If this
4520 is inside a start_sequence, make the outer-level insn chain current, so
4521 the code is placed at the start of the function. */
4522 push_topmost_sequence ();
4523 emit_insn_after (seq
, entry_of_function ());
4524 pop_topmost_sequence ();
4529 /* Expand a call to __builtin_next_arg. */
4532 expand_builtin_next_arg (void)
4534 /* Checking arguments is already done in fold_builtin_next_arg
4535 that must be called before this function. */
4536 return expand_binop (ptr_mode
, add_optab
,
4537 crtl
->args
.internal_arg_pointer
,
4538 crtl
->args
.arg_offset_rtx
,
4539 NULL_RTX
, 0, OPTAB_LIB_WIDEN
);
4542 /* Make it easier for the backends by protecting the valist argument
4543 from multiple evaluations. */
4546 stabilize_va_list_loc (location_t loc
, tree valist
, int needs_lvalue
)
4548 tree vatype
= targetm
.canonical_va_list_type (TREE_TYPE (valist
));
4550 /* The current way of determining the type of valist is completely
4551 bogus. We should have the information on the va builtin instead. */
4553 vatype
= targetm
.fn_abi_va_list (cfun
->decl
);
4555 if (TREE_CODE (vatype
) == ARRAY_TYPE
)
4557 if (TREE_SIDE_EFFECTS (valist
))
4558 valist
= save_expr (valist
);
4560 /* For this case, the backends will be expecting a pointer to
4561 vatype, but it's possible we've actually been given an array
4562 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4564 if (TREE_CODE (TREE_TYPE (valist
)) == ARRAY_TYPE
)
4566 tree p1
= build_pointer_type (TREE_TYPE (vatype
));
4567 valist
= build_fold_addr_expr_with_type_loc (loc
, valist
, p1
);
4572 tree pt
= build_pointer_type (vatype
);
4576 if (! TREE_SIDE_EFFECTS (valist
))
4579 valist
= fold_build1_loc (loc
, ADDR_EXPR
, pt
, valist
);
4580 TREE_SIDE_EFFECTS (valist
) = 1;
4583 if (TREE_SIDE_EFFECTS (valist
))
4584 valist
= save_expr (valist
);
4585 valist
= fold_build2_loc (loc
, MEM_REF
,
4586 vatype
, valist
, build_int_cst (pt
, 0));
4592 /* The "standard" definition of va_list is void*. */
4595 std_build_builtin_va_list (void)
4597 return ptr_type_node
;
4600 /* The "standard" abi va_list is va_list_type_node. */
4603 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED
)
4605 return va_list_type_node
;
4608 /* The "standard" type of va_list is va_list_type_node. */
4611 std_canonical_va_list_type (tree type
)
4615 if (INDIRECT_REF_P (type
))
4616 type
= TREE_TYPE (type
);
4617 else if (POINTER_TYPE_P (type
) && POINTER_TYPE_P (TREE_TYPE(type
)))
4618 type
= TREE_TYPE (type
);
4619 wtype
= va_list_type_node
;
4621 /* Treat structure va_list types. */
4622 if (TREE_CODE (wtype
) == RECORD_TYPE
&& POINTER_TYPE_P (htype
))
4623 htype
= TREE_TYPE (htype
);
4624 else if (TREE_CODE (wtype
) == ARRAY_TYPE
)
4626 /* If va_list is an array type, the argument may have decayed
4627 to a pointer type, e.g. by being passed to another function.
4628 In that case, unwrap both types so that we can compare the
4629 underlying records. */
4630 if (TREE_CODE (htype
) == ARRAY_TYPE
4631 || POINTER_TYPE_P (htype
))
4633 wtype
= TREE_TYPE (wtype
);
4634 htype
= TREE_TYPE (htype
);
4637 if (TYPE_MAIN_VARIANT (wtype
) == TYPE_MAIN_VARIANT (htype
))
4638 return va_list_type_node
;
4643 /* The "standard" implementation of va_start: just assign `nextarg' to
4647 std_expand_builtin_va_start (tree valist
, rtx nextarg
)
4649 rtx va_r
= expand_expr (valist
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
4650 convert_move (va_r
, nextarg
, 0);
4653 /* Expand EXP, a call to __builtin_va_start. */
4656 expand_builtin_va_start (tree exp
)
4660 location_t loc
= EXPR_LOCATION (exp
);
4662 if (call_expr_nargs (exp
) < 2)
4664 error_at (loc
, "too few arguments to function %<va_start%>");
4668 if (fold_builtin_next_arg (exp
, true))
4671 nextarg
= expand_builtin_next_arg ();
4672 valist
= stabilize_va_list_loc (loc
, CALL_EXPR_ARG (exp
, 0), 1);
4674 if (targetm
.expand_builtin_va_start
)
4675 targetm
.expand_builtin_va_start (valist
, nextarg
);
4677 std_expand_builtin_va_start (valist
, nextarg
);
4682 /* The "standard" implementation of va_arg: read the value from the
4683 current (padded) address and increment by the (padded) size. */
4686 std_gimplify_va_arg_expr (tree valist
, tree type
, gimple_seq
*pre_p
,
4689 tree addr
, t
, type_size
, rounded_size
, valist_tmp
;
4690 unsigned HOST_WIDE_INT align
, boundary
;
4693 #ifdef ARGS_GROW_DOWNWARD
4694 /* All of the alignment and movement below is for args-grow-up machines.
4695 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4696 implement their own specialized gimplify_va_arg_expr routines. */
4700 indirect
= pass_by_reference (NULL
, TYPE_MODE (type
), type
, false);
4702 type
= build_pointer_type (type
);
4704 align
= PARM_BOUNDARY
/ BITS_PER_UNIT
;
4705 boundary
= targetm
.calls
.function_arg_boundary (TYPE_MODE (type
), type
);
4707 /* When we align parameter on stack for caller, if the parameter
4708 alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
4709 aligned at MAX_SUPPORTED_STACK_ALIGNMENT. We will match callee
4710 here with caller. */
4711 if (boundary
> MAX_SUPPORTED_STACK_ALIGNMENT
)
4712 boundary
= MAX_SUPPORTED_STACK_ALIGNMENT
;
4714 boundary
/= BITS_PER_UNIT
;
4716 /* Hoist the valist value into a temporary for the moment. */
4717 valist_tmp
= get_initialized_tmp_var (valist
, pre_p
, NULL
);
4719 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4720 requires greater alignment, we must perform dynamic alignment. */
4721 if (boundary
> align
4722 && !integer_zerop (TYPE_SIZE (type
)))
4724 t
= build2 (MODIFY_EXPR
, TREE_TYPE (valist
), valist_tmp
,
4725 fold_build2 (POINTER_PLUS_EXPR
,
4727 valist_tmp
, size_int (boundary
- 1)));
4728 gimplify_and_add (t
, pre_p
);
4730 t
= fold_convert (sizetype
, valist_tmp
);
4731 t
= build2 (MODIFY_EXPR
, TREE_TYPE (valist
), valist_tmp
,
4732 fold_convert (TREE_TYPE (valist
),
4733 fold_build2 (BIT_AND_EXPR
, sizetype
, t
,
4734 size_int (-boundary
))));
4735 gimplify_and_add (t
, pre_p
);
4740 /* If the actual alignment is less than the alignment of the type,
4741 adjust the type accordingly so that we don't assume strict alignment
4742 when dereferencing the pointer. */
4743 boundary
*= BITS_PER_UNIT
;
4744 if (boundary
< TYPE_ALIGN (type
))
4746 type
= build_variant_type_copy (type
);
4747 TYPE_ALIGN (type
) = boundary
;
4750 /* Compute the rounded size of the type. */
4751 type_size
= size_in_bytes (type
);
4752 rounded_size
= round_up (type_size
, align
);
4754 /* Reduce rounded_size so it's sharable with the postqueue. */
4755 gimplify_expr (&rounded_size
, pre_p
, post_p
, is_gimple_val
, fb_rvalue
);
4759 if (PAD_VARARGS_DOWN
&& !integer_zerop (rounded_size
))
4761 /* Small args are padded downward. */
4762 t
= fold_build2_loc (input_location
, GT_EXPR
, sizetype
,
4763 rounded_size
, size_int (align
));
4764 t
= fold_build3 (COND_EXPR
, sizetype
, t
, size_zero_node
,
4765 size_binop (MINUS_EXPR
, rounded_size
, type_size
));
4766 addr
= fold_build2 (POINTER_PLUS_EXPR
,
4767 TREE_TYPE (addr
), addr
, t
);
4770 /* Compute new value for AP. */
4771 t
= build2 (POINTER_PLUS_EXPR
, TREE_TYPE (valist
), valist_tmp
, rounded_size
);
4772 t
= build2 (MODIFY_EXPR
, TREE_TYPE (valist
), valist
, t
);
4773 gimplify_and_add (t
, pre_p
);
4775 addr
= fold_convert (build_pointer_type (type
), addr
);
4778 addr
= build_va_arg_indirect_ref (addr
);
4780 return build_va_arg_indirect_ref (addr
);
4783 /* Build an indirect-ref expression over the given TREE, which represents a
4784 piece of a va_arg() expansion. */
4786 build_va_arg_indirect_ref (tree addr
)
4788 addr
= build_fold_indirect_ref_loc (EXPR_LOCATION (addr
), addr
);
4790 if (flag_mudflap
) /* Don't instrument va_arg INDIRECT_REF. */
4796 /* Return a dummy expression of type TYPE in order to keep going after an
4800 dummy_object (tree type
)
4802 tree t
= build_int_cst (build_pointer_type (type
), 0);
4803 return build2 (MEM_REF
, type
, t
, t
);
4806 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4807 builtin function, but a very special sort of operator. */
4809 enum gimplify_status
4810 gimplify_va_arg_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
4812 tree promoted_type
, have_va_type
;
4813 tree valist
= TREE_OPERAND (*expr_p
, 0);
4814 tree type
= TREE_TYPE (*expr_p
);
4816 location_t loc
= EXPR_LOCATION (*expr_p
);
4818 /* Verify that valist is of the proper type. */
4819 have_va_type
= TREE_TYPE (valist
);
4820 if (have_va_type
== error_mark_node
)
4822 have_va_type
= targetm
.canonical_va_list_type (have_va_type
);
4824 if (have_va_type
== NULL_TREE
)
4826 error_at (loc
, "first argument to %<va_arg%> not of type %<va_list%>");
4830 /* Generate a diagnostic for requesting data of a type that cannot
4831 be passed through `...' due to type promotion at the call site. */
4832 if ((promoted_type
= lang_hooks
.types
.type_promotes_to (type
))
4835 static bool gave_help
;
4838 /* Unfortunately, this is merely undefined, rather than a constraint
4839 violation, so we cannot make this an error. If this call is never
4840 executed, the program is still strictly conforming. */
4841 warned
= warning_at (loc
, 0,
4842 "%qT is promoted to %qT when passed through %<...%>",
4843 type
, promoted_type
);
4844 if (!gave_help
&& warned
)
4847 inform (loc
, "(so you should pass %qT not %qT to %<va_arg%>)",
4848 promoted_type
, type
);
4851 /* We can, however, treat "undefined" any way we please.
4852 Call abort to encourage the user to fix the program. */
4854 inform (loc
, "if this code is reached, the program will abort");
4855 /* Before the abort, allow the evaluation of the va_list
4856 expression to exit or longjmp. */
4857 gimplify_and_add (valist
, pre_p
);
4858 t
= build_call_expr_loc (loc
,
4859 implicit_built_in_decls
[BUILT_IN_TRAP
], 0);
4860 gimplify_and_add (t
, pre_p
);
4862 /* This is dead code, but go ahead and finish so that the
4863 mode of the result comes out right. */
4864 *expr_p
= dummy_object (type
);
4869 /* Make it easier for the backends by protecting the valist argument
4870 from multiple evaluations. */
4871 if (TREE_CODE (have_va_type
) == ARRAY_TYPE
)
4873 /* For this case, the backends will be expecting a pointer to
4874 TREE_TYPE (abi), but it's possible we've
4875 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
4877 if (TREE_CODE (TREE_TYPE (valist
)) == ARRAY_TYPE
)
4879 tree p1
= build_pointer_type (TREE_TYPE (have_va_type
));
4880 valist
= fold_convert_loc (loc
, p1
,
4881 build_fold_addr_expr_loc (loc
, valist
));
4884 gimplify_expr (&valist
, pre_p
, post_p
, is_gimple_val
, fb_rvalue
);
4887 gimplify_expr (&valist
, pre_p
, post_p
, is_gimple_min_lval
, fb_lvalue
);
4889 if (!targetm
.gimplify_va_arg_expr
)
4890 /* FIXME: Once most targets are converted we should merely
4891 assert this is non-null. */
4894 *expr_p
= targetm
.gimplify_va_arg_expr (valist
, type
, pre_p
, post_p
);
4899 /* Expand EXP, a call to __builtin_va_end. */
4902 expand_builtin_va_end (tree exp
)
4904 tree valist
= CALL_EXPR_ARG (exp
, 0);
4906 /* Evaluate for side effects, if needed. I hate macros that don't
4908 if (TREE_SIDE_EFFECTS (valist
))
4909 expand_expr (valist
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4914 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4915 builtin rather than just as an assignment in stdarg.h because of the
4916 nastiness of array-type va_list types. */
4919 expand_builtin_va_copy (tree exp
)
4922 location_t loc
= EXPR_LOCATION (exp
);
4924 dst
= CALL_EXPR_ARG (exp
, 0);
4925 src
= CALL_EXPR_ARG (exp
, 1);
4927 dst
= stabilize_va_list_loc (loc
, dst
, 1);
4928 src
= stabilize_va_list_loc (loc
, src
, 0);
4930 gcc_assert (cfun
!= NULL
&& cfun
->decl
!= NULL_TREE
);
4932 if (TREE_CODE (targetm
.fn_abi_va_list (cfun
->decl
)) != ARRAY_TYPE
)
4934 t
= build2 (MODIFY_EXPR
, targetm
.fn_abi_va_list (cfun
->decl
), dst
, src
);
4935 TREE_SIDE_EFFECTS (t
) = 1;
4936 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4940 rtx dstb
, srcb
, size
;
4942 /* Evaluate to pointers. */
4943 dstb
= expand_expr (dst
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
4944 srcb
= expand_expr (src
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
4945 size
= expand_expr (TYPE_SIZE_UNIT (targetm
.fn_abi_va_list (cfun
->decl
)),
4946 NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
4948 dstb
= convert_memory_address (Pmode
, dstb
);
4949 srcb
= convert_memory_address (Pmode
, srcb
);
4951 /* "Dereference" to BLKmode memories. */
4952 dstb
= gen_rtx_MEM (BLKmode
, dstb
);
4953 set_mem_alias_set (dstb
, get_alias_set (TREE_TYPE (TREE_TYPE (dst
))));
4954 set_mem_align (dstb
, TYPE_ALIGN (targetm
.fn_abi_va_list (cfun
->decl
)));
4955 srcb
= gen_rtx_MEM (BLKmode
, srcb
);
4956 set_mem_alias_set (srcb
, get_alias_set (TREE_TYPE (TREE_TYPE (src
))));
4957 set_mem_align (srcb
, TYPE_ALIGN (targetm
.fn_abi_va_list (cfun
->decl
)));
4960 emit_block_move (dstb
, srcb
, size
, BLOCK_OP_NORMAL
);
4966 /* Expand a call to one of the builtin functions __builtin_frame_address or
4967 __builtin_return_address. */
4970 expand_builtin_frame_address (tree fndecl
, tree exp
)
4972 /* The argument must be a nonnegative integer constant.
4973 It counts the number of frames to scan up the stack.
4974 The value is the return address saved in that frame. */
4975 if (call_expr_nargs (exp
) == 0)
4976 /* Warning about missing arg was already issued. */
4978 else if (! host_integerp (CALL_EXPR_ARG (exp
, 0), 1))
4980 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
4981 error ("invalid argument to %<__builtin_frame_address%>");
4983 error ("invalid argument to %<__builtin_return_address%>");
4989 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl
),
4990 tree_low_cst (CALL_EXPR_ARG (exp
, 0), 1));
4992 /* Some ports cannot access arbitrary stack frames. */
4995 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
4996 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4998 warning (0, "unsupported argument to %<__builtin_return_address%>");
5002 /* For __builtin_frame_address, return what we've got. */
5003 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
5007 && ! CONSTANT_P (tem
))
5008 tem
= copy_to_mode_reg (Pmode
, tem
);
5013 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
5014 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
5015 is the same as for allocate_dynamic_stack_space. */
5018 expand_builtin_alloca (tree exp
, bool cannot_accumulate
)
5023 /* Emit normal call if marked not-inlineable. */
5024 if (CALL_CANNOT_INLINE_P (exp
))
5027 if (!validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
))
5030 /* Compute the argument. */
5031 op0
= expand_normal (CALL_EXPR_ARG (exp
, 0));
5033 /* Allocate the desired space. */
5034 result
= allocate_dynamic_stack_space (op0
, 0, BIGGEST_ALIGNMENT
,
5036 result
= convert_memory_address (ptr_mode
, result
);
5041 /* Expand a call to a bswap builtin with argument ARG0. MODE
5042 is the mode to expand with. */
5045 expand_builtin_bswap (tree exp
, rtx target
, rtx subtarget
)
5047 enum machine_mode mode
;
5051 if (!validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
))
5054 arg
= CALL_EXPR_ARG (exp
, 0);
5055 mode
= TYPE_MODE (TREE_TYPE (arg
));
5056 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
5058 target
= expand_unop (mode
, bswap_optab
, op0
, target
, 1);
5060 gcc_assert (target
);
5062 return convert_to_mode (mode
, target
, 0);
5065 /* Expand a call to a unary builtin in EXP.
5066 Return NULL_RTX if a normal call should be emitted rather than expanding the
5067 function in-line. If convenient, the result should be placed in TARGET.
5068 SUBTARGET may be used as the target for computing one of EXP's operands. */
5071 expand_builtin_unop (enum machine_mode target_mode
, tree exp
, rtx target
,
5072 rtx subtarget
, optab op_optab
)
5076 if (!validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
))
5079 /* Compute the argument. */
5080 op0
= expand_expr (CALL_EXPR_ARG (exp
, 0),
5082 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 0)))
5083 == GET_MODE (subtarget
))) ? subtarget
: NULL_RTX
,
5084 VOIDmode
, EXPAND_NORMAL
);
5085 /* Compute op, into TARGET if possible.
5086 Set TARGET to wherever the result comes back. */
5087 target
= expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 0))),
5088 op_optab
, op0
, target
, 1);
5089 gcc_assert (target
);
5091 return convert_to_mode (target_mode
, target
, 0);
5094 /* Expand a call to __builtin_expect. We just return our argument
5095 as the builtin_expect semantic should've been already executed by
5096 tree branch prediction pass. */
5099 expand_builtin_expect (tree exp
, rtx target
)
5103 if (call_expr_nargs (exp
) < 2)
5105 arg
= CALL_EXPR_ARG (exp
, 0);
5107 target
= expand_expr (arg
, target
, VOIDmode
, EXPAND_NORMAL
);
5108 /* When guessing was done, the hints should be already stripped away. */
5109 gcc_assert (!flag_guess_branch_prob
5110 || optimize
== 0 || seen_error ());
5115 expand_builtin_trap (void)
5119 emit_insn (gen_trap ());
5122 emit_library_call (abort_libfunc
, LCT_NORETURN
, VOIDmode
, 0);
/* Expand a call to __builtin_unreachable.  We do nothing except emit
   a barrier saying that control flow will not pass here.

   It is the responsibility of the program being compiled to ensure
   that control flow does never reach __builtin_unreachable.  */

static void
expand_builtin_unreachable (void)
{
  emit_barrier ();
}
5137 /* Expand EXP, a call to fabs, fabsf or fabsl.
5138 Return NULL_RTX if a normal call should be emitted rather than expanding
5139 the function inline. If convenient, the result should be placed
5140 in TARGET. SUBTARGET may be used as the target for computing
5144 expand_builtin_fabs (tree exp
, rtx target
, rtx subtarget
)
5146 enum machine_mode mode
;
5150 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
5153 arg
= CALL_EXPR_ARG (exp
, 0);
5154 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
5155 mode
= TYPE_MODE (TREE_TYPE (arg
));
5156 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
5157 return expand_abs (mode
, op0
, target
, 0, safe_from_p (target
, arg
, 1));
5160 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5161 Return NULL is a normal call should be emitted rather than expanding the
5162 function inline. If convenient, the result should be placed in TARGET.
5163 SUBTARGET may be used as the target for computing the operand. */
5166 expand_builtin_copysign (tree exp
, rtx target
, rtx subtarget
)
5171 if (!validate_arglist (exp
, REAL_TYPE
, REAL_TYPE
, VOID_TYPE
))
5174 arg
= CALL_EXPR_ARG (exp
, 0);
5175 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
5177 arg
= CALL_EXPR_ARG (exp
, 1);
5178 op1
= expand_normal (arg
);
5180 return expand_copysign (op0
, op1
, target
);
5183 /* Create a new constant string literal and return a char* pointer to it.
5184 The STRING_CST value is the LEN characters at STR. */
5186 build_string_literal (int len
, const char *str
)
5188 tree t
, elem
, index
, type
;
5190 t
= build_string (len
, str
);
5191 elem
= build_type_variant (char_type_node
, 1, 0);
5192 index
= build_index_type (size_int (len
- 1));
5193 type
= build_array_type (elem
, index
);
5194 TREE_TYPE (t
) = type
;
5195 TREE_CONSTANT (t
) = 1;
5196 TREE_READONLY (t
) = 1;
5197 TREE_STATIC (t
) = 1;
5199 type
= build_pointer_type (elem
);
5200 t
= build1 (ADDR_EXPR
, type
,
5201 build4 (ARRAY_REF
, elem
,
5202 t
, integer_zero_node
, NULL_TREE
, NULL_TREE
));
5206 /* Expand a call to either the entry or exit function profiler. */
5209 expand_builtin_profile_func (bool exitp
)
5211 rtx this_rtx
, which
;
5213 this_rtx
= DECL_RTL (current_function_decl
);
5214 gcc_assert (MEM_P (this_rtx
));
5215 this_rtx
= XEXP (this_rtx
, 0);
5218 which
= profile_function_exit_libfunc
;
5220 which
= profile_function_entry_libfunc
;
5222 emit_library_call (which
, LCT_NORMAL
, VOIDmode
, 2, this_rtx
, Pmode
,
5223 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS
,
5230 /* Expand a call to __builtin___clear_cache. */
5233 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED
)
5235 #ifndef HAVE_clear_cache
5236 #ifdef CLEAR_INSN_CACHE
5237 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5238 does something. Just do the default expansion to a call to
5242 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5243 does nothing. There is no need to call it. Do nothing. */
5245 #endif /* CLEAR_INSN_CACHE */
5247 /* We have a "clear_cache" insn, and it will handle everything. */
5249 rtx begin_rtx
, end_rtx
;
5250 enum insn_code icode
;
5252 /* We must not expand to a library call. If we did, any
5253 fallback library function in libgcc that might contain a call to
5254 __builtin___clear_cache() would recurse infinitely. */
5255 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
5257 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5261 if (HAVE_clear_cache
)
5263 icode
= CODE_FOR_clear_cache
;
5265 begin
= CALL_EXPR_ARG (exp
, 0);
5266 begin_rtx
= expand_expr (begin
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
5267 begin_rtx
= convert_memory_address (Pmode
, begin_rtx
);
5268 if (!insn_data
[icode
].operand
[0].predicate (begin_rtx
, Pmode
))
5269 begin_rtx
= copy_to_mode_reg (Pmode
, begin_rtx
);
5271 end
= CALL_EXPR_ARG (exp
, 1);
5272 end_rtx
= expand_expr (end
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
5273 end_rtx
= convert_memory_address (Pmode
, end_rtx
);
5274 if (!insn_data
[icode
].operand
[1].predicate (end_rtx
, Pmode
))
5275 end_rtx
= copy_to_mode_reg (Pmode
, end_rtx
);
5277 emit_insn (gen_clear_cache (begin_rtx
, end_rtx
));
5280 #endif /* HAVE_clear_cache */
5283 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5286 round_trampoline_addr (rtx tramp
)
5288 rtx temp
, addend
, mask
;
5290 /* If we don't need too much alignment, we'll have been guaranteed
5291 proper alignment by get_trampoline_type. */
5292 if (TRAMPOLINE_ALIGNMENT
<= STACK_BOUNDARY
)
5295 /* Round address up to desired boundary. */
5296 temp
= gen_reg_rtx (Pmode
);
5297 addend
= GEN_INT (TRAMPOLINE_ALIGNMENT
/ BITS_PER_UNIT
- 1);
5298 mask
= GEN_INT (-TRAMPOLINE_ALIGNMENT
/ BITS_PER_UNIT
);
5300 temp
= expand_simple_binop (Pmode
, PLUS
, tramp
, addend
,
5301 temp
, 0, OPTAB_LIB_WIDEN
);
5302 tramp
= expand_simple_binop (Pmode
, AND
, temp
, mask
,
5303 temp
, 0, OPTAB_LIB_WIDEN
);
5309 expand_builtin_init_trampoline (tree exp
)
5311 tree t_tramp
, t_func
, t_chain
;
5312 rtx m_tramp
, r_tramp
, r_chain
, tmp
;
5314 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
,
5315 POINTER_TYPE
, VOID_TYPE
))
5318 t_tramp
= CALL_EXPR_ARG (exp
, 0);
5319 t_func
= CALL_EXPR_ARG (exp
, 1);
5320 t_chain
= CALL_EXPR_ARG (exp
, 2);
5322 r_tramp
= expand_normal (t_tramp
);
5323 m_tramp
= gen_rtx_MEM (BLKmode
, r_tramp
);
5324 MEM_NOTRAP_P (m_tramp
) = 1;
5326 /* The TRAMP argument should be the address of a field within the
5327 local function's FRAME decl. Let's see if we can fill in the
5328 to fill in the MEM_ATTRs for this memory. */
5329 if (TREE_CODE (t_tramp
) == ADDR_EXPR
)
5330 set_mem_attributes_minus_bitpos (m_tramp
, TREE_OPERAND (t_tramp
, 0),
5333 tmp
= round_trampoline_addr (r_tramp
);
5336 m_tramp
= change_address (m_tramp
, BLKmode
, tmp
);
5337 set_mem_align (m_tramp
, TRAMPOLINE_ALIGNMENT
);
5338 set_mem_size (m_tramp
, GEN_INT (TRAMPOLINE_SIZE
));
5341 /* The FUNC argument should be the address of the nested function.
5342 Extract the actual function decl to pass to the hook. */
5343 gcc_assert (TREE_CODE (t_func
) == ADDR_EXPR
);
5344 t_func
= TREE_OPERAND (t_func
, 0);
5345 gcc_assert (TREE_CODE (t_func
) == FUNCTION_DECL
);
5347 r_chain
= expand_normal (t_chain
);
5349 /* Generate insns to initialize the trampoline. */
5350 targetm
.calls
.trampoline_init (m_tramp
, t_func
, r_chain
);
5352 trampolines_created
= 1;
5354 warning_at (DECL_SOURCE_LOCATION (t_func
), OPT_Wtrampolines
,
5355 "trampoline generated for nested function %qD", t_func
);
5361 expand_builtin_adjust_trampoline (tree exp
)
5365 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
5368 tramp
= expand_normal (CALL_EXPR_ARG (exp
, 0));
5369 tramp
= round_trampoline_addr (tramp
);
5370 if (targetm
.calls
.trampoline_adjust_address
)
5371 tramp
= targetm
.calls
.trampoline_adjust_address (tramp
);
5376 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5377 function. The function first checks whether the back end provides
5378 an insn to implement signbit for the respective mode. If not, it
5379 checks whether the floating point format of the value is such that
5380 the sign bit can be extracted. If that is not the case, the
5381 function returns NULL_RTX to indicate that a normal call should be
5382 emitted rather than expanding the function in-line. EXP is the
5383 expression that is a call to the builtin function; if convenient,
5384 the result should be placed in TARGET. */
5386 expand_builtin_signbit (tree exp
, rtx target
)
5388 const struct real_format
*fmt
;
5389 enum machine_mode fmode
, imode
, rmode
;
5392 enum insn_code icode
;
5394 location_t loc
= EXPR_LOCATION (exp
);
5396 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
5399 arg
= CALL_EXPR_ARG (exp
, 0);
5400 fmode
= TYPE_MODE (TREE_TYPE (arg
));
5401 rmode
= TYPE_MODE (TREE_TYPE (exp
));
5402 fmt
= REAL_MODE_FORMAT (fmode
);
5404 arg
= builtin_save_expr (arg
);
5406 /* Expand the argument yielding a RTX expression. */
5407 temp
= expand_normal (arg
);
5409 /* Check if the back end provides an insn that handles signbit for the
5411 icode
= optab_handler (signbit_optab
, fmode
);
5412 if (icode
!= CODE_FOR_nothing
)
5414 rtx last
= get_last_insn ();
5415 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
5416 if (maybe_emit_unop_insn (icode
, target
, temp
, UNKNOWN
))
5418 delete_insns_since (last
);
5421 /* For floating point formats without a sign bit, implement signbit
5423 bitpos
= fmt
->signbit_ro
;
5426 /* But we can't do this if the format supports signed zero. */
5427 if (fmt
->has_signed_zero
&& HONOR_SIGNED_ZEROS (fmode
))
5430 arg
= fold_build2_loc (loc
, LT_EXPR
, TREE_TYPE (exp
), arg
,
5431 build_real (TREE_TYPE (arg
), dconst0
));
5432 return expand_expr (arg
, target
, VOIDmode
, EXPAND_NORMAL
);
5435 if (GET_MODE_SIZE (fmode
) <= UNITS_PER_WORD
)
5437 imode
= int_mode_for_mode (fmode
);
5438 if (imode
== BLKmode
)
5440 temp
= gen_lowpart (imode
, temp
);
5445 /* Handle targets with different FP word orders. */
5446 if (FLOAT_WORDS_BIG_ENDIAN
)
5447 word
= (GET_MODE_BITSIZE (fmode
) - bitpos
) / BITS_PER_WORD
;
5449 word
= bitpos
/ BITS_PER_WORD
;
5450 temp
= operand_subword_force (temp
, word
, fmode
);
5451 bitpos
= bitpos
% BITS_PER_WORD
;
5454 /* Force the intermediate word_mode (or narrower) result into a
5455 register. This avoids attempting to create paradoxical SUBREGs
5456 of floating point modes below. */
5457 temp
= force_reg (imode
, temp
);
5459 /* If the bitpos is within the "result mode" lowpart, the operation
5460 can be implement with a single bitwise AND. Otherwise, we need
5461 a right shift and an AND. */
5463 if (bitpos
< GET_MODE_BITSIZE (rmode
))
5465 double_int mask
= double_int_setbit (double_int_zero
, bitpos
);
5467 if (GET_MODE_SIZE (imode
) > GET_MODE_SIZE (rmode
))
5468 temp
= gen_lowpart (rmode
, temp
);
5469 temp
= expand_binop (rmode
, and_optab
, temp
,
5470 immed_double_int_const (mask
, rmode
),
5471 NULL_RTX
, 1, OPTAB_LIB_WIDEN
);
5475 /* Perform a logical right shift to place the signbit in the least
5476 significant bit, then truncate the result to the desired mode
5477 and mask just this bit. */
5478 temp
= expand_shift (RSHIFT_EXPR
, imode
, temp
,
5479 build_int_cst (NULL_TREE
, bitpos
), NULL_RTX
, 1);
5480 temp
= gen_lowpart (rmode
, temp
);
5481 temp
= expand_binop (rmode
, and_optab
, temp
, const1_rtx
,
5482 NULL_RTX
, 1, OPTAB_LIB_WIDEN
);
5488 /* Expand fork or exec calls. TARGET is the desired target of the
5489 call. EXP is the call. FN is the
5490 identificator of the actual function. IGNORE is nonzero if the
5491 value is to be ignored. */
5494 expand_builtin_fork_or_exec (tree fn
, tree exp
, rtx target
, int ignore
)
5499 /* If we are not profiling, just call the function. */
5500 if (!profile_arc_flag
)
5503 /* Otherwise call the wrapper. This should be equivalent for the rest of
5504 compiler, so the code does not diverge, and the wrapper may run the
5505 code necessary for keeping the profiling sane. */
5507 switch (DECL_FUNCTION_CODE (fn
))
5510 id
= get_identifier ("__gcov_fork");
5513 case BUILT_IN_EXECL
:
5514 id
= get_identifier ("__gcov_execl");
5517 case BUILT_IN_EXECV
:
5518 id
= get_identifier ("__gcov_execv");
5521 case BUILT_IN_EXECLP
:
5522 id
= get_identifier ("__gcov_execlp");
5525 case BUILT_IN_EXECLE
:
5526 id
= get_identifier ("__gcov_execle");
5529 case BUILT_IN_EXECVP
:
5530 id
= get_identifier ("__gcov_execvp");
5533 case BUILT_IN_EXECVE
:
5534 id
= get_identifier ("__gcov_execve");
5541 decl
= build_decl (DECL_SOURCE_LOCATION (fn
),
5542 FUNCTION_DECL
, id
, TREE_TYPE (fn
));
5543 DECL_EXTERNAL (decl
) = 1;
5544 TREE_PUBLIC (decl
) = 1;
5545 DECL_ARTIFICIAL (decl
) = 1;
5546 TREE_NOTHROW (decl
) = 1;
5547 DECL_VISIBILITY (decl
) = VISIBILITY_DEFAULT
;
5548 DECL_VISIBILITY_SPECIFIED (decl
) = 1;
5549 call
= rewrite_call_expr (EXPR_LOCATION (exp
), exp
, 0, decl
, 0);
5550 return expand_call (call
, target
, ignore
);
5555 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5556 the pointer in these functions is void*, the tree optimizers may remove
5557 casts. The mode computed in expand_builtin isn't reliable either, due
5558 to __sync_bool_compare_and_swap.
5560 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5561 group of builtins. This gives us log2 of the mode size. */
5563 static inline enum machine_mode
5564 get_builtin_sync_mode (int fcode_diff
)
5566 /* The size is not negotiable, so ask not to get BLKmode in return
5567 if the target indicates that a smaller size would be better. */
5568 return mode_for_size (BITS_PER_UNIT
<< fcode_diff
, MODE_INT
, 0);
5571 /* Expand the memory expression LOC and return the appropriate memory operand
5572 for the builtin_sync operations. */
5575 get_builtin_sync_mem (tree loc
, enum machine_mode mode
)
5579 addr
= expand_expr (loc
, NULL_RTX
, ptr_mode
, EXPAND_SUM
);
5580 addr
= convert_memory_address (Pmode
, addr
);
5582 /* Note that we explicitly do not want any alias information for this
5583 memory, so that we kill all other live memories. Otherwise we don't
5584 satisfy the full barrier semantics of the intrinsic. */
5585 mem
= validize_mem (gen_rtx_MEM (mode
, addr
));
5587 /* The alignment needs to be at least according to that of the mode. */
5588 set_mem_align (mem
, MAX (GET_MODE_ALIGNMENT (mode
),
5589 get_pointer_alignment (loc
, BIGGEST_ALIGNMENT
)));
5590 set_mem_alias_set (mem
, ALIAS_SET_MEMORY_BARRIER
);
5591 MEM_VOLATILE_P (mem
) = 1;
5596 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5597 EXP is the CALL_EXPR. CODE is the rtx code
5598 that corresponds to the arithmetic or logical operation from the name;
5599 an exception here is that NOT actually means NAND. TARGET is an optional
5600 place for us to store the results; AFTER is true if this is the
5601 fetch_and_xxx form. IGNORE is true if we don't actually care about
5602 the result of the operation at all. */
5605 expand_builtin_sync_operation (enum machine_mode mode
, tree exp
,
5606 enum rtx_code code
, bool after
,
5607 rtx target
, bool ignore
)
5610 enum machine_mode old_mode
;
5611 location_t loc
= EXPR_LOCATION (exp
);
5613 if (code
== NOT
&& warn_sync_nand
)
5615 tree fndecl
= get_callee_fndecl (exp
);
5616 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
5618 static bool warned_f_a_n
, warned_n_a_f
;
5622 case BUILT_IN_FETCH_AND_NAND_1
:
5623 case BUILT_IN_FETCH_AND_NAND_2
:
5624 case BUILT_IN_FETCH_AND_NAND_4
:
5625 case BUILT_IN_FETCH_AND_NAND_8
:
5626 case BUILT_IN_FETCH_AND_NAND_16
:
5631 fndecl
= implicit_built_in_decls
[BUILT_IN_FETCH_AND_NAND_N
];
5632 inform (loc
, "%qD changed semantics in GCC 4.4", fndecl
);
5633 warned_f_a_n
= true;
5636 case BUILT_IN_NAND_AND_FETCH_1
:
5637 case BUILT_IN_NAND_AND_FETCH_2
:
5638 case BUILT_IN_NAND_AND_FETCH_4
:
5639 case BUILT_IN_NAND_AND_FETCH_8
:
5640 case BUILT_IN_NAND_AND_FETCH_16
:
5645 fndecl
= implicit_built_in_decls
[BUILT_IN_NAND_AND_FETCH_N
];
5646 inform (loc
, "%qD changed semantics in GCC 4.4", fndecl
);
5647 warned_n_a_f
= true;
5655 /* Expand the operands. */
5656 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5658 val
= expand_expr (CALL_EXPR_ARG (exp
, 1), NULL_RTX
, mode
, EXPAND_NORMAL
);
5659 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5660 of CONST_INTs, where we know the old_mode only from the call argument. */
5661 old_mode
= GET_MODE (val
);
5662 if (old_mode
== VOIDmode
)
5663 old_mode
= TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 1)));
5664 val
= convert_modes (mode
, old_mode
, val
, 1);
5667 return expand_sync_operation (mem
, val
, code
);
5669 return expand_sync_fetch_operation (mem
, val
, code
, after
, target
);
5672 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5673 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5674 true if this is the boolean form. TARGET is a place for us to store the
5675 results; this is NOT optional if IS_BOOL is true. */
5678 expand_builtin_compare_and_swap (enum machine_mode mode
, tree exp
,
5679 bool is_bool
, rtx target
)
5681 rtx old_val
, new_val
, mem
;
5682 enum machine_mode old_mode
;
5684 /* Expand the operands. */
5685 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5688 old_val
= expand_expr (CALL_EXPR_ARG (exp
, 1), NULL_RTX
,
5689 mode
, EXPAND_NORMAL
);
5690 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5691 of CONST_INTs, where we know the old_mode only from the call argument. */
5692 old_mode
= GET_MODE (old_val
);
5693 if (old_mode
== VOIDmode
)
5694 old_mode
= TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 1)));
5695 old_val
= convert_modes (mode
, old_mode
, old_val
, 1);
5697 new_val
= expand_expr (CALL_EXPR_ARG (exp
, 2), NULL_RTX
,
5698 mode
, EXPAND_NORMAL
);
5699 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5700 of CONST_INTs, where we know the old_mode only from the call argument. */
5701 old_mode
= GET_MODE (new_val
);
5702 if (old_mode
== VOIDmode
)
5703 old_mode
= TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 2)));
5704 new_val
= convert_modes (mode
, old_mode
, new_val
, 1);
5707 return expand_bool_compare_and_swap (mem
, old_val
, new_val
, target
);
5709 return expand_val_compare_and_swap (mem
, old_val
, new_val
, target
);
5712 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5713 general form is actually an atomic exchange, and some targets only
5714 support a reduced form with the second argument being a constant 1.
5715 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5719 expand_builtin_lock_test_and_set (enum machine_mode mode
, tree exp
,
5723 enum machine_mode old_mode
;
5725 /* Expand the operands. */
5726 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5727 val
= expand_expr (CALL_EXPR_ARG (exp
, 1), NULL_RTX
, mode
, EXPAND_NORMAL
);
5728 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5729 of CONST_INTs, where we know the old_mode only from the call argument. */
5730 old_mode
= GET_MODE (val
);
5731 if (old_mode
== VOIDmode
)
5732 old_mode
= TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 1)));
5733 val
= convert_modes (mode
, old_mode
, val
, 1);
5735 return expand_sync_lock_test_and_set (mem
, val
, target
);
5738 /* Expand the __sync_synchronize intrinsic. */
5741 expand_builtin_synchronize (void)
5744 VEC (tree
, gc
) *v_clobbers
;
5746 #ifdef HAVE_memory_barrier
5747 if (HAVE_memory_barrier
)
5749 emit_insn (gen_memory_barrier ());
5754 if (synchronize_libfunc
!= NULL_RTX
)
5756 emit_library_call (synchronize_libfunc
, LCT_NORMAL
, VOIDmode
, 0);
5760 /* If no explicit memory barrier instruction is available, create an
5761 empty asm stmt with a memory clobber. */
5762 v_clobbers
= VEC_alloc (tree
, gc
, 1);
5763 VEC_quick_push (tree
, v_clobbers
,
5764 tree_cons (NULL
, build_string (6, "memory"), NULL
));
5765 x
= gimple_build_asm_vec ("", NULL
, NULL
, v_clobbers
, NULL
);
5766 gimple_asm_set_volatile (x
, true);
5767 expand_asm_stmt (x
);
5770 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5773 expand_builtin_lock_release (enum machine_mode mode
, tree exp
)
5775 enum insn_code icode
;
5777 rtx val
= const0_rtx
;
5779 /* Expand the operands. */
5780 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5782 /* If there is an explicit operation in the md file, use it. */
5783 icode
= direct_optab_handler (sync_lock_release_optab
, mode
);
5784 if (icode
!= CODE_FOR_nothing
)
5786 if (!insn_data
[icode
].operand
[1].predicate (val
, mode
))
5787 val
= force_reg (mode
, val
);
5789 insn
= GEN_FCN (icode
) (mem
, val
);
5797 /* Otherwise we can implement this operation by emitting a barrier
5798 followed by a store of zero. */
5799 expand_builtin_synchronize ();
5800 emit_move_insn (mem
, val
);
5803 /* Expand an expression EXP that calls a built-in function,
5804 with result going to TARGET if that's convenient
5805 (and in mode MODE if that's convenient).
5806 SUBTARGET may be used as the target for computing one of EXP's operands.
5807 IGNORE is nonzero if the value is to be ignored. */
5810 expand_builtin (tree exp
, rtx target
, rtx subtarget
, enum machine_mode mode
,
5813 tree fndecl
= get_callee_fndecl (exp
);
5814 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
5815 enum machine_mode target_mode
= TYPE_MODE (TREE_TYPE (exp
));
5818 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
5819 return targetm
.expand_builtin (exp
, target
, subtarget
, mode
, ignore
);
5821 /* When not optimizing, generate calls to library functions for a certain
5824 && !called_as_built_in (fndecl
)
5825 && DECL_ASSEMBLER_NAME_SET_P (fndecl
)
5826 && fcode
!= BUILT_IN_ALLOCA
5827 && fcode
!= BUILT_IN_FREE
)
5828 return expand_call (exp
, target
, ignore
);
5830 /* The built-in function expanders test for target == const0_rtx
5831 to determine whether the function's result will be ignored. */
5833 target
= const0_rtx
;
5835 /* If the result of a pure or const built-in function is ignored, and
5836 none of its arguments are volatile, we can avoid expanding the
5837 built-in call and just evaluate the arguments for side-effects. */
5838 if (target
== const0_rtx
5839 && ((flags
= flags_from_decl_or_type (fndecl
)) & (ECF_CONST
| ECF_PURE
))
5840 && !(flags
& ECF_LOOPING_CONST_OR_PURE
))
5842 bool volatilep
= false;
5844 call_expr_arg_iterator iter
;
5846 FOR_EACH_CALL_EXPR_ARG (arg
, iter
, exp
)
5847 if (TREE_THIS_VOLATILE (arg
))
5855 FOR_EACH_CALL_EXPR_ARG (arg
, iter
, exp
)
5856 expand_expr (arg
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
5863 CASE_FLT_FN (BUILT_IN_FABS
):
5864 target
= expand_builtin_fabs (exp
, target
, subtarget
);
5869 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
5870 target
= expand_builtin_copysign (exp
, target
, subtarget
);
5875 /* Just do a normal library call if we were unable to fold
5877 CASE_FLT_FN (BUILT_IN_CABS
):
5880 CASE_FLT_FN (BUILT_IN_EXP
):
5881 CASE_FLT_FN (BUILT_IN_EXP10
):
5882 CASE_FLT_FN (BUILT_IN_POW10
):
5883 CASE_FLT_FN (BUILT_IN_EXP2
):
5884 CASE_FLT_FN (BUILT_IN_EXPM1
):
5885 CASE_FLT_FN (BUILT_IN_LOGB
):
5886 CASE_FLT_FN (BUILT_IN_LOG
):
5887 CASE_FLT_FN (BUILT_IN_LOG10
):
5888 CASE_FLT_FN (BUILT_IN_LOG2
):
5889 CASE_FLT_FN (BUILT_IN_LOG1P
):
5890 CASE_FLT_FN (BUILT_IN_TAN
):
5891 CASE_FLT_FN (BUILT_IN_ASIN
):
5892 CASE_FLT_FN (BUILT_IN_ACOS
):
5893 CASE_FLT_FN (BUILT_IN_ATAN
):
5894 CASE_FLT_FN (BUILT_IN_SIGNIFICAND
):
5895 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5896 because of possible accuracy problems. */
5897 if (! flag_unsafe_math_optimizations
)
5899 CASE_FLT_FN (BUILT_IN_SQRT
):
5900 CASE_FLT_FN (BUILT_IN_FLOOR
):
5901 CASE_FLT_FN (BUILT_IN_CEIL
):
5902 CASE_FLT_FN (BUILT_IN_TRUNC
):
5903 CASE_FLT_FN (BUILT_IN_ROUND
):
5904 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
5905 CASE_FLT_FN (BUILT_IN_RINT
):
5906 target
= expand_builtin_mathfn (exp
, target
, subtarget
);
5911 CASE_FLT_FN (BUILT_IN_FMA
):
5912 target
= expand_builtin_mathfn_ternary (exp
, target
, subtarget
);
5917 CASE_FLT_FN (BUILT_IN_ILOGB
):
5918 if (! flag_unsafe_math_optimizations
)
5920 CASE_FLT_FN (BUILT_IN_ISINF
):
5921 CASE_FLT_FN (BUILT_IN_FINITE
):
5922 case BUILT_IN_ISFINITE
:
5923 case BUILT_IN_ISNORMAL
:
5924 target
= expand_builtin_interclass_mathfn (exp
, target
);
5929 CASE_FLT_FN (BUILT_IN_LCEIL
):
5930 CASE_FLT_FN (BUILT_IN_LLCEIL
):
5931 CASE_FLT_FN (BUILT_IN_LFLOOR
):
5932 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
5933 target
= expand_builtin_int_roundingfn (exp
, target
);
5938 CASE_FLT_FN (BUILT_IN_LRINT
):
5939 CASE_FLT_FN (BUILT_IN_LLRINT
):
5940 CASE_FLT_FN (BUILT_IN_LROUND
):
5941 CASE_FLT_FN (BUILT_IN_LLROUND
):
5942 target
= expand_builtin_int_roundingfn_2 (exp
, target
);
5947 CASE_FLT_FN (BUILT_IN_POW
):
5948 target
= expand_builtin_pow (exp
, target
, subtarget
);
5953 CASE_FLT_FN (BUILT_IN_POWI
):
5954 target
= expand_builtin_powi (exp
, target
);
5959 CASE_FLT_FN (BUILT_IN_ATAN2
):
5960 CASE_FLT_FN (BUILT_IN_LDEXP
):
5961 CASE_FLT_FN (BUILT_IN_SCALB
):
5962 CASE_FLT_FN (BUILT_IN_SCALBN
):
5963 CASE_FLT_FN (BUILT_IN_SCALBLN
):
5964 if (! flag_unsafe_math_optimizations
)
5967 CASE_FLT_FN (BUILT_IN_FMOD
):
5968 CASE_FLT_FN (BUILT_IN_REMAINDER
):
5969 CASE_FLT_FN (BUILT_IN_DREM
):
5970 target
= expand_builtin_mathfn_2 (exp
, target
, subtarget
);
5975 CASE_FLT_FN (BUILT_IN_CEXPI
):
5976 target
= expand_builtin_cexpi (exp
, target
);
5977 gcc_assert (target
);
5980 CASE_FLT_FN (BUILT_IN_SIN
):
5981 CASE_FLT_FN (BUILT_IN_COS
):
5982 if (! flag_unsafe_math_optimizations
)
5984 target
= expand_builtin_mathfn_3 (exp
, target
, subtarget
);
5989 CASE_FLT_FN (BUILT_IN_SINCOS
):
5990 if (! flag_unsafe_math_optimizations
)
5992 target
= expand_builtin_sincos (exp
);
5997 case BUILT_IN_APPLY_ARGS
:
5998 return expand_builtin_apply_args ();
6000 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6001 FUNCTION with a copy of the parameters described by
6002 ARGUMENTS, and ARGSIZE. It returns a block of memory
6003 allocated on the stack into which is stored all the registers
6004 that might possibly be used for returning the result of a
6005 function. ARGUMENTS is the value returned by
6006 __builtin_apply_args. ARGSIZE is the number of bytes of
6007 arguments that must be copied. ??? How should this value be
6008 computed? We'll also need a safe worst case value for varargs
6010 case BUILT_IN_APPLY
:
6011 if (!validate_arglist (exp
, POINTER_TYPE
,
6012 POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
)
6013 && !validate_arglist (exp
, REFERENCE_TYPE
,
6014 POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
6020 ops
[0] = expand_normal (CALL_EXPR_ARG (exp
, 0));
6021 ops
[1] = expand_normal (CALL_EXPR_ARG (exp
, 1));
6022 ops
[2] = expand_normal (CALL_EXPR_ARG (exp
, 2));
6024 return expand_builtin_apply (ops
[0], ops
[1], ops
[2]);
6027 /* __builtin_return (RESULT) causes the function to return the
6028 value described by RESULT. RESULT is address of the block of
6029 memory returned by __builtin_apply. */
6030 case BUILT_IN_RETURN
:
6031 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6032 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp
, 0)));
6035 case BUILT_IN_SAVEREGS
:
6036 return expand_builtin_saveregs ();
6038 case BUILT_IN_VA_ARG_PACK
:
6039 /* All valid uses of __builtin_va_arg_pack () are removed during
6041 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp
);
6044 case BUILT_IN_VA_ARG_PACK_LEN
:
6045 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6047 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp
);
6050 /* Return the address of the first anonymous stack arg. */
6051 case BUILT_IN_NEXT_ARG
:
6052 if (fold_builtin_next_arg (exp
, false))
6054 return expand_builtin_next_arg ();
6056 case BUILT_IN_CLEAR_CACHE
:
6057 target
= expand_builtin___clear_cache (exp
);
6062 case BUILT_IN_CLASSIFY_TYPE
:
6063 return expand_builtin_classify_type (exp
);
6065 case BUILT_IN_CONSTANT_P
:
6068 case BUILT_IN_FRAME_ADDRESS
:
6069 case BUILT_IN_RETURN_ADDRESS
:
6070 return expand_builtin_frame_address (fndecl
, exp
);
6072 /* Returns the address of the area where the structure is returned.
6074 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS
:
6075 if (call_expr_nargs (exp
) != 0
6076 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl
)))
6077 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl
))))
6080 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl
)), 0);
6082 case BUILT_IN_ALLOCA
:
6083 /* If the allocation stems from the declaration of a variable-sized
6084 object, it cannot accumulate. */
6085 target
= expand_builtin_alloca (exp
, ALLOCA_FOR_VAR_P (exp
));
6090 case BUILT_IN_STACK_SAVE
:
6091 return expand_stack_save ();
6093 case BUILT_IN_STACK_RESTORE
:
6094 expand_stack_restore (CALL_EXPR_ARG (exp
, 0));
6097 case BUILT_IN_BSWAP32
:
6098 case BUILT_IN_BSWAP64
:
6099 target
= expand_builtin_bswap (exp
, target
, subtarget
);
6105 CASE_INT_FN (BUILT_IN_FFS
):
6106 case BUILT_IN_FFSIMAX
:
6107 target
= expand_builtin_unop (target_mode
, exp
, target
,
6108 subtarget
, ffs_optab
);
6113 CASE_INT_FN (BUILT_IN_CLZ
):
6114 case BUILT_IN_CLZIMAX
:
6115 target
= expand_builtin_unop (target_mode
, exp
, target
,
6116 subtarget
, clz_optab
);
6121 CASE_INT_FN (BUILT_IN_CTZ
):
6122 case BUILT_IN_CTZIMAX
:
6123 target
= expand_builtin_unop (target_mode
, exp
, target
,
6124 subtarget
, ctz_optab
);
6129 CASE_INT_FN (BUILT_IN_POPCOUNT
):
6130 case BUILT_IN_POPCOUNTIMAX
:
6131 target
= expand_builtin_unop (target_mode
, exp
, target
,
6132 subtarget
, popcount_optab
);
6137 CASE_INT_FN (BUILT_IN_PARITY
):
6138 case BUILT_IN_PARITYIMAX
:
6139 target
= expand_builtin_unop (target_mode
, exp
, target
,
6140 subtarget
, parity_optab
);
6145 case BUILT_IN_STRLEN
:
6146 target
= expand_builtin_strlen (exp
, target
, target_mode
);
6151 case BUILT_IN_STRCPY
:
6152 target
= expand_builtin_strcpy (exp
, target
);
6157 case BUILT_IN_STRNCPY
:
6158 target
= expand_builtin_strncpy (exp
, target
);
6163 case BUILT_IN_STPCPY
:
6164 target
= expand_builtin_stpcpy (exp
, target
, mode
);
6169 case BUILT_IN_MEMCPY
:
6170 target
= expand_builtin_memcpy (exp
, target
);
6175 case BUILT_IN_MEMPCPY
:
6176 target
= expand_builtin_mempcpy (exp
, target
, mode
);
6181 case BUILT_IN_MEMSET
:
6182 target
= expand_builtin_memset (exp
, target
, mode
);
6187 case BUILT_IN_BZERO
:
6188 target
= expand_builtin_bzero (exp
);
6193 case BUILT_IN_STRCMP
:
6194 target
= expand_builtin_strcmp (exp
, target
);
6199 case BUILT_IN_STRNCMP
:
6200 target
= expand_builtin_strncmp (exp
, target
, mode
);
6206 case BUILT_IN_MEMCMP
:
6207 target
= expand_builtin_memcmp (exp
, target
, mode
);
6212 case BUILT_IN_SETJMP
:
6213 /* This should have been lowered to the builtins below. */
6216 case BUILT_IN_SETJMP_SETUP
:
6217 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6218 and the receiver label. */
6219 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
6221 rtx buf_addr
= expand_expr (CALL_EXPR_ARG (exp
, 0), subtarget
,
6222 VOIDmode
, EXPAND_NORMAL
);
6223 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 1), 0);
6224 rtx label_r
= label_rtx (label
);
6226 /* This is copied from the handling of non-local gotos. */
6227 expand_builtin_setjmp_setup (buf_addr
, label_r
);
6228 nonlocal_goto_handler_labels
6229 = gen_rtx_EXPR_LIST (VOIDmode
, label_r
,
6230 nonlocal_goto_handler_labels
);
6231 /* ??? Do not let expand_label treat us as such since we would
6232 not want to be both on the list of non-local labels and on
6233 the list of forced labels. */
6234 FORCED_LABEL (label
) = 0;
6239 case BUILT_IN_SETJMP_DISPATCHER
:
6240 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6241 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6243 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 0), 0);
6244 rtx label_r
= label_rtx (label
);
6246 /* Remove the dispatcher label from the list of non-local labels
6247 since the receiver labels have been added to it above. */
6248 remove_node_from_expr_list (label_r
, &nonlocal_goto_handler_labels
);
6253 case BUILT_IN_SETJMP_RECEIVER
:
6254 /* __builtin_setjmp_receiver is passed the receiver label. */
6255 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6257 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 0), 0);
6258 rtx label_r
= label_rtx (label
);
6260 expand_builtin_setjmp_receiver (label_r
);
6265 /* __builtin_longjmp is passed a pointer to an array of five words.
6266 It's similar to the C library longjmp function but works with
6267 __builtin_setjmp above. */
6268 case BUILT_IN_LONGJMP
:
6269 if (validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
6271 rtx buf_addr
= expand_expr (CALL_EXPR_ARG (exp
, 0), subtarget
,
6272 VOIDmode
, EXPAND_NORMAL
);
6273 rtx value
= expand_normal (CALL_EXPR_ARG (exp
, 1));
6275 if (value
!= const1_rtx
)
6277 error ("%<__builtin_longjmp%> second argument must be 1");
6281 expand_builtin_longjmp (buf_addr
, value
);
6286 case BUILT_IN_NONLOCAL_GOTO
:
6287 target
= expand_builtin_nonlocal_goto (exp
);
6292 /* This updates the setjmp buffer that is its argument with the value
6293 of the current stack pointer. */
6294 case BUILT_IN_UPDATE_SETJMP_BUF
:
6295 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6298 = expand_normal (CALL_EXPR_ARG (exp
, 0));
6300 expand_builtin_update_setjmp_buf (buf_addr
);
6306 expand_builtin_trap ();
6309 case BUILT_IN_UNREACHABLE
:
6310 expand_builtin_unreachable ();
6313 CASE_FLT_FN (BUILT_IN_SIGNBIT
):
6314 case BUILT_IN_SIGNBITD32
:
6315 case BUILT_IN_SIGNBITD64
:
6316 case BUILT_IN_SIGNBITD128
:
6317 target
= expand_builtin_signbit (exp
, target
);
6322 /* Various hooks for the DWARF 2 __throw routine. */
6323 case BUILT_IN_UNWIND_INIT
:
6324 expand_builtin_unwind_init ();
6326 case BUILT_IN_DWARF_CFA
:
6327 return virtual_cfa_rtx
;
6328 #ifdef DWARF2_UNWIND_INFO
6329 case BUILT_IN_DWARF_SP_COLUMN
:
6330 return expand_builtin_dwarf_sp_column ();
6331 case BUILT_IN_INIT_DWARF_REG_SIZES
:
6332 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp
, 0));
6335 case BUILT_IN_FROB_RETURN_ADDR
:
6336 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp
, 0));
6337 case BUILT_IN_EXTRACT_RETURN_ADDR
:
6338 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp
, 0));
6339 case BUILT_IN_EH_RETURN
:
6340 expand_builtin_eh_return (CALL_EXPR_ARG (exp
, 0),
6341 CALL_EXPR_ARG (exp
, 1));
6343 #ifdef EH_RETURN_DATA_REGNO
6344 case BUILT_IN_EH_RETURN_DATA_REGNO
:
6345 return expand_builtin_eh_return_data_regno (exp
);
6347 case BUILT_IN_EXTEND_POINTER
:
6348 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp
, 0));
6349 case BUILT_IN_EH_POINTER
:
6350 return expand_builtin_eh_pointer (exp
);
6351 case BUILT_IN_EH_FILTER
:
6352 return expand_builtin_eh_filter (exp
);
6353 case BUILT_IN_EH_COPY_VALUES
:
6354 return expand_builtin_eh_copy_values (exp
);
6356 case BUILT_IN_VA_START
:
6357 return expand_builtin_va_start (exp
);
6358 case BUILT_IN_VA_END
:
6359 return expand_builtin_va_end (exp
);
6360 case BUILT_IN_VA_COPY
:
6361 return expand_builtin_va_copy (exp
);
6362 case BUILT_IN_EXPECT
:
6363 return expand_builtin_expect (exp
, target
);
6364 case BUILT_IN_PREFETCH
:
6365 expand_builtin_prefetch (exp
);
6368 case BUILT_IN_PROFILE_FUNC_ENTER
:
6369 return expand_builtin_profile_func (false);
6370 case BUILT_IN_PROFILE_FUNC_EXIT
:
6371 return expand_builtin_profile_func (true);
6373 case BUILT_IN_INIT_TRAMPOLINE
:
6374 return expand_builtin_init_trampoline (exp
);
6375 case BUILT_IN_ADJUST_TRAMPOLINE
:
6376 return expand_builtin_adjust_trampoline (exp
);
6379 case BUILT_IN_EXECL
:
6380 case BUILT_IN_EXECV
:
6381 case BUILT_IN_EXECLP
:
6382 case BUILT_IN_EXECLE
:
6383 case BUILT_IN_EXECVP
:
6384 case BUILT_IN_EXECVE
:
6385 target
= expand_builtin_fork_or_exec (fndecl
, exp
, target
, ignore
);
6390 case BUILT_IN_FETCH_AND_ADD_1
:
6391 case BUILT_IN_FETCH_AND_ADD_2
:
6392 case BUILT_IN_FETCH_AND_ADD_4
:
6393 case BUILT_IN_FETCH_AND_ADD_8
:
6394 case BUILT_IN_FETCH_AND_ADD_16
:
6395 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_FETCH_AND_ADD_1
);
6396 target
= expand_builtin_sync_operation (mode
, exp
, PLUS
,
6397 false, target
, ignore
);
6402 case BUILT_IN_FETCH_AND_SUB_1
:
6403 case BUILT_IN_FETCH_AND_SUB_2
:
6404 case BUILT_IN_FETCH_AND_SUB_4
:
6405 case BUILT_IN_FETCH_AND_SUB_8
:
6406 case BUILT_IN_FETCH_AND_SUB_16
:
6407 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_FETCH_AND_SUB_1
);
6408 target
= expand_builtin_sync_operation (mode
, exp
, MINUS
,
6409 false, target
, ignore
);
6414 case BUILT_IN_FETCH_AND_OR_1
:
6415 case BUILT_IN_FETCH_AND_OR_2
:
6416 case BUILT_IN_FETCH_AND_OR_4
:
6417 case BUILT_IN_FETCH_AND_OR_8
:
6418 case BUILT_IN_FETCH_AND_OR_16
:
6419 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_FETCH_AND_OR_1
);
6420 target
= expand_builtin_sync_operation (mode
, exp
, IOR
,
6421 false, target
, ignore
);
6426 case BUILT_IN_FETCH_AND_AND_1
:
6427 case BUILT_IN_FETCH_AND_AND_2
:
6428 case BUILT_IN_FETCH_AND_AND_4
:
6429 case BUILT_IN_FETCH_AND_AND_8
:
6430 case BUILT_IN_FETCH_AND_AND_16
:
6431 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_FETCH_AND_AND_1
);
6432 target
= expand_builtin_sync_operation (mode
, exp
, AND
,
6433 false, target
, ignore
);
6438 case BUILT_IN_FETCH_AND_XOR_1
:
6439 case BUILT_IN_FETCH_AND_XOR_2
:
6440 case BUILT_IN_FETCH_AND_XOR_4
:
6441 case BUILT_IN_FETCH_AND_XOR_8
:
6442 case BUILT_IN_FETCH_AND_XOR_16
:
6443 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_FETCH_AND_XOR_1
);
6444 target
= expand_builtin_sync_operation (mode
, exp
, XOR
,
6445 false, target
, ignore
);
6450 case BUILT_IN_FETCH_AND_NAND_1
:
6451 case BUILT_IN_FETCH_AND_NAND_2
:
6452 case BUILT_IN_FETCH_AND_NAND_4
:
6453 case BUILT_IN_FETCH_AND_NAND_8
:
6454 case BUILT_IN_FETCH_AND_NAND_16
:
6455 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_FETCH_AND_NAND_1
);
6456 target
= expand_builtin_sync_operation (mode
, exp
, NOT
,
6457 false, target
, ignore
);
6462 case BUILT_IN_ADD_AND_FETCH_1
:
6463 case BUILT_IN_ADD_AND_FETCH_2
:
6464 case BUILT_IN_ADD_AND_FETCH_4
:
6465 case BUILT_IN_ADD_AND_FETCH_8
:
6466 case BUILT_IN_ADD_AND_FETCH_16
:
6467 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ADD_AND_FETCH_1
);
6468 target
= expand_builtin_sync_operation (mode
, exp
, PLUS
,
6469 true, target
, ignore
);
6474 case BUILT_IN_SUB_AND_FETCH_1
:
6475 case BUILT_IN_SUB_AND_FETCH_2
:
6476 case BUILT_IN_SUB_AND_FETCH_4
:
6477 case BUILT_IN_SUB_AND_FETCH_8
:
6478 case BUILT_IN_SUB_AND_FETCH_16
:
6479 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SUB_AND_FETCH_1
);
6480 target
= expand_builtin_sync_operation (mode
, exp
, MINUS
,
6481 true, target
, ignore
);
6486 case BUILT_IN_OR_AND_FETCH_1
:
6487 case BUILT_IN_OR_AND_FETCH_2
:
6488 case BUILT_IN_OR_AND_FETCH_4
:
6489 case BUILT_IN_OR_AND_FETCH_8
:
6490 case BUILT_IN_OR_AND_FETCH_16
:
6491 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_OR_AND_FETCH_1
);
6492 target
= expand_builtin_sync_operation (mode
, exp
, IOR
,
6493 true, target
, ignore
);
6498 case BUILT_IN_AND_AND_FETCH_1
:
6499 case BUILT_IN_AND_AND_FETCH_2
:
6500 case BUILT_IN_AND_AND_FETCH_4
:
6501 case BUILT_IN_AND_AND_FETCH_8
:
6502 case BUILT_IN_AND_AND_FETCH_16
:
6503 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_AND_AND_FETCH_1
);
6504 target
= expand_builtin_sync_operation (mode
, exp
, AND
,
6505 true, target
, ignore
);
6510 case BUILT_IN_XOR_AND_FETCH_1
:
6511 case BUILT_IN_XOR_AND_FETCH_2
:
6512 case BUILT_IN_XOR_AND_FETCH_4
:
6513 case BUILT_IN_XOR_AND_FETCH_8
:
6514 case BUILT_IN_XOR_AND_FETCH_16
:
6515 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_XOR_AND_FETCH_1
);
6516 target
= expand_builtin_sync_operation (mode
, exp
, XOR
,
6517 true, target
, ignore
);
6522 case BUILT_IN_NAND_AND_FETCH_1
:
6523 case BUILT_IN_NAND_AND_FETCH_2
:
6524 case BUILT_IN_NAND_AND_FETCH_4
:
6525 case BUILT_IN_NAND_AND_FETCH_8
:
6526 case BUILT_IN_NAND_AND_FETCH_16
:
6527 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_NAND_AND_FETCH_1
);
6528 target
= expand_builtin_sync_operation (mode
, exp
, NOT
,
6529 true, target
, ignore
);
6534 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1
:
6535 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2
:
6536 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4
:
6537 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8
:
6538 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16
:
6539 if (mode
== VOIDmode
)
6540 mode
= TYPE_MODE (boolean_type_node
);
6541 if (!target
|| !register_operand (target
, mode
))
6542 target
= gen_reg_rtx (mode
);
6544 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_BOOL_COMPARE_AND_SWAP_1
);
6545 target
= expand_builtin_compare_and_swap (mode
, exp
, true, target
);
6550 case BUILT_IN_VAL_COMPARE_AND_SWAP_1
:
6551 case BUILT_IN_VAL_COMPARE_AND_SWAP_2
:
6552 case BUILT_IN_VAL_COMPARE_AND_SWAP_4
:
6553 case BUILT_IN_VAL_COMPARE_AND_SWAP_8
:
6554 case BUILT_IN_VAL_COMPARE_AND_SWAP_16
:
6555 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_VAL_COMPARE_AND_SWAP_1
);
6556 target
= expand_builtin_compare_and_swap (mode
, exp
, false, target
);
6561 case BUILT_IN_LOCK_TEST_AND_SET_1
:
6562 case BUILT_IN_LOCK_TEST_AND_SET_2
:
6563 case BUILT_IN_LOCK_TEST_AND_SET_4
:
6564 case BUILT_IN_LOCK_TEST_AND_SET_8
:
6565 case BUILT_IN_LOCK_TEST_AND_SET_16
:
6566 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_LOCK_TEST_AND_SET_1
);
6567 target
= expand_builtin_lock_test_and_set (mode
, exp
, target
);
6572 case BUILT_IN_LOCK_RELEASE_1
:
6573 case BUILT_IN_LOCK_RELEASE_2
:
6574 case BUILT_IN_LOCK_RELEASE_4
:
6575 case BUILT_IN_LOCK_RELEASE_8
:
6576 case BUILT_IN_LOCK_RELEASE_16
:
6577 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_LOCK_RELEASE_1
);
6578 expand_builtin_lock_release (mode
, exp
);
6581 case BUILT_IN_SYNCHRONIZE
:
6582 expand_builtin_synchronize ();
6585 case BUILT_IN_OBJECT_SIZE
:
6586 return expand_builtin_object_size (exp
);
6588 case BUILT_IN_MEMCPY_CHK
:
6589 case BUILT_IN_MEMPCPY_CHK
:
6590 case BUILT_IN_MEMMOVE_CHK
:
6591 case BUILT_IN_MEMSET_CHK
:
6592 target
= expand_builtin_memory_chk (exp
, target
, mode
, fcode
);
6597 case BUILT_IN_STRCPY_CHK
:
6598 case BUILT_IN_STPCPY_CHK
:
6599 case BUILT_IN_STRNCPY_CHK
:
6600 case BUILT_IN_STRCAT_CHK
:
6601 case BUILT_IN_STRNCAT_CHK
:
6602 case BUILT_IN_SNPRINTF_CHK
:
6603 case BUILT_IN_VSNPRINTF_CHK
:
6604 maybe_emit_chk_warning (exp
, fcode
);
6607 case BUILT_IN_SPRINTF_CHK
:
6608 case BUILT_IN_VSPRINTF_CHK
:
6609 maybe_emit_sprintf_chk_warning (exp
, fcode
);
6613 maybe_emit_free_warning (exp
);
6616 default: /* just do library call, if unknown builtin */
6620 /* The switch statement above can drop through to cause the function
6621 to be called normally. */
6622 return expand_call (exp
, target
, ignore
);
6625 /* Determine whether a tree node represents a call to a built-in
6626 function. If the tree T is a call to a built-in function with
6627 the right number of arguments of the appropriate types, return
6628 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6629 Otherwise the return value is END_BUILTINS. */
6631 enum built_in_function
6632 builtin_mathfn_code (const_tree t
)
6634 const_tree fndecl
, arg
, parmlist
;
6635 const_tree argtype
, parmtype
;
6636 const_call_expr_arg_iterator iter
;
6638 if (TREE_CODE (t
) != CALL_EXPR
6639 || TREE_CODE (CALL_EXPR_FN (t
)) != ADDR_EXPR
)
6640 return END_BUILTINS
;
6642 fndecl
= get_callee_fndecl (t
);
6643 if (fndecl
== NULL_TREE
6644 || TREE_CODE (fndecl
) != FUNCTION_DECL
6645 || ! DECL_BUILT_IN (fndecl
)
6646 || DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
6647 return END_BUILTINS
;
6649 parmlist
= TYPE_ARG_TYPES (TREE_TYPE (fndecl
));
6650 init_const_call_expr_arg_iterator (t
, &iter
);
6651 for (; parmlist
; parmlist
= TREE_CHAIN (parmlist
))
6653 /* If a function doesn't take a variable number of arguments,
6654 the last element in the list will have type `void'. */
6655 parmtype
= TREE_VALUE (parmlist
);
6656 if (VOID_TYPE_P (parmtype
))
6658 if (more_const_call_expr_args_p (&iter
))
6659 return END_BUILTINS
;
6660 return DECL_FUNCTION_CODE (fndecl
);
6663 if (! more_const_call_expr_args_p (&iter
))
6664 return END_BUILTINS
;
6666 arg
= next_const_call_expr_arg (&iter
);
6667 argtype
= TREE_TYPE (arg
);
6669 if (SCALAR_FLOAT_TYPE_P (parmtype
))
6671 if (! SCALAR_FLOAT_TYPE_P (argtype
))
6672 return END_BUILTINS
;
6674 else if (COMPLEX_FLOAT_TYPE_P (parmtype
))
6676 if (! COMPLEX_FLOAT_TYPE_P (argtype
))
6677 return END_BUILTINS
;
6679 else if (POINTER_TYPE_P (parmtype
))
6681 if (! POINTER_TYPE_P (argtype
))
6682 return END_BUILTINS
;
6684 else if (INTEGRAL_TYPE_P (parmtype
))
6686 if (! INTEGRAL_TYPE_P (argtype
))
6687 return END_BUILTINS
;
6690 return END_BUILTINS
;
6693 /* Variable-length argument list. */
6694 return DECL_FUNCTION_CODE (fndecl
);
6697 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6698 evaluate to a constant. */
6701 fold_builtin_constant_p (tree arg
)
6703 /* We return 1 for a numeric type that's known to be a constant
6704 value at compile-time or for an aggregate type that's a
6705 literal constant. */
6708 /* If we know this is a constant, emit the constant of one. */
6709 if (CONSTANT_CLASS_P (arg
)
6710 || (TREE_CODE (arg
) == CONSTRUCTOR
6711 && TREE_CONSTANT (arg
)))
6712 return integer_one_node
;
6713 if (TREE_CODE (arg
) == ADDR_EXPR
)
6715 tree op
= TREE_OPERAND (arg
, 0);
6716 if (TREE_CODE (op
) == STRING_CST
6717 || (TREE_CODE (op
) == ARRAY_REF
6718 && integer_zerop (TREE_OPERAND (op
, 1))
6719 && TREE_CODE (TREE_OPERAND (op
, 0)) == STRING_CST
))
6720 return integer_one_node
;
6723 /* If this expression has side effects, show we don't know it to be a
6724 constant. Likewise if it's a pointer or aggregate type since in
6725 those case we only want literals, since those are only optimized
6726 when generating RTL, not later.
6727 And finally, if we are compiling an initializer, not code, we
6728 need to return a definite result now; there's not going to be any
6729 more optimization done. */
6730 if (TREE_SIDE_EFFECTS (arg
)
6731 || AGGREGATE_TYPE_P (TREE_TYPE (arg
))
6732 || POINTER_TYPE_P (TREE_TYPE (arg
))
6734 || folding_initializer
)
6735 return integer_zero_node
;
6740 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6741 return it as a truthvalue. */
6744 build_builtin_expect_predicate (location_t loc
, tree pred
, tree expected
)
6746 tree fn
, arg_types
, pred_type
, expected_type
, call_expr
, ret_type
;
6748 fn
= built_in_decls
[BUILT_IN_EXPECT
];
6749 arg_types
= TYPE_ARG_TYPES (TREE_TYPE (fn
));
6750 ret_type
= TREE_TYPE (TREE_TYPE (fn
));
6751 pred_type
= TREE_VALUE (arg_types
);
6752 expected_type
= TREE_VALUE (TREE_CHAIN (arg_types
));
6754 pred
= fold_convert_loc (loc
, pred_type
, pred
);
6755 expected
= fold_convert_loc (loc
, expected_type
, expected
);
6756 call_expr
= build_call_expr_loc (loc
, fn
, 2, pred
, expected
);
6758 return build2 (NE_EXPR
, TREE_TYPE (pred
), call_expr
,
6759 build_int_cst (ret_type
, 0));
6762 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
6763 NULL_TREE if no simplification is possible. */
6766 fold_builtin_expect (location_t loc
, tree arg0
, tree arg1
)
6769 enum tree_code code
;
6771 /* If this is a builtin_expect within a builtin_expect keep the
6772 inner one. See through a comparison against a constant. It
6773 might have been added to create a thruthvalue. */
6775 if (COMPARISON_CLASS_P (inner
)
6776 && TREE_CODE (TREE_OPERAND (inner
, 1)) == INTEGER_CST
)
6777 inner
= TREE_OPERAND (inner
, 0);
6779 if (TREE_CODE (inner
) == CALL_EXPR
6780 && (fndecl
= get_callee_fndecl (inner
))
6781 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
6782 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_EXPECT
)
6785 /* Distribute the expected value over short-circuiting operators.
6786 See through the cast from truthvalue_type_node to long. */
6788 while (TREE_CODE (inner
) == NOP_EXPR
6789 && INTEGRAL_TYPE_P (TREE_TYPE (inner
))
6790 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner
, 0))))
6791 inner
= TREE_OPERAND (inner
, 0);
6793 code
= TREE_CODE (inner
);
6794 if (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
)
6796 tree op0
= TREE_OPERAND (inner
, 0);
6797 tree op1
= TREE_OPERAND (inner
, 1);
6799 op0
= build_builtin_expect_predicate (loc
, op0
, arg1
);
6800 op1
= build_builtin_expect_predicate (loc
, op1
, arg1
);
6801 inner
= build2 (code
, TREE_TYPE (inner
), op0
, op1
);
6803 return fold_convert_loc (loc
, TREE_TYPE (arg0
), inner
);
6806 /* If the argument isn't invariant then there's nothing else we can do. */
6807 if (!TREE_CONSTANT (arg0
))
6810 /* If we expect that a comparison against the argument will fold to
6811 a constant return the constant. In practice, this means a true
6812 constant or the address of a non-weak symbol. */
6815 if (TREE_CODE (inner
) == ADDR_EXPR
)
6819 inner
= TREE_OPERAND (inner
, 0);
6821 while (TREE_CODE (inner
) == COMPONENT_REF
6822 || TREE_CODE (inner
) == ARRAY_REF
);
6823 if ((TREE_CODE (inner
) == VAR_DECL
6824 || TREE_CODE (inner
) == FUNCTION_DECL
)
6825 && DECL_WEAK (inner
))
6829 /* Otherwise, ARG0 already has the proper type for the return value. */
6833 /* Fold a call to __builtin_classify_type with argument ARG. */
6836 fold_builtin_classify_type (tree arg
)
6839 return build_int_cst (NULL_TREE
, no_type_class
);
6841 return build_int_cst (NULL_TREE
, type_to_class (TREE_TYPE (arg
)));
6844 /* Fold a call to __builtin_strlen with argument ARG. */
6847 fold_builtin_strlen (location_t loc
, tree type
, tree arg
)
6849 if (!validate_arg (arg
, POINTER_TYPE
))
6853 tree len
= c_strlen (arg
, 0);
6856 return fold_convert_loc (loc
, type
, len
);
6862 /* Fold a call to __builtin_inf or __builtin_huge_val. */
6865 fold_builtin_inf (location_t loc
, tree type
, int warn
)
6867 REAL_VALUE_TYPE real
;
6869 /* __builtin_inff is intended to be usable to define INFINITY on all
6870 targets. If an infinity is not available, INFINITY expands "to a
6871 positive constant of type float that overflows at translation
6872 time", footnote "In this case, using INFINITY will violate the
6873 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
6874 Thus we pedwarn to ensure this constraint violation is
6876 if (!MODE_HAS_INFINITIES (TYPE_MODE (type
)) && warn
)
6877 pedwarn (loc
, 0, "target format does not support infinity");
6880 return build_real (type
, real
);
6883 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
6886 fold_builtin_nan (tree arg
, tree type
, int quiet
)
6888 REAL_VALUE_TYPE real
;
6891 if (!validate_arg (arg
, POINTER_TYPE
))
6893 str
= c_getstr (arg
);
6897 if (!real_nan (&real
, str
, quiet
, TYPE_MODE (type
)))
6900 return build_real (type
, real
);
6903 /* Return true if the floating point expression T has an integer value.
6904 We also allow +Inf, -Inf and NaN to be considered integer values. */
6907 integer_valued_real_p (tree t
)
6909 switch (TREE_CODE (t
))
6916 return integer_valued_real_p (TREE_OPERAND (t
, 0));
6921 return integer_valued_real_p (TREE_OPERAND (t
, 1));
6928 return integer_valued_real_p (TREE_OPERAND (t
, 0))
6929 && integer_valued_real_p (TREE_OPERAND (t
, 1));
6932 return integer_valued_real_p (TREE_OPERAND (t
, 1))
6933 && integer_valued_real_p (TREE_OPERAND (t
, 2));
6936 return real_isinteger (TREE_REAL_CST_PTR (t
), TYPE_MODE (TREE_TYPE (t
)));
6940 tree type
= TREE_TYPE (TREE_OPERAND (t
, 0));
6941 if (TREE_CODE (type
) == INTEGER_TYPE
)
6943 if (TREE_CODE (type
) == REAL_TYPE
)
6944 return integer_valued_real_p (TREE_OPERAND (t
, 0));
6949 switch (builtin_mathfn_code (t
))
6951 CASE_FLT_FN (BUILT_IN_CEIL
):
6952 CASE_FLT_FN (BUILT_IN_FLOOR
):
6953 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
6954 CASE_FLT_FN (BUILT_IN_RINT
):
6955 CASE_FLT_FN (BUILT_IN_ROUND
):
6956 CASE_FLT_FN (BUILT_IN_TRUNC
):
6959 CASE_FLT_FN (BUILT_IN_FMIN
):
6960 CASE_FLT_FN (BUILT_IN_FMAX
):
6961 return integer_valued_real_p (CALL_EXPR_ARG (t
, 0))
6962 && integer_valued_real_p (CALL_EXPR_ARG (t
, 1));
6975 /* FNDECL is assumed to be a builtin where truncation can be propagated
6976 across (for instance floor((double)f) == (double)floorf (f).
6977 Do the transformation for a call with argument ARG. */
6980 fold_trunc_transparent_mathfn (location_t loc
, tree fndecl
, tree arg
)
6982 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
6984 if (!validate_arg (arg
, REAL_TYPE
))
6987 /* Integer rounding functions are idempotent. */
6988 if (fcode
== builtin_mathfn_code (arg
))
6991 /* If argument is already integer valued, and we don't need to worry
6992 about setting errno, there's no need to perform rounding. */
6993 if (! flag_errno_math
&& integer_valued_real_p (arg
))
6998 tree arg0
= strip_float_extensions (arg
);
6999 tree ftype
= TREE_TYPE (TREE_TYPE (fndecl
));
7000 tree newtype
= TREE_TYPE (arg0
);
7003 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (ftype
)
7004 && (decl
= mathfn_built_in (newtype
, fcode
)))
7005 return fold_convert_loc (loc
, ftype
,
7006 build_call_expr_loc (loc
, decl
, 1,
7007 fold_convert_loc (loc
,
7014 /* FNDECL is assumed to be builtin which can narrow the FP type of
7015 the argument, for instance lround((double)f) -> lroundf (f).
7016 Do the transformation for a call with argument ARG. */
7019 fold_fixed_mathfn (location_t loc
, tree fndecl
, tree arg
)
7021 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
7023 if (!validate_arg (arg
, REAL_TYPE
))
7026 /* If argument is already integer valued, and we don't need to worry
7027 about setting errno, there's no need to perform rounding. */
7028 if (! flag_errno_math
&& integer_valued_real_p (arg
))
7029 return fold_build1_loc (loc
, FIX_TRUNC_EXPR
,
7030 TREE_TYPE (TREE_TYPE (fndecl
)), arg
);
7034 tree ftype
= TREE_TYPE (arg
);
7035 tree arg0
= strip_float_extensions (arg
);
7036 tree newtype
= TREE_TYPE (arg0
);
7039 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (ftype
)
7040 && (decl
= mathfn_built_in (newtype
, fcode
)))
7041 return build_call_expr_loc (loc
, decl
, 1,
7042 fold_convert_loc (loc
, newtype
, arg0
));
7045 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7046 sizeof (long long) == sizeof (long). */
7047 if (TYPE_PRECISION (long_long_integer_type_node
)
7048 == TYPE_PRECISION (long_integer_type_node
))
7050 tree newfn
= NULL_TREE
;
7053 CASE_FLT_FN (BUILT_IN_LLCEIL
):
7054 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LCEIL
);
7057 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
7058 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LFLOOR
);
7061 CASE_FLT_FN (BUILT_IN_LLROUND
):
7062 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LROUND
);
7065 CASE_FLT_FN (BUILT_IN_LLRINT
):
7066 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LRINT
);
7075 tree newcall
= build_call_expr_loc (loc
, newfn
, 1, arg
);
7076 return fold_convert_loc (loc
,
7077 TREE_TYPE (TREE_TYPE (fndecl
)), newcall
);
7084 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7085 return type. Return NULL_TREE if no simplification can be made. */
7088 fold_builtin_cabs (location_t loc
, tree arg
, tree type
, tree fndecl
)
7092 if (!validate_arg (arg
, COMPLEX_TYPE
)
7093 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) != REAL_TYPE
)
7096 /* Calculate the result when the argument is a constant. */
7097 if (TREE_CODE (arg
) == COMPLEX_CST
7098 && (res
= do_mpfr_arg2 (TREE_REALPART (arg
), TREE_IMAGPART (arg
),
7102 if (TREE_CODE (arg
) == COMPLEX_EXPR
)
7104 tree real
= TREE_OPERAND (arg
, 0);
7105 tree imag
= TREE_OPERAND (arg
, 1);
7107 /* If either part is zero, cabs is fabs of the other. */
7108 if (real_zerop (real
))
7109 return fold_build1_loc (loc
, ABS_EXPR
, type
, imag
);
7110 if (real_zerop (imag
))
7111 return fold_build1_loc (loc
, ABS_EXPR
, type
, real
);
7113 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7114 if (flag_unsafe_math_optimizations
7115 && operand_equal_p (real
, imag
, OEP_PURE_SAME
))
7117 const REAL_VALUE_TYPE sqrt2_trunc
7118 = real_value_truncate (TYPE_MODE (type
), dconst_sqrt2 ());
7120 return fold_build2_loc (loc
, MULT_EXPR
, type
,
7121 fold_build1_loc (loc
, ABS_EXPR
, type
, real
),
7122 build_real (type
, sqrt2_trunc
));
7126 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7127 if (TREE_CODE (arg
) == NEGATE_EXPR
7128 || TREE_CODE (arg
) == CONJ_EXPR
)
7129 return build_call_expr_loc (loc
, fndecl
, 1, TREE_OPERAND (arg
, 0));
7131 /* Don't do this when optimizing for size. */
7132 if (flag_unsafe_math_optimizations
7133 && optimize
&& optimize_function_for_speed_p (cfun
))
7135 tree sqrtfn
= mathfn_built_in (type
, BUILT_IN_SQRT
);
7137 if (sqrtfn
!= NULL_TREE
)
7139 tree rpart
, ipart
, result
;
7141 arg
= builtin_save_expr (arg
);
7143 rpart
= fold_build1_loc (loc
, REALPART_EXPR
, type
, arg
);
7144 ipart
= fold_build1_loc (loc
, IMAGPART_EXPR
, type
, arg
);
7146 rpart
= builtin_save_expr (rpart
);
7147 ipart
= builtin_save_expr (ipart
);
7149 result
= fold_build2_loc (loc
, PLUS_EXPR
, type
,
7150 fold_build2_loc (loc
, MULT_EXPR
, type
,
7152 fold_build2_loc (loc
, MULT_EXPR
, type
,
7155 return build_call_expr_loc (loc
, sqrtfn
, 1, result
);
7162 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7163 complex tree type of the result. If NEG is true, the imaginary
7164 zero is negative. */
7167 build_complex_cproj (tree type
, bool neg
)
7169 REAL_VALUE_TYPE rinf
, rzero
= dconst0
;
7173 return build_complex (type
, build_real (TREE_TYPE (type
), rinf
),
7174 build_real (TREE_TYPE (type
), rzero
));
7177 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7178 return type. Return NULL_TREE if no simplification can be made. */
7181 fold_builtin_cproj (location_t loc
, tree arg
, tree type
)
7183 if (!validate_arg (arg
, COMPLEX_TYPE
)
7184 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) != REAL_TYPE
)
7187 /* If there are no infinities, return arg. */
7188 if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type
))))
7189 return non_lvalue_loc (loc
, arg
);
7191 /* Calculate the result when the argument is a constant. */
7192 if (TREE_CODE (arg
) == COMPLEX_CST
)
7194 const REAL_VALUE_TYPE
*real
= TREE_REAL_CST_PTR (TREE_REALPART (arg
));
7195 const REAL_VALUE_TYPE
*imag
= TREE_REAL_CST_PTR (TREE_IMAGPART (arg
));
7197 if (real_isinf (real
) || real_isinf (imag
))
7198 return build_complex_cproj (type
, imag
->sign
);
7202 else if (TREE_CODE (arg
) == COMPLEX_EXPR
)
7204 tree real
= TREE_OPERAND (arg
, 0);
7205 tree imag
= TREE_OPERAND (arg
, 1);
7210 /* If the real part is inf and the imag part is known to be
7211 nonnegative, return (inf + 0i). Remember side-effects are
7212 possible in the imag part. */
7213 if (TREE_CODE (real
) == REAL_CST
7214 && real_isinf (TREE_REAL_CST_PTR (real
))
7215 && tree_expr_nonnegative_p (imag
))
7216 return omit_one_operand_loc (loc
, type
,
7217 build_complex_cproj (type
, false),
7220 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7221 Remember side-effects are possible in the real part. */
7222 if (TREE_CODE (imag
) == REAL_CST
7223 && real_isinf (TREE_REAL_CST_PTR (imag
)))
7225 omit_one_operand_loc (loc
, type
,
7226 build_complex_cproj (type
, TREE_REAL_CST_PTR
7227 (imag
)->sign
), arg
);
7233 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7234 Return NULL_TREE if no simplification can be made. */
7237 fold_builtin_sqrt (location_t loc
, tree arg
, tree type
)
7240 enum built_in_function fcode
;
7243 if (!validate_arg (arg
, REAL_TYPE
))
7246 /* Calculate the result when the argument is a constant. */
7247 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_sqrt
, &dconst0
, NULL
, true)))
7250 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7251 fcode
= builtin_mathfn_code (arg
);
7252 if (flag_unsafe_math_optimizations
&& BUILTIN_EXPONENT_P (fcode
))
7254 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg
), 0);
7255 arg
= fold_build2_loc (loc
, MULT_EXPR
, type
,
7256 CALL_EXPR_ARG (arg
, 0),
7257 build_real (type
, dconsthalf
));
7258 return build_call_expr_loc (loc
, expfn
, 1, arg
);
7261 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7262 if (flag_unsafe_math_optimizations
&& BUILTIN_ROOT_P (fcode
))
7264 tree powfn
= mathfn_built_in (type
, BUILT_IN_POW
);
7268 tree arg0
= CALL_EXPR_ARG (arg
, 0);
7270 /* The inner root was either sqrt or cbrt. */
7271 /* This was a conditional expression but it triggered a bug
7273 REAL_VALUE_TYPE dconstroot
;
7274 if (BUILTIN_SQRT_P (fcode
))
7275 dconstroot
= dconsthalf
;
7277 dconstroot
= dconst_third ();
7279 /* Adjust for the outer root. */
7280 SET_REAL_EXP (&dconstroot
, REAL_EXP (&dconstroot
) - 1);
7281 dconstroot
= real_value_truncate (TYPE_MODE (type
), dconstroot
);
7282 tree_root
= build_real (type
, dconstroot
);
7283 return build_call_expr_loc (loc
, powfn
, 2, arg0
, tree_root
);
7287 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7288 if (flag_unsafe_math_optimizations
7289 && (fcode
== BUILT_IN_POW
7290 || fcode
== BUILT_IN_POWF
7291 || fcode
== BUILT_IN_POWL
))
7293 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg
), 0);
7294 tree arg0
= CALL_EXPR_ARG (arg
, 0);
7295 tree arg1
= CALL_EXPR_ARG (arg
, 1);
7297 if (!tree_expr_nonnegative_p (arg0
))
7298 arg0
= build1 (ABS_EXPR
, type
, arg0
);
7299 narg1
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg1
,
7300 build_real (type
, dconsthalf
));
7301 return build_call_expr_loc (loc
, powfn
, 2, arg0
, narg1
);
7307 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7308 Return NULL_TREE if no simplification can be made. */
7311 fold_builtin_cbrt (location_t loc
, tree arg
, tree type
)
7313 const enum built_in_function fcode
= builtin_mathfn_code (arg
);
7316 if (!validate_arg (arg
, REAL_TYPE
))
7319 /* Calculate the result when the argument is a constant. */
7320 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_cbrt
, NULL
, NULL
, 0)))
7323 if (flag_unsafe_math_optimizations
)
7325 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7326 if (BUILTIN_EXPONENT_P (fcode
))
7328 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg
), 0);
7329 const REAL_VALUE_TYPE third_trunc
=
7330 real_value_truncate (TYPE_MODE (type
), dconst_third ());
7331 arg
= fold_build2_loc (loc
, MULT_EXPR
, type
,
7332 CALL_EXPR_ARG (arg
, 0),
7333 build_real (type
, third_trunc
));
7334 return build_call_expr_loc (loc
, expfn
, 1, arg
);
7337 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7338 if (BUILTIN_SQRT_P (fcode
))
7340 tree powfn
= mathfn_built_in (type
, BUILT_IN_POW
);
7344 tree arg0
= CALL_EXPR_ARG (arg
, 0);
7346 REAL_VALUE_TYPE dconstroot
= dconst_third ();
7348 SET_REAL_EXP (&dconstroot
, REAL_EXP (&dconstroot
) - 1);
7349 dconstroot
= real_value_truncate (TYPE_MODE (type
), dconstroot
);
7350 tree_root
= build_real (type
, dconstroot
);
7351 return build_call_expr_loc (loc
, powfn
, 2, arg0
, tree_root
);
7355 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7356 if (BUILTIN_CBRT_P (fcode
))
7358 tree arg0
= CALL_EXPR_ARG (arg
, 0);
7359 if (tree_expr_nonnegative_p (arg0
))
7361 tree powfn
= mathfn_built_in (type
, BUILT_IN_POW
);
7366 REAL_VALUE_TYPE dconstroot
;
7368 real_arithmetic (&dconstroot
, MULT_EXPR
,
7369 dconst_third_ptr (), dconst_third_ptr ());
7370 dconstroot
= real_value_truncate (TYPE_MODE (type
), dconstroot
);
7371 tree_root
= build_real (type
, dconstroot
);
7372 return build_call_expr_loc (loc
, powfn
, 2, arg0
, tree_root
);
7377 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7378 if (fcode
== BUILT_IN_POW
7379 || fcode
== BUILT_IN_POWF
7380 || fcode
== BUILT_IN_POWL
)
7382 tree arg00
= CALL_EXPR_ARG (arg
, 0);
7383 tree arg01
= CALL_EXPR_ARG (arg
, 1);
7384 if (tree_expr_nonnegative_p (arg00
))
7386 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg
), 0);
7387 const REAL_VALUE_TYPE dconstroot
7388 = real_value_truncate (TYPE_MODE (type
), dconst_third ());
7389 tree narg01
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg01
,
7390 build_real (type
, dconstroot
));
7391 return build_call_expr_loc (loc
, powfn
, 2, arg00
, narg01
);
7398 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7399 TYPE is the type of the return value. Return NULL_TREE if no
7400 simplification can be made. */
7403 fold_builtin_cos (location_t loc
,
7404 tree arg
, tree type
, tree fndecl
)
7408 if (!validate_arg (arg
, REAL_TYPE
))
7411 /* Calculate the result when the argument is a constant. */
7412 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_cos
, NULL
, NULL
, 0)))
7415 /* Optimize cos(-x) into cos (x). */
7416 if ((narg
= fold_strip_sign_ops (arg
)))
7417 return build_call_expr_loc (loc
, fndecl
, 1, narg
);
7422 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7423 Return NULL_TREE if no simplification can be made. */
7426 fold_builtin_cosh (location_t loc
, tree arg
, tree type
, tree fndecl
)
7428 if (validate_arg (arg
, REAL_TYPE
))
7432 /* Calculate the result when the argument is a constant. */
7433 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_cosh
, NULL
, NULL
, 0)))
7436 /* Optimize cosh(-x) into cosh (x). */
7437 if ((narg
= fold_strip_sign_ops (arg
)))
7438 return build_call_expr_loc (loc
, fndecl
, 1, narg
);
7444 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7445 argument ARG. TYPE is the type of the return value. Return
7446 NULL_TREE if no simplification can be made. */
7449 fold_builtin_ccos (location_t loc
, tree arg
, tree type
, tree fndecl
,
7452 if (validate_arg (arg
, COMPLEX_TYPE
)
7453 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) == REAL_TYPE
)
7457 /* Calculate the result when the argument is a constant. */
7458 if ((tmp
= do_mpc_arg1 (arg
, type
, (hyper
? mpc_cosh
: mpc_cos
))))
7461 /* Optimize fn(-x) into fn(x). */
7462 if ((tmp
= fold_strip_sign_ops (arg
)))
7463 return build_call_expr_loc (loc
, fndecl
, 1, tmp
);
7469 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7470 Return NULL_TREE if no simplification can be made. */
7473 fold_builtin_tan (tree arg
, tree type
)
7475 enum built_in_function fcode
;
7478 if (!validate_arg (arg
, REAL_TYPE
))
7481 /* Calculate the result when the argument is a constant. */
7482 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_tan
, NULL
, NULL
, 0)))
7485 /* Optimize tan(atan(x)) = x. */
7486 fcode
= builtin_mathfn_code (arg
);
7487 if (flag_unsafe_math_optimizations
7488 && (fcode
== BUILT_IN_ATAN
7489 || fcode
== BUILT_IN_ATANF
7490 || fcode
== BUILT_IN_ATANL
))
7491 return CALL_EXPR_ARG (arg
, 0);
7496 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7497 NULL_TREE if no simplification can be made. */
7500 fold_builtin_sincos (location_t loc
,
7501 tree arg0
, tree arg1
, tree arg2
)
7506 if (!validate_arg (arg0
, REAL_TYPE
)
7507 || !validate_arg (arg1
, POINTER_TYPE
)
7508 || !validate_arg (arg2
, POINTER_TYPE
))
7511 type
= TREE_TYPE (arg0
);
7513 /* Calculate the result when the argument is a constant. */
7514 if ((res
= do_mpfr_sincos (arg0
, arg1
, arg2
)))
7517 /* Canonicalize sincos to cexpi. */
7518 if (!TARGET_C99_FUNCTIONS
)
7520 fn
= mathfn_built_in (type
, BUILT_IN_CEXPI
);
7524 call
= build_call_expr_loc (loc
, fn
, 1, arg0
);
7525 call
= builtin_save_expr (call
);
7527 return build2 (COMPOUND_EXPR
, void_type_node
,
7528 build2 (MODIFY_EXPR
, void_type_node
,
7529 build_fold_indirect_ref_loc (loc
, arg1
),
7530 build1 (IMAGPART_EXPR
, type
, call
)),
7531 build2 (MODIFY_EXPR
, void_type_node
,
7532 build_fold_indirect_ref_loc (loc
, arg2
),
7533 build1 (REALPART_EXPR
, type
, call
)));
7536 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7537 NULL_TREE if no simplification can be made. */
7540 fold_builtin_cexp (location_t loc
, tree arg0
, tree type
)
7543 tree realp
, imagp
, ifn
;
7546 if (!validate_arg (arg0
, COMPLEX_TYPE
)
7547 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) != REAL_TYPE
)
7550 /* Calculate the result when the argument is a constant. */
7551 if ((res
= do_mpc_arg1 (arg0
, type
, mpc_exp
)))
7554 rtype
= TREE_TYPE (TREE_TYPE (arg0
));
7556 /* In case we can figure out the real part of arg0 and it is constant zero
7558 if (!TARGET_C99_FUNCTIONS
)
7560 ifn
= mathfn_built_in (rtype
, BUILT_IN_CEXPI
);
7564 if ((realp
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg0
))
7565 && real_zerop (realp
))
7567 tree narg
= fold_build1_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
7568 return build_call_expr_loc (loc
, ifn
, 1, narg
);
7571 /* In case we can easily decompose real and imaginary parts split cexp
7572 to exp (r) * cexpi (i). */
7573 if (flag_unsafe_math_optimizations
7576 tree rfn
, rcall
, icall
;
7578 rfn
= mathfn_built_in (rtype
, BUILT_IN_EXP
);
7582 imagp
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
7586 icall
= build_call_expr_loc (loc
, ifn
, 1, imagp
);
7587 icall
= builtin_save_expr (icall
);
7588 rcall
= build_call_expr_loc (loc
, rfn
, 1, realp
);
7589 rcall
= builtin_save_expr (rcall
);
7590 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
7591 fold_build2_loc (loc
, MULT_EXPR
, rtype
,
7593 fold_build1_loc (loc
, REALPART_EXPR
,
7595 fold_build2_loc (loc
, MULT_EXPR
, rtype
,
7597 fold_build1_loc (loc
, IMAGPART_EXPR
,
7604 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7605 Return NULL_TREE if no simplification can be made. */
7608 fold_builtin_trunc (location_t loc
, tree fndecl
, tree arg
)
7610 if (!validate_arg (arg
, REAL_TYPE
))
7613 /* Optimize trunc of constant value. */
7614 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7616 REAL_VALUE_TYPE r
, x
;
7617 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7619 x
= TREE_REAL_CST (arg
);
7620 real_trunc (&r
, TYPE_MODE (type
), &x
);
7621 return build_real (type
, r
);
7624 return fold_trunc_transparent_mathfn (loc
, fndecl
, arg
);
7627 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7628 Return NULL_TREE if no simplification can be made. */
7631 fold_builtin_floor (location_t loc
, tree fndecl
, tree arg
)
7633 if (!validate_arg (arg
, REAL_TYPE
))
7636 /* Optimize floor of constant value. */
7637 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7641 x
= TREE_REAL_CST (arg
);
7642 if (! REAL_VALUE_ISNAN (x
) || ! flag_errno_math
)
7644 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7647 real_floor (&r
, TYPE_MODE (type
), &x
);
7648 return build_real (type
, r
);
7652 /* Fold floor (x) where x is nonnegative to trunc (x). */
7653 if (tree_expr_nonnegative_p (arg
))
7655 tree truncfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_TRUNC
);
7657 return build_call_expr_loc (loc
, truncfn
, 1, arg
);
7660 return fold_trunc_transparent_mathfn (loc
, fndecl
, arg
);
7663 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7664 Return NULL_TREE if no simplification can be made. */
7667 fold_builtin_ceil (location_t loc
, tree fndecl
, tree arg
)
7669 if (!validate_arg (arg
, REAL_TYPE
))
7672 /* Optimize ceil of constant value. */
7673 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7677 x
= TREE_REAL_CST (arg
);
7678 if (! REAL_VALUE_ISNAN (x
) || ! flag_errno_math
)
7680 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7683 real_ceil (&r
, TYPE_MODE (type
), &x
);
7684 return build_real (type
, r
);
7688 return fold_trunc_transparent_mathfn (loc
, fndecl
, arg
);
7691 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7692 Return NULL_TREE if no simplification can be made. */
7695 fold_builtin_round (location_t loc
, tree fndecl
, tree arg
)
7697 if (!validate_arg (arg
, REAL_TYPE
))
7700 /* Optimize round of constant value. */
7701 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7705 x
= TREE_REAL_CST (arg
);
7706 if (! REAL_VALUE_ISNAN (x
) || ! flag_errno_math
)
7708 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7711 real_round (&r
, TYPE_MODE (type
), &x
);
7712 return build_real (type
, r
);
7716 return fold_trunc_transparent_mathfn (loc
, fndecl
, arg
);
7719 /* Fold function call to builtin lround, lroundf or lroundl (or the
7720 corresponding long long versions) and other rounding functions. ARG
7721 is the argument to the call. Return NULL_TREE if no simplification
7725 fold_builtin_int_roundingfn (location_t loc
, tree fndecl
, tree arg
)
7727 if (!validate_arg (arg
, REAL_TYPE
))
7730 /* Optimize lround of constant value. */
7731 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7733 const REAL_VALUE_TYPE x
= TREE_REAL_CST (arg
);
7735 if (real_isfinite (&x
))
7737 tree itype
= TREE_TYPE (TREE_TYPE (fndecl
));
7738 tree ftype
= TREE_TYPE (arg
);
7742 switch (DECL_FUNCTION_CODE (fndecl
))
7744 CASE_FLT_FN (BUILT_IN_LFLOOR
):
7745 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
7746 real_floor (&r
, TYPE_MODE (ftype
), &x
);
7749 CASE_FLT_FN (BUILT_IN_LCEIL
):
7750 CASE_FLT_FN (BUILT_IN_LLCEIL
):
7751 real_ceil (&r
, TYPE_MODE (ftype
), &x
);
7754 CASE_FLT_FN (BUILT_IN_LROUND
):
7755 CASE_FLT_FN (BUILT_IN_LLROUND
):
7756 real_round (&r
, TYPE_MODE (ftype
), &x
);
7763 real_to_integer2 ((HOST_WIDE_INT
*)&val
.low
, &val
.high
, &r
);
7764 if (double_int_fits_to_tree_p (itype
, val
))
7765 return double_int_to_tree (itype
, val
);
7769 switch (DECL_FUNCTION_CODE (fndecl
))
7771 CASE_FLT_FN (BUILT_IN_LFLOOR
):
7772 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
7773 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
7774 if (tree_expr_nonnegative_p (arg
))
7775 return fold_build1_loc (loc
, FIX_TRUNC_EXPR
,
7776 TREE_TYPE (TREE_TYPE (fndecl
)), arg
);
7781 return fold_fixed_mathfn (loc
, fndecl
, arg
);
7784 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
7785 and their long and long long variants (i.e. ffsl and ffsll). ARG is
7786 the argument to the call. Return NULL_TREE if no simplification can
7790 fold_builtin_bitop (tree fndecl
, tree arg
)
7792 if (!validate_arg (arg
, INTEGER_TYPE
))
7795 /* Optimize for constant argument. */
7796 if (TREE_CODE (arg
) == INTEGER_CST
&& !TREE_OVERFLOW (arg
))
7798 HOST_WIDE_INT hi
, width
, result
;
7799 unsigned HOST_WIDE_INT lo
;
7802 type
= TREE_TYPE (arg
);
7803 width
= TYPE_PRECISION (type
);
7804 lo
= TREE_INT_CST_LOW (arg
);
7806 /* Clear all the bits that are beyond the type's precision. */
7807 if (width
> HOST_BITS_PER_WIDE_INT
)
7809 hi
= TREE_INT_CST_HIGH (arg
);
7810 if (width
< 2 * HOST_BITS_PER_WIDE_INT
)
7811 hi
&= ~((HOST_WIDE_INT
) (-1) >> (width
- HOST_BITS_PER_WIDE_INT
));
7816 if (width
< HOST_BITS_PER_WIDE_INT
)
7817 lo
&= ~((unsigned HOST_WIDE_INT
) (-1) << width
);
7820 switch (DECL_FUNCTION_CODE (fndecl
))
7822 CASE_INT_FN (BUILT_IN_FFS
):
7824 result
= ffs_hwi (lo
);
7826 result
= HOST_BITS_PER_WIDE_INT
+ ffs_hwi (hi
);
7831 CASE_INT_FN (BUILT_IN_CLZ
):
7833 result
= width
- floor_log2 (hi
) - 1 - HOST_BITS_PER_WIDE_INT
;
7835 result
= width
- floor_log2 (lo
) - 1;
7836 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type
), result
))
7840 CASE_INT_FN (BUILT_IN_CTZ
):
7842 result
= ctz_hwi (lo
);
7844 result
= HOST_BITS_PER_WIDE_INT
+ ctz_hwi (hi
);
7845 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type
), result
))
7849 CASE_INT_FN (BUILT_IN_POPCOUNT
):
7852 result
++, lo
&= lo
- 1;
7854 result
++, hi
&= (unsigned HOST_WIDE_INT
) hi
- 1;
7857 CASE_INT_FN (BUILT_IN_PARITY
):
7860 result
++, lo
&= lo
- 1;
7862 result
++, hi
&= (unsigned HOST_WIDE_INT
) hi
- 1;
7870 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl
)), result
);
7876 /* Fold function call to builtin_bswap and the long and long long
7877 variants. Return NULL_TREE if no simplification can be made. */
7879 fold_builtin_bswap (tree fndecl
, tree arg
)
7881 if (! validate_arg (arg
, INTEGER_TYPE
))
7884 /* Optimize constant value. */
7885 if (TREE_CODE (arg
) == INTEGER_CST
&& !TREE_OVERFLOW (arg
))
7887 HOST_WIDE_INT hi
, width
, r_hi
= 0;
7888 unsigned HOST_WIDE_INT lo
, r_lo
= 0;
7891 type
= TREE_TYPE (arg
);
7892 width
= TYPE_PRECISION (type
);
7893 lo
= TREE_INT_CST_LOW (arg
);
7894 hi
= TREE_INT_CST_HIGH (arg
);
7896 switch (DECL_FUNCTION_CODE (fndecl
))
7898 case BUILT_IN_BSWAP32
:
7899 case BUILT_IN_BSWAP64
:
7903 for (s
= 0; s
< width
; s
+= 8)
7905 int d
= width
- s
- 8;
7906 unsigned HOST_WIDE_INT byte
;
7908 if (s
< HOST_BITS_PER_WIDE_INT
)
7909 byte
= (lo
>> s
) & 0xff;
7911 byte
= (hi
>> (s
- HOST_BITS_PER_WIDE_INT
)) & 0xff;
7913 if (d
< HOST_BITS_PER_WIDE_INT
)
7916 r_hi
|= byte
<< (d
- HOST_BITS_PER_WIDE_INT
);
7926 if (width
< HOST_BITS_PER_WIDE_INT
)
7927 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl
)), r_lo
);
7929 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl
)), r_lo
, r_hi
);
7935 /* A subroutine of fold_builtin to fold the various logarithmic
7936 functions. Return NULL_TREE if no simplification can me made.
7937 FUNC is the corresponding MPFR logarithm function. */
7940 fold_builtin_logarithm (location_t loc
, tree fndecl
, tree arg
,
7941 int (*func
)(mpfr_ptr
, mpfr_srcptr
, mp_rnd_t
))
7943 if (validate_arg (arg
, REAL_TYPE
))
7945 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7947 const enum built_in_function fcode
= builtin_mathfn_code (arg
);
7949 /* Calculate the result when the argument is a constant. */
7950 if ((res
= do_mpfr_arg1 (arg
, type
, func
, &dconst0
, NULL
, false)))
7953 /* Special case, optimize logN(expN(x)) = x. */
7954 if (flag_unsafe_math_optimizations
7955 && ((func
== mpfr_log
7956 && (fcode
== BUILT_IN_EXP
7957 || fcode
== BUILT_IN_EXPF
7958 || fcode
== BUILT_IN_EXPL
))
7959 || (func
== mpfr_log2
7960 && (fcode
== BUILT_IN_EXP2
7961 || fcode
== BUILT_IN_EXP2F
7962 || fcode
== BUILT_IN_EXP2L
))
7963 || (func
== mpfr_log10
&& (BUILTIN_EXP10_P (fcode
)))))
7964 return fold_convert_loc (loc
, type
, CALL_EXPR_ARG (arg
, 0));
7966 /* Optimize logN(func()) for various exponential functions. We
7967 want to determine the value "x" and the power "exponent" in
7968 order to transform logN(x**exponent) into exponent*logN(x). */
7969 if (flag_unsafe_math_optimizations
)
7971 tree exponent
= 0, x
= 0;
7975 CASE_FLT_FN (BUILT_IN_EXP
):
7976 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
7977 x
= build_real (type
, real_value_truncate (TYPE_MODE (type
),
7979 exponent
= CALL_EXPR_ARG (arg
, 0);
7981 CASE_FLT_FN (BUILT_IN_EXP2
):
7982 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
7983 x
= build_real (type
, dconst2
);
7984 exponent
= CALL_EXPR_ARG (arg
, 0);
7986 CASE_FLT_FN (BUILT_IN_EXP10
):
7987 CASE_FLT_FN (BUILT_IN_POW10
):
7988 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
7990 REAL_VALUE_TYPE dconst10
;
7991 real_from_integer (&dconst10
, VOIDmode
, 10, 0, 0);
7992 x
= build_real (type
, dconst10
);
7994 exponent
= CALL_EXPR_ARG (arg
, 0);
7996 CASE_FLT_FN (BUILT_IN_SQRT
):
7997 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
7998 x
= CALL_EXPR_ARG (arg
, 0);
7999 exponent
= build_real (type
, dconsthalf
);
8001 CASE_FLT_FN (BUILT_IN_CBRT
):
8002 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
8003 x
= CALL_EXPR_ARG (arg
, 0);
8004 exponent
= build_real (type
, real_value_truncate (TYPE_MODE (type
),
8007 CASE_FLT_FN (BUILT_IN_POW
):
8008 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
8009 x
= CALL_EXPR_ARG (arg
, 0);
8010 exponent
= CALL_EXPR_ARG (arg
, 1);
8016 /* Now perform the optimization. */
8019 tree logfn
= build_call_expr_loc (loc
, fndecl
, 1, x
);
8020 return fold_build2_loc (loc
, MULT_EXPR
, type
, exponent
, logfn
);
8028 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8029 NULL_TREE if no simplification can be made. */
8032 fold_builtin_hypot (location_t loc
, tree fndecl
,
8033 tree arg0
, tree arg1
, tree type
)
8035 tree res
, narg0
, narg1
;
8037 if (!validate_arg (arg0
, REAL_TYPE
)
8038 || !validate_arg (arg1
, REAL_TYPE
))
8041 /* Calculate the result when the argument is a constant. */
8042 if ((res
= do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_hypot
)))
8045 /* If either argument to hypot has a negate or abs, strip that off.
8046 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8047 narg0
= fold_strip_sign_ops (arg0
);
8048 narg1
= fold_strip_sign_ops (arg1
);
8051 return build_call_expr_loc (loc
, fndecl
, 2, narg0
? narg0
: arg0
,
8052 narg1
? narg1
: arg1
);
8055 /* If either argument is zero, hypot is fabs of the other. */
8056 if (real_zerop (arg0
))
8057 return fold_build1_loc (loc
, ABS_EXPR
, type
, arg1
);
8058 else if (real_zerop (arg1
))
8059 return fold_build1_loc (loc
, ABS_EXPR
, type
, arg0
);
8061 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8062 if (flag_unsafe_math_optimizations
8063 && operand_equal_p (arg0
, arg1
, OEP_PURE_SAME
))
8065 const REAL_VALUE_TYPE sqrt2_trunc
8066 = real_value_truncate (TYPE_MODE (type
), dconst_sqrt2 ());
8067 return fold_build2_loc (loc
, MULT_EXPR
, type
,
8068 fold_build1_loc (loc
, ABS_EXPR
, type
, arg0
),
8069 build_real (type
, sqrt2_trunc
));
8076 /* Fold a builtin function call to pow, powf, or powl. Return
8077 NULL_TREE if no simplification can be made. */
8079 fold_builtin_pow (location_t loc
, tree fndecl
, tree arg0
, tree arg1
, tree type
)
8083 if (!validate_arg (arg0
, REAL_TYPE
)
8084 || !validate_arg (arg1
, REAL_TYPE
))
8087 /* Calculate the result when the argument is a constant. */
8088 if ((res
= do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_pow
)))
8091 /* Optimize pow(1.0,y) = 1.0. */
8092 if (real_onep (arg0
))
8093 return omit_one_operand_loc (loc
, type
, build_real (type
, dconst1
), arg1
);
8095 if (TREE_CODE (arg1
) == REAL_CST
8096 && !TREE_OVERFLOW (arg1
))
8098 REAL_VALUE_TYPE cint
;
8102 c
= TREE_REAL_CST (arg1
);
8104 /* Optimize pow(x,0.0) = 1.0. */
8105 if (REAL_VALUES_EQUAL (c
, dconst0
))
8106 return omit_one_operand_loc (loc
, type
, build_real (type
, dconst1
),
8109 /* Optimize pow(x,1.0) = x. */
8110 if (REAL_VALUES_EQUAL (c
, dconst1
))
8113 /* Optimize pow(x,-1.0) = 1.0/x. */
8114 if (REAL_VALUES_EQUAL (c
, dconstm1
))
8115 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
8116 build_real (type
, dconst1
), arg0
);
8118 /* Optimize pow(x,0.5) = sqrt(x). */
8119 if (flag_unsafe_math_optimizations
8120 && REAL_VALUES_EQUAL (c
, dconsthalf
))
8122 tree sqrtfn
= mathfn_built_in (type
, BUILT_IN_SQRT
);
8124 if (sqrtfn
!= NULL_TREE
)
8125 return build_call_expr_loc (loc
, sqrtfn
, 1, arg0
);
8128 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8129 if (flag_unsafe_math_optimizations
)
8131 const REAL_VALUE_TYPE dconstroot
8132 = real_value_truncate (TYPE_MODE (type
), dconst_third ());
8134 if (REAL_VALUES_EQUAL (c
, dconstroot
))
8136 tree cbrtfn
= mathfn_built_in (type
, BUILT_IN_CBRT
);
8137 if (cbrtfn
!= NULL_TREE
)
8138 return build_call_expr_loc (loc
, cbrtfn
, 1, arg0
);
8142 /* Check for an integer exponent. */
8143 n
= real_to_integer (&c
);
8144 real_from_integer (&cint
, VOIDmode
, n
, n
< 0 ? -1 : 0, 0);
8145 if (real_identical (&c
, &cint
))
8147 /* Attempt to evaluate pow at compile-time, unless this should
8148 raise an exception. */
8149 if (TREE_CODE (arg0
) == REAL_CST
8150 && !TREE_OVERFLOW (arg0
)
8152 || (!flag_trapping_math
&& !flag_errno_math
)
8153 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0
), dconst0
)))
8158 x
= TREE_REAL_CST (arg0
);
8159 inexact
= real_powi (&x
, TYPE_MODE (type
), &x
, n
);
8160 if (flag_unsafe_math_optimizations
|| !inexact
)
8161 return build_real (type
, x
);
8164 /* Strip sign ops from even integer powers. */
8165 if ((n
& 1) == 0 && flag_unsafe_math_optimizations
)
8167 tree narg0
= fold_strip_sign_ops (arg0
);
8169 return build_call_expr_loc (loc
, fndecl
, 2, narg0
, arg1
);
8174 if (flag_unsafe_math_optimizations
)
8176 const enum built_in_function fcode
= builtin_mathfn_code (arg0
);
8178 /* Optimize pow(expN(x),y) = expN(x*y). */
8179 if (BUILTIN_EXPONENT_P (fcode
))
8181 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
8182 tree arg
= CALL_EXPR_ARG (arg0
, 0);
8183 arg
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg
, arg1
);
8184 return build_call_expr_loc (loc
, expfn
, 1, arg
);
8187 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8188 if (BUILTIN_SQRT_P (fcode
))
8190 tree narg0
= CALL_EXPR_ARG (arg0
, 0);
8191 tree narg1
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg1
,
8192 build_real (type
, dconsthalf
));
8193 return build_call_expr_loc (loc
, fndecl
, 2, narg0
, narg1
);
8196 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8197 if (BUILTIN_CBRT_P (fcode
))
8199 tree arg
= CALL_EXPR_ARG (arg0
, 0);
8200 if (tree_expr_nonnegative_p (arg
))
8202 const REAL_VALUE_TYPE dconstroot
8203 = real_value_truncate (TYPE_MODE (type
), dconst_third ());
8204 tree narg1
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg1
,
8205 build_real (type
, dconstroot
));
8206 return build_call_expr_loc (loc
, fndecl
, 2, arg
, narg1
);
8210 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8211 if (fcode
== BUILT_IN_POW
8212 || fcode
== BUILT_IN_POWF
8213 || fcode
== BUILT_IN_POWL
)
8215 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
8216 if (tree_expr_nonnegative_p (arg00
))
8218 tree arg01
= CALL_EXPR_ARG (arg0
, 1);
8219 tree narg1
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg01
, arg1
);
8220 return build_call_expr_loc (loc
, fndecl
, 2, arg00
, narg1
);
8228 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8229 Return NULL_TREE if no simplification can be made. */
8231 fold_builtin_powi (location_t loc
, tree fndecl ATTRIBUTE_UNUSED
,
8232 tree arg0
, tree arg1
, tree type
)
8234 if (!validate_arg (arg0
, REAL_TYPE
)
8235 || !validate_arg (arg1
, INTEGER_TYPE
))
8238 /* Optimize pow(1.0,y) = 1.0. */
8239 if (real_onep (arg0
))
8240 return omit_one_operand_loc (loc
, type
, build_real (type
, dconst1
), arg1
);
8242 if (host_integerp (arg1
, 0))
8244 HOST_WIDE_INT c
= TREE_INT_CST_LOW (arg1
);
8246 /* Evaluate powi at compile-time. */
8247 if (TREE_CODE (arg0
) == REAL_CST
8248 && !TREE_OVERFLOW (arg0
))
8251 x
= TREE_REAL_CST (arg0
);
8252 real_powi (&x
, TYPE_MODE (type
), &x
, c
);
8253 return build_real (type
, x
);
8256 /* Optimize pow(x,0) = 1.0. */
8258 return omit_one_operand_loc (loc
, type
, build_real (type
, dconst1
),
8261 /* Optimize pow(x,1) = x. */
8265 /* Optimize pow(x,-1) = 1.0/x. */
8267 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
8268 build_real (type
, dconst1
), arg0
);
8274 /* A subroutine of fold_builtin to fold the various exponent
8275 functions. Return NULL_TREE if no simplification can be made.
8276 FUNC is the corresponding MPFR exponent function. */
8279 fold_builtin_exponent (location_t loc
, tree fndecl
, tree arg
,
8280 int (*func
)(mpfr_ptr
, mpfr_srcptr
, mp_rnd_t
))
8282 if (validate_arg (arg
, REAL_TYPE
))
8284 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8287 /* Calculate the result when the argument is a constant. */
8288 if ((res
= do_mpfr_arg1 (arg
, type
, func
, NULL
, NULL
, 0)))
8291 /* Optimize expN(logN(x)) = x. */
8292 if (flag_unsafe_math_optimizations
)
8294 const enum built_in_function fcode
= builtin_mathfn_code (arg
);
8296 if ((func
== mpfr_exp
8297 && (fcode
== BUILT_IN_LOG
8298 || fcode
== BUILT_IN_LOGF
8299 || fcode
== BUILT_IN_LOGL
))
8300 || (func
== mpfr_exp2
8301 && (fcode
== BUILT_IN_LOG2
8302 || fcode
== BUILT_IN_LOG2F
8303 || fcode
== BUILT_IN_LOG2L
))
8304 || (func
== mpfr_exp10
8305 && (fcode
== BUILT_IN_LOG10
8306 || fcode
== BUILT_IN_LOG10F
8307 || fcode
== BUILT_IN_LOG10L
)))
8308 return fold_convert_loc (loc
, type
, CALL_EXPR_ARG (arg
, 0));
8315 /* Return true if VAR is a VAR_DECL or a component thereof. */
8318 var_decl_component_p (tree var
)
8321 while (handled_component_p (inner
))
8322 inner
= TREE_OPERAND (inner
, 0);
8323 return SSA_VAR_P (inner
);
8326 /* Fold function call to builtin memset. Return
8327 NULL_TREE if no simplification can be made. */
8330 fold_builtin_memset (location_t loc
, tree dest
, tree c
, tree len
,
8331 tree type
, bool ignore
)
8333 tree var
, ret
, etype
;
8334 unsigned HOST_WIDE_INT length
, cval
;
8336 if (! validate_arg (dest
, POINTER_TYPE
)
8337 || ! validate_arg (c
, INTEGER_TYPE
)
8338 || ! validate_arg (len
, INTEGER_TYPE
))
8341 if (! host_integerp (len
, 1))
8344 /* If the LEN parameter is zero, return DEST. */
8345 if (integer_zerop (len
))
8346 return omit_one_operand_loc (loc
, type
, dest
, c
);
8348 if (! host_integerp (c
, 1) || TREE_SIDE_EFFECTS (dest
))
8353 if (TREE_CODE (var
) != ADDR_EXPR
)
8356 var
= TREE_OPERAND (var
, 0);
8357 if (TREE_THIS_VOLATILE (var
))
8360 etype
= TREE_TYPE (var
);
8361 if (TREE_CODE (etype
) == ARRAY_TYPE
)
8362 etype
= TREE_TYPE (etype
);
8364 if (!INTEGRAL_TYPE_P (etype
)
8365 && !POINTER_TYPE_P (etype
))
8368 if (! var_decl_component_p (var
))
8371 length
= tree_low_cst (len
, 1);
8372 if (GET_MODE_SIZE (TYPE_MODE (etype
)) != length
8373 || get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
8377 if (length
> HOST_BITS_PER_WIDE_INT
/ BITS_PER_UNIT
)
8380 if (integer_zerop (c
))
8384 if (CHAR_BIT
!= 8 || BITS_PER_UNIT
!= 8 || HOST_BITS_PER_WIDE_INT
> 64)
8387 cval
= tree_low_cst (c
, 1);
8391 cval
|= (cval
<< 31) << 1;
8394 ret
= build_int_cst_type (etype
, cval
);
8395 var
= build_fold_indirect_ref_loc (loc
,
8396 fold_convert_loc (loc
,
8397 build_pointer_type (etype
),
8399 ret
= build2 (MODIFY_EXPR
, etype
, var
, ret
);
8403 return omit_one_operand_loc (loc
, type
, dest
, ret
);
8406 /* Fold function call to builtin memset. Return
8407 NULL_TREE if no simplification can be made. */
8410 fold_builtin_bzero (location_t loc
, tree dest
, tree size
, bool ignore
)
8412 if (! validate_arg (dest
, POINTER_TYPE
)
8413 || ! validate_arg (size
, INTEGER_TYPE
))
8419 /* New argument list transforming bzero(ptr x, int y) to
8420 memset(ptr x, int 0, size_t y). This is done this way
8421 so that if it isn't expanded inline, we fallback to
8422 calling bzero instead of memset. */
8424 return fold_builtin_memset (loc
, dest
, integer_zero_node
,
8425 fold_convert_loc (loc
, sizetype
, size
),
8426 void_type_node
, ignore
);
8429 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8430 NULL_TREE if no simplification can be made.
8431 If ENDP is 0, return DEST (like memcpy).
8432 If ENDP is 1, return DEST+LEN (like mempcpy).
8433 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8434 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8438 fold_builtin_memory_op (location_t loc
, tree dest
, tree src
,
8439 tree len
, tree type
, bool ignore
, int endp
)
8441 tree destvar
, srcvar
, expr
;
8443 if (! validate_arg (dest
, POINTER_TYPE
)
8444 || ! validate_arg (src
, POINTER_TYPE
)
8445 || ! validate_arg (len
, INTEGER_TYPE
))
8448 /* If the LEN parameter is zero, return DEST. */
8449 if (integer_zerop (len
))
8450 return omit_one_operand_loc (loc
, type
, dest
, src
);
8452 /* If SRC and DEST are the same (and not volatile), return
8453 DEST{,+LEN,+LEN-1}. */
8454 if (operand_equal_p (src
, dest
, 0))
8458 tree srctype
, desttype
;
8459 unsigned int src_align
, dest_align
;
8464 src_align
= get_pointer_alignment (src
, BIGGEST_ALIGNMENT
);
8465 dest_align
= get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
8467 /* Both DEST and SRC must be pointer types.
8468 ??? This is what old code did. Is the testing for pointer types
8471 If either SRC is readonly or length is 1, we can use memcpy. */
8472 if (!dest_align
|| !src_align
)
8474 if (readonly_data_expr (src
)
8475 || (host_integerp (len
, 1)
8476 && (MIN (src_align
, dest_align
) / BITS_PER_UNIT
8477 >= (unsigned HOST_WIDE_INT
) tree_low_cst (len
, 1))))
8479 tree fn
= implicit_built_in_decls
[BUILT_IN_MEMCPY
];
8482 return build_call_expr_loc (loc
, fn
, 3, dest
, src
, len
);
8485 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8486 if (TREE_CODE (src
) == ADDR_EXPR
8487 && TREE_CODE (dest
) == ADDR_EXPR
)
8489 tree src_base
, dest_base
, fn
;
8490 HOST_WIDE_INT src_offset
= 0, dest_offset
= 0;
8491 HOST_WIDE_INT size
= -1;
8492 HOST_WIDE_INT maxsize
= -1;
8494 srcvar
= TREE_OPERAND (src
, 0);
8495 src_base
= get_ref_base_and_extent (srcvar
, &src_offset
,
8497 destvar
= TREE_OPERAND (dest
, 0);
8498 dest_base
= get_ref_base_and_extent (destvar
, &dest_offset
,
8500 if (host_integerp (len
, 1))
8501 maxsize
= tree_low_cst (len
, 1);
8504 src_offset
/= BITS_PER_UNIT
;
8505 dest_offset
/= BITS_PER_UNIT
;
8506 if (SSA_VAR_P (src_base
)
8507 && SSA_VAR_P (dest_base
))
8509 if (operand_equal_p (src_base
, dest_base
, 0)
8510 && ranges_overlap_p (src_offset
, maxsize
,
8511 dest_offset
, maxsize
))
8514 else if (TREE_CODE (src_base
) == MEM_REF
8515 && TREE_CODE (dest_base
) == MEM_REF
)
8518 if (! operand_equal_p (TREE_OPERAND (src_base
, 0),
8519 TREE_OPERAND (dest_base
, 0), 0))
8521 off
= double_int_add (mem_ref_offset (src_base
),
8522 shwi_to_double_int (src_offset
));
8523 if (!double_int_fits_in_shwi_p (off
))
8525 src_offset
= off
.low
;
8526 off
= double_int_add (mem_ref_offset (dest_base
),
8527 shwi_to_double_int (dest_offset
));
8528 if (!double_int_fits_in_shwi_p (off
))
8530 dest_offset
= off
.low
;
8531 if (ranges_overlap_p (src_offset
, maxsize
,
8532 dest_offset
, maxsize
))
8538 fn
= implicit_built_in_decls
[BUILT_IN_MEMCPY
];
8541 return build_call_expr_loc (loc
, fn
, 3, dest
, src
, len
);
8544 /* If the destination and source do not alias optimize into
8546 if ((is_gimple_min_invariant (dest
)
8547 || TREE_CODE (dest
) == SSA_NAME
)
8548 && (is_gimple_min_invariant (src
)
8549 || TREE_CODE (src
) == SSA_NAME
))
8552 ao_ref_init_from_ptr_and_size (&destr
, dest
, len
);
8553 ao_ref_init_from_ptr_and_size (&srcr
, src
, len
);
8554 if (!refs_may_alias_p_1 (&destr
, &srcr
, false))
8557 fn
= implicit_built_in_decls
[BUILT_IN_MEMCPY
];
8560 return build_call_expr_loc (loc
, fn
, 3, dest
, src
, len
);
8567 if (!host_integerp (len
, 0))
8570 This logic lose for arguments like (type *)malloc (sizeof (type)),
8571 since we strip the casts of up to VOID return value from malloc.
8572 Perhaps we ought to inherit type from non-VOID argument here? */
8575 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
8576 if (TREE_CODE (src
) == POINTER_PLUS_EXPR
)
8578 tree tem
= TREE_OPERAND (src
, 0);
8580 if (tem
!= TREE_OPERAND (src
, 0))
8581 src
= build1 (NOP_EXPR
, TREE_TYPE (tem
), src
);
8583 if (TREE_CODE (dest
) == POINTER_PLUS_EXPR
)
8585 tree tem
= TREE_OPERAND (dest
, 0);
8587 if (tem
!= TREE_OPERAND (dest
, 0))
8588 dest
= build1 (NOP_EXPR
, TREE_TYPE (tem
), dest
);
8590 srctype
= TREE_TYPE (TREE_TYPE (src
));
8592 && TREE_CODE (srctype
) == ARRAY_TYPE
8593 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype
), len
))
8595 srctype
= TREE_TYPE (srctype
);
8597 src
= build1 (NOP_EXPR
, build_pointer_type (srctype
), src
);
8599 desttype
= TREE_TYPE (TREE_TYPE (dest
));
8601 && TREE_CODE (desttype
) == ARRAY_TYPE
8602 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype
), len
))
8604 desttype
= TREE_TYPE (desttype
);
8606 dest
= build1 (NOP_EXPR
, build_pointer_type (desttype
), dest
);
8608 if (!srctype
|| !desttype
8609 || TREE_ADDRESSABLE (srctype
)
8610 || TREE_ADDRESSABLE (desttype
)
8611 || !TYPE_SIZE_UNIT (srctype
)
8612 || !TYPE_SIZE_UNIT (desttype
)
8613 || TREE_CODE (TYPE_SIZE_UNIT (srctype
)) != INTEGER_CST
8614 || TREE_CODE (TYPE_SIZE_UNIT (desttype
)) != INTEGER_CST
)
8617 src_align
= get_pointer_alignment (src
, BIGGEST_ALIGNMENT
);
8618 dest_align
= get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
8619 if (dest_align
< TYPE_ALIGN (desttype
)
8620 || src_align
< TYPE_ALIGN (srctype
))
8624 dest
= builtin_save_expr (dest
);
8626 /* Build accesses at offset zero with a ref-all character type. */
8627 off0
= build_int_cst (build_pointer_type_for_mode (char_type_node
,
8628 ptr_mode
, true), 0);
8631 STRIP_NOPS (destvar
);
8632 if (TREE_CODE (destvar
) == ADDR_EXPR
8633 && var_decl_component_p (TREE_OPERAND (destvar
, 0))
8634 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype
), len
))
8635 destvar
= fold_build2 (MEM_REF
, desttype
, destvar
, off0
);
8637 destvar
= NULL_TREE
;
8640 STRIP_NOPS (srcvar
);
8641 if (TREE_CODE (srcvar
) == ADDR_EXPR
8642 && var_decl_component_p (TREE_OPERAND (srcvar
, 0))
8643 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype
), len
))
8646 || src_align
>= TYPE_ALIGN (desttype
))
8647 srcvar
= fold_build2 (MEM_REF
, destvar
? desttype
: srctype
,
8649 else if (!STRICT_ALIGNMENT
)
8651 srctype
= build_aligned_type (TYPE_MAIN_VARIANT (desttype
),
8653 srcvar
= fold_build2 (MEM_REF
, srctype
, srcvar
, off0
);
8661 if (srcvar
== NULL_TREE
&& destvar
== NULL_TREE
)
8664 if (srcvar
== NULL_TREE
)
8667 if (src_align
>= TYPE_ALIGN (desttype
))
8668 srcvar
= fold_build2 (MEM_REF
, desttype
, src
, off0
);
8671 if (STRICT_ALIGNMENT
)
8673 srctype
= build_aligned_type (TYPE_MAIN_VARIANT (desttype
),
8675 srcvar
= fold_build2 (MEM_REF
, srctype
, src
, off0
);
8678 else if (destvar
== NULL_TREE
)
8681 if (dest_align
>= TYPE_ALIGN (srctype
))
8682 destvar
= fold_build2 (MEM_REF
, srctype
, dest
, off0
);
8685 if (STRICT_ALIGNMENT
)
8687 desttype
= build_aligned_type (TYPE_MAIN_VARIANT (srctype
),
8689 destvar
= fold_build2 (MEM_REF
, desttype
, dest
, off0
);
8693 expr
= build2 (MODIFY_EXPR
, TREE_TYPE (destvar
), destvar
, srcvar
);
8699 if (endp
== 0 || endp
== 3)
8700 return omit_one_operand_loc (loc
, type
, dest
, expr
);
8706 len
= fold_build2_loc (loc
, MINUS_EXPR
, TREE_TYPE (len
), len
,
8709 len
= fold_convert_loc (loc
, sizetype
, len
);
8710 dest
= fold_build2_loc (loc
, POINTER_PLUS_EXPR
, TREE_TYPE (dest
), dest
, len
);
8711 dest
= fold_convert_loc (loc
, type
, dest
);
8713 dest
= omit_one_operand_loc (loc
, type
, dest
, expr
);
8717 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8718 If LEN is not NULL, it represents the length of the string to be
8719 copied. Return NULL_TREE if no simplification can be made. */
8722 fold_builtin_strcpy (location_t loc
, tree fndecl
, tree dest
, tree src
, tree len
)
8726 if (!validate_arg (dest
, POINTER_TYPE
)
8727 || !validate_arg (src
, POINTER_TYPE
))
8730 /* If SRC and DEST are the same (and not volatile), return DEST. */
8731 if (operand_equal_p (src
, dest
, 0))
8732 return fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), dest
);
8734 if (optimize_function_for_size_p (cfun
))
8737 fn
= implicit_built_in_decls
[BUILT_IN_MEMCPY
];
8743 len
= c_strlen (src
, 1);
8744 if (! len
|| TREE_SIDE_EFFECTS (len
))
8748 len
= size_binop_loc (loc
, PLUS_EXPR
, len
, ssize_int (1));
8749 return fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)),
8750 build_call_expr_loc (loc
, fn
, 3, dest
, src
, len
));
8753 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
8754 Return NULL_TREE if no simplification can be made. */
8757 fold_builtin_stpcpy (location_t loc
, tree fndecl
, tree dest
, tree src
)
8759 tree fn
, len
, lenp1
, call
, type
;
8761 if (!validate_arg (dest
, POINTER_TYPE
)
8762 || !validate_arg (src
, POINTER_TYPE
))
8765 len
= c_strlen (src
, 1);
8767 || TREE_CODE (len
) != INTEGER_CST
)
8770 if (optimize_function_for_size_p (cfun
)
8771 /* If length is zero it's small enough. */
8772 && !integer_zerop (len
))
8775 fn
= implicit_built_in_decls
[BUILT_IN_MEMCPY
];
8779 lenp1
= size_binop_loc (loc
, PLUS_EXPR
, len
, ssize_int (1));
8780 /* We use dest twice in building our expression. Save it from
8781 multiple expansions. */
8782 dest
= builtin_save_expr (dest
);
8783 call
= build_call_expr_loc (loc
, fn
, 3, dest
, src
, lenp1
);
8785 type
= TREE_TYPE (TREE_TYPE (fndecl
));
8786 len
= fold_convert_loc (loc
, sizetype
, len
);
8787 dest
= fold_build2_loc (loc
, POINTER_PLUS_EXPR
, TREE_TYPE (dest
), dest
, len
);
8788 dest
= fold_convert_loc (loc
, type
, dest
);
8789 dest
= omit_one_operand_loc (loc
, type
, dest
, call
);
8793 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8794 If SLEN is not NULL, it represents the length of the source string.
8795 Return NULL_TREE if no simplification can be made. */
8798 fold_builtin_strncpy (location_t loc
, tree fndecl
, tree dest
,
8799 tree src
, tree len
, tree slen
)
8803 if (!validate_arg (dest
, POINTER_TYPE
)
8804 || !validate_arg (src
, POINTER_TYPE
)
8805 || !validate_arg (len
, INTEGER_TYPE
))
8808 /* If the LEN parameter is zero, return DEST. */
8809 if (integer_zerop (len
))
8810 return omit_one_operand_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), dest
, src
);
8812 /* We can't compare slen with len as constants below if len is not a
8814 if (len
== 0 || TREE_CODE (len
) != INTEGER_CST
)
8818 slen
= c_strlen (src
, 1);
8820 /* Now, we must be passed a constant src ptr parameter. */
8821 if (slen
== 0 || TREE_CODE (slen
) != INTEGER_CST
)
8824 slen
= size_binop_loc (loc
, PLUS_EXPR
, slen
, ssize_int (1));
8826 /* We do not support simplification of this case, though we do
8827 support it when expanding trees into RTL. */
8828 /* FIXME: generate a call to __builtin_memset. */
8829 if (tree_int_cst_lt (slen
, len
))
8832 /* OK transform into builtin memcpy. */
8833 fn
= implicit_built_in_decls
[BUILT_IN_MEMCPY
];
8836 return fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)),
8837 build_call_expr_loc (loc
, fn
, 3, dest
, src
, len
));
8840 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8841 arguments to the call, and TYPE is its return type.
8842 Return NULL_TREE if no simplification can be made. */
8845 fold_builtin_memchr (location_t loc
, tree arg1
, tree arg2
, tree len
, tree type
)
8847 if (!validate_arg (arg1
, POINTER_TYPE
)
8848 || !validate_arg (arg2
, INTEGER_TYPE
)
8849 || !validate_arg (len
, INTEGER_TYPE
))
8855 if (TREE_CODE (arg2
) != INTEGER_CST
8856 || !host_integerp (len
, 1))
8859 p1
= c_getstr (arg1
);
8860 if (p1
&& compare_tree_int (len
, strlen (p1
) + 1) <= 0)
8866 if (target_char_cast (arg2
, &c
))
8869 r
= (char *) memchr (p1
, c
, tree_low_cst (len
, 1));
8872 return build_int_cst (TREE_TYPE (arg1
), 0);
8874 tem
= fold_build2_loc (loc
, POINTER_PLUS_EXPR
, TREE_TYPE (arg1
), arg1
,
8876 return fold_convert_loc (loc
, type
, tem
);
8882 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8883 Return NULL_TREE if no simplification can be made. */
8886 fold_builtin_memcmp (location_t loc
, tree arg1
, tree arg2
, tree len
)
8888 const char *p1
, *p2
;
8890 if (!validate_arg (arg1
, POINTER_TYPE
)
8891 || !validate_arg (arg2
, POINTER_TYPE
)
8892 || !validate_arg (len
, INTEGER_TYPE
))
8895 /* If the LEN parameter is zero, return zero. */
8896 if (integer_zerop (len
))
8897 return omit_two_operands_loc (loc
, integer_type_node
, integer_zero_node
,
8900 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8901 if (operand_equal_p (arg1
, arg2
, 0))
8902 return omit_one_operand_loc (loc
, integer_type_node
, integer_zero_node
, len
);
8904 p1
= c_getstr (arg1
);
8905 p2
= c_getstr (arg2
);
8907 /* If all arguments are constant, and the value of len is not greater
8908 than the lengths of arg1 and arg2, evaluate at compile-time. */
8909 if (host_integerp (len
, 1) && p1
&& p2
8910 && compare_tree_int (len
, strlen (p1
) + 1) <= 0
8911 && compare_tree_int (len
, strlen (p2
) + 1) <= 0)
8913 const int r
= memcmp (p1
, p2
, tree_low_cst (len
, 1));
8916 return integer_one_node
;
8918 return integer_minus_one_node
;
8920 return integer_zero_node
;
8923 /* If len parameter is one, return an expression corresponding to
8924 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8925 if (host_integerp (len
, 1) && tree_low_cst (len
, 1) == 1)
8927 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
8928 tree cst_uchar_ptr_node
8929 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
8932 = fold_convert_loc (loc
, integer_type_node
,
8933 build1 (INDIRECT_REF
, cst_uchar_node
,
8934 fold_convert_loc (loc
,
8938 = fold_convert_loc (loc
, integer_type_node
,
8939 build1 (INDIRECT_REF
, cst_uchar_node
,
8940 fold_convert_loc (loc
,
8943 return fold_build2_loc (loc
, MINUS_EXPR
, integer_type_node
, ind1
, ind2
);
8949 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8950 Return NULL_TREE if no simplification can be made. */
8953 fold_builtin_strcmp (location_t loc
, tree arg1
, tree arg2
)
8955 const char *p1
, *p2
;
8957 if (!validate_arg (arg1
, POINTER_TYPE
)
8958 || !validate_arg (arg2
, POINTER_TYPE
))
8961 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8962 if (operand_equal_p (arg1
, arg2
, 0))
8963 return integer_zero_node
;
8965 p1
= c_getstr (arg1
);
8966 p2
= c_getstr (arg2
);
8970 const int i
= strcmp (p1
, p2
);
8972 return integer_minus_one_node
;
8974 return integer_one_node
;
8976 return integer_zero_node
;
8979 /* If the second arg is "", return *(const unsigned char*)arg1. */
8980 if (p2
&& *p2
== '\0')
8982 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
8983 tree cst_uchar_ptr_node
8984 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
8986 return fold_convert_loc (loc
, integer_type_node
,
8987 build1 (INDIRECT_REF
, cst_uchar_node
,
8988 fold_convert_loc (loc
,
8993 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8994 if (p1
&& *p1
== '\0')
8996 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
8997 tree cst_uchar_ptr_node
8998 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
9001 = fold_convert_loc (loc
, integer_type_node
,
9002 build1 (INDIRECT_REF
, cst_uchar_node
,
9003 fold_convert_loc (loc
,
9006 return fold_build1_loc (loc
, NEGATE_EXPR
, integer_type_node
, temp
);
9012 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9013 Return NULL_TREE if no simplification can be made. */
9016 fold_builtin_strncmp (location_t loc
, tree arg1
, tree arg2
, tree len
)
9018 const char *p1
, *p2
;
9020 if (!validate_arg (arg1
, POINTER_TYPE
)
9021 || !validate_arg (arg2
, POINTER_TYPE
)
9022 || !validate_arg (len
, INTEGER_TYPE
))
9025 /* If the LEN parameter is zero, return zero. */
9026 if (integer_zerop (len
))
9027 return omit_two_operands_loc (loc
, integer_type_node
, integer_zero_node
,
9030 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9031 if (operand_equal_p (arg1
, arg2
, 0))
9032 return omit_one_operand_loc (loc
, integer_type_node
, integer_zero_node
, len
);
9034 p1
= c_getstr (arg1
);
9035 p2
= c_getstr (arg2
);
9037 if (host_integerp (len
, 1) && p1
&& p2
)
9039 const int i
= strncmp (p1
, p2
, tree_low_cst (len
, 1));
9041 return integer_one_node
;
9043 return integer_minus_one_node
;
9045 return integer_zero_node
;
9048 /* If the second arg is "", and the length is greater than zero,
9049 return *(const unsigned char*)arg1. */
9050 if (p2
&& *p2
== '\0'
9051 && TREE_CODE (len
) == INTEGER_CST
9052 && tree_int_cst_sgn (len
) == 1)
9054 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
9055 tree cst_uchar_ptr_node
9056 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
9058 return fold_convert_loc (loc
, integer_type_node
,
9059 build1 (INDIRECT_REF
, cst_uchar_node
,
9060 fold_convert_loc (loc
,
9065 /* If the first arg is "", and the length is greater than zero,
9066 return -*(const unsigned char*)arg2. */
9067 if (p1
&& *p1
== '\0'
9068 && TREE_CODE (len
) == INTEGER_CST
9069 && tree_int_cst_sgn (len
) == 1)
9071 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
9072 tree cst_uchar_ptr_node
9073 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
9075 tree temp
= fold_convert_loc (loc
, integer_type_node
,
9076 build1 (INDIRECT_REF
, cst_uchar_node
,
9077 fold_convert_loc (loc
,
9080 return fold_build1_loc (loc
, NEGATE_EXPR
, integer_type_node
, temp
);
9083 /* If len parameter is one, return an expression corresponding to
9084 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9085 if (host_integerp (len
, 1) && tree_low_cst (len
, 1) == 1)
9087 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
9088 tree cst_uchar_ptr_node
9089 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
9091 tree ind1
= fold_convert_loc (loc
, integer_type_node
,
9092 build1 (INDIRECT_REF
, cst_uchar_node
,
9093 fold_convert_loc (loc
,
9096 tree ind2
= fold_convert_loc (loc
, integer_type_node
,
9097 build1 (INDIRECT_REF
, cst_uchar_node
,
9098 fold_convert_loc (loc
,
9101 return fold_build2_loc (loc
, MINUS_EXPR
, integer_type_node
, ind1
, ind2
);
9107 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9108 ARG. Return NULL_TREE if no simplification can be made. */
9111 fold_builtin_signbit (location_t loc
, tree arg
, tree type
)
9113 if (!validate_arg (arg
, REAL_TYPE
))
9116 /* If ARG is a compile-time constant, determine the result. */
9117 if (TREE_CODE (arg
) == REAL_CST
9118 && !TREE_OVERFLOW (arg
))
9122 c
= TREE_REAL_CST (arg
);
9123 return (REAL_VALUE_NEGATIVE (c
)
9124 ? build_one_cst (type
)
9125 : build_zero_cst (type
));
9128 /* If ARG is non-negative, the result is always zero. */
9129 if (tree_expr_nonnegative_p (arg
))
9130 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
9132 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9133 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg
))))
9134 return fold_build2_loc (loc
, LT_EXPR
, type
, arg
,
9135 build_real (TREE_TYPE (arg
), dconst0
));
9140 /* Fold function call to builtin copysign, copysignf or copysignl with
9141 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9145 fold_builtin_copysign (location_t loc
, tree fndecl
,
9146 tree arg1
, tree arg2
, tree type
)
9150 if (!validate_arg (arg1
, REAL_TYPE
)
9151 || !validate_arg (arg2
, REAL_TYPE
))
9154 /* copysign(X,X) is X. */
9155 if (operand_equal_p (arg1
, arg2
, 0))
9156 return fold_convert_loc (loc
, type
, arg1
);
9158 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9159 if (TREE_CODE (arg1
) == REAL_CST
9160 && TREE_CODE (arg2
) == REAL_CST
9161 && !TREE_OVERFLOW (arg1
)
9162 && !TREE_OVERFLOW (arg2
))
9164 REAL_VALUE_TYPE c1
, c2
;
9166 c1
= TREE_REAL_CST (arg1
);
9167 c2
= TREE_REAL_CST (arg2
);
9168 /* c1.sign := c2.sign. */
9169 real_copysign (&c1
, &c2
);
9170 return build_real (type
, c1
);
9173 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9174 Remember to evaluate Y for side-effects. */
9175 if (tree_expr_nonnegative_p (arg2
))
9176 return omit_one_operand_loc (loc
, type
,
9177 fold_build1_loc (loc
, ABS_EXPR
, type
, arg1
),
9180 /* Strip sign changing operations for the first argument. */
9181 tem
= fold_strip_sign_ops (arg1
);
9183 return build_call_expr_loc (loc
, fndecl
, 2, tem
, arg2
);
9188 /* Fold a call to builtin isascii with argument ARG. */
9191 fold_builtin_isascii (location_t loc
, tree arg
)
9193 if (!validate_arg (arg
, INTEGER_TYPE
))
9197 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9198 arg
= fold_build2 (BIT_AND_EXPR
, integer_type_node
, arg
,
9199 build_int_cst (NULL_TREE
,
9200 ~ (unsigned HOST_WIDE_INT
) 0x7f));
9201 return fold_build2_loc (loc
, EQ_EXPR
, integer_type_node
,
9202 arg
, integer_zero_node
);
9206 /* Fold a call to builtin toascii with argument ARG. */
9209 fold_builtin_toascii (location_t loc
, tree arg
)
9211 if (!validate_arg (arg
, INTEGER_TYPE
))
9214 /* Transform toascii(c) -> (c & 0x7f). */
9215 return fold_build2_loc (loc
, BIT_AND_EXPR
, integer_type_node
, arg
,
9216 build_int_cst (NULL_TREE
, 0x7f));
9219 /* Fold a call to builtin isdigit with argument ARG. */
9222 fold_builtin_isdigit (location_t loc
, tree arg
)
9224 if (!validate_arg (arg
, INTEGER_TYPE
))
9228 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9229 /* According to the C standard, isdigit is unaffected by locale.
9230 However, it definitely is affected by the target character set. */
9231 unsigned HOST_WIDE_INT target_digit0
9232 = lang_hooks
.to_target_charset ('0');
9234 if (target_digit0
== 0)
9237 arg
= fold_convert_loc (loc
, unsigned_type_node
, arg
);
9238 arg
= fold_build2 (MINUS_EXPR
, unsigned_type_node
, arg
,
9239 build_int_cst (unsigned_type_node
, target_digit0
));
9240 return fold_build2_loc (loc
, LE_EXPR
, integer_type_node
, arg
,
9241 build_int_cst (unsigned_type_node
, 9));
9245 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9248 fold_builtin_fabs (location_t loc
, tree arg
, tree type
)
9250 if (!validate_arg (arg
, REAL_TYPE
))
9253 arg
= fold_convert_loc (loc
, type
, arg
);
9254 if (TREE_CODE (arg
) == REAL_CST
)
9255 return fold_abs_const (arg
, type
);
9256 return fold_build1_loc (loc
, ABS_EXPR
, type
, arg
);
9259 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9262 fold_builtin_abs (location_t loc
, tree arg
, tree type
)
9264 if (!validate_arg (arg
, INTEGER_TYPE
))
9267 arg
= fold_convert_loc (loc
, type
, arg
);
9268 if (TREE_CODE (arg
) == INTEGER_CST
)
9269 return fold_abs_const (arg
, type
);
9270 return fold_build1_loc (loc
, ABS_EXPR
, type
, arg
);
9273 /* Fold a fma operation with arguments ARG[012]. */
9276 fold_fma (location_t loc ATTRIBUTE_UNUSED
,
9277 tree type
, tree arg0
, tree arg1
, tree arg2
)
9279 if (TREE_CODE (arg0
) == REAL_CST
9280 && TREE_CODE (arg1
) == REAL_CST
9281 && TREE_CODE (arg2
) == REAL_CST
)
9282 return do_mpfr_arg3 (arg0
, arg1
, arg2
, type
, mpfr_fma
);
9287 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
9290 fold_builtin_fma (location_t loc
, tree arg0
, tree arg1
, tree arg2
, tree type
)
9292 if (validate_arg (arg0
, REAL_TYPE
)
9293 && validate_arg(arg1
, REAL_TYPE
)
9294 && validate_arg(arg2
, REAL_TYPE
))
9296 tree tem
= fold_fma (loc
, type
, arg0
, arg1
, arg2
);
9300 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9301 if (optab_handler (fma_optab
, TYPE_MODE (type
)) != CODE_FOR_nothing
)
9302 return fold_build3_loc (loc
, FMA_EXPR
, type
, arg0
, arg1
, arg2
);
9307 /* Fold a call to builtin fmin or fmax. */
9310 fold_builtin_fmin_fmax (location_t loc
, tree arg0
, tree arg1
,
9311 tree type
, bool max
)
9313 if (validate_arg (arg0
, REAL_TYPE
) && validate_arg (arg1
, REAL_TYPE
))
9315 /* Calculate the result when the argument is a constant. */
9316 tree res
= do_mpfr_arg2 (arg0
, arg1
, type
, (max
? mpfr_max
: mpfr_min
));
9321 /* If either argument is NaN, return the other one. Avoid the
9322 transformation if we get (and honor) a signalling NaN. Using
9323 omit_one_operand() ensures we create a non-lvalue. */
9324 if (TREE_CODE (arg0
) == REAL_CST
9325 && real_isnan (&TREE_REAL_CST (arg0
))
9326 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
9327 || ! TREE_REAL_CST (arg0
).signalling
))
9328 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
9329 if (TREE_CODE (arg1
) == REAL_CST
9330 && real_isnan (&TREE_REAL_CST (arg1
))
9331 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1
)))
9332 || ! TREE_REAL_CST (arg1
).signalling
))
9333 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
9335 /* Transform fmin/fmax(x,x) -> x. */
9336 if (operand_equal_p (arg0
, arg1
, OEP_PURE_SAME
))
9337 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
9339 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9340 functions to return the numeric arg if the other one is NaN.
9341 These tree codes don't honor that, so only transform if
9342 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9343 handled, so we don't have to worry about it either. */
9344 if (flag_finite_math_only
)
9345 return fold_build2_loc (loc
, (max
? MAX_EXPR
: MIN_EXPR
), type
,
9346 fold_convert_loc (loc
, type
, arg0
),
9347 fold_convert_loc (loc
, type
, arg1
));
9352 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9355 fold_builtin_carg (location_t loc
, tree arg
, tree type
)
9357 if (validate_arg (arg
, COMPLEX_TYPE
)
9358 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) == REAL_TYPE
)
9360 tree atan2_fn
= mathfn_built_in (type
, BUILT_IN_ATAN2
);
9364 tree new_arg
= builtin_save_expr (arg
);
9365 tree r_arg
= fold_build1_loc (loc
, REALPART_EXPR
, type
, new_arg
);
9366 tree i_arg
= fold_build1_loc (loc
, IMAGPART_EXPR
, type
, new_arg
);
9367 return build_call_expr_loc (loc
, atan2_fn
, 2, i_arg
, r_arg
);
9374 /* Fold a call to builtin logb/ilogb. */
9377 fold_builtin_logb (location_t loc
, tree arg
, tree rettype
)
9379 if (! validate_arg (arg
, REAL_TYPE
))
9384 if (TREE_CODE (arg
) == REAL_CST
&& ! TREE_OVERFLOW (arg
))
9386 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg
);
9392 /* If arg is Inf or NaN and we're logb, return it. */
9393 if (TREE_CODE (rettype
) == REAL_TYPE
)
9394 return fold_convert_loc (loc
, rettype
, arg
);
9395 /* Fall through... */
9397 /* Zero may set errno and/or raise an exception for logb, also
9398 for ilogb we don't know FP_ILOGB0. */
9401 /* For normal numbers, proceed iff radix == 2. In GCC,
9402 normalized significands are in the range [0.5, 1.0). We
9403 want the exponent as if they were [1.0, 2.0) so get the
9404 exponent and subtract 1. */
9405 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg
)))->b
== 2)
9406 return fold_convert_loc (loc
, rettype
,
9407 build_int_cst (NULL_TREE
,
9408 REAL_EXP (value
)-1));
9416 /* Fold a call to builtin significand, if radix == 2. */
9419 fold_builtin_significand (location_t loc
, tree arg
, tree rettype
)
9421 if (! validate_arg (arg
, REAL_TYPE
))
9426 if (TREE_CODE (arg
) == REAL_CST
&& ! TREE_OVERFLOW (arg
))
9428 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg
);
9435 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9436 return fold_convert_loc (loc
, rettype
, arg
);
9438 /* For normal numbers, proceed iff radix == 2. */
9439 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg
)))->b
== 2)
9441 REAL_VALUE_TYPE result
= *value
;
9442 /* In GCC, normalized significands are in the range [0.5,
9443 1.0). We want them to be [1.0, 2.0) so set the
9445 SET_REAL_EXP (&result
, 1);
9446 return build_real (rettype
, result
);
9455 /* Fold a call to builtin frexp, we can assume the base is 2. */
9458 fold_builtin_frexp (location_t loc
, tree arg0
, tree arg1
, tree rettype
)
9460 if (! validate_arg (arg0
, REAL_TYPE
) || ! validate_arg (arg1
, POINTER_TYPE
))
9465 if (!(TREE_CODE (arg0
) == REAL_CST
&& ! TREE_OVERFLOW (arg0
)))
9468 arg1
= build_fold_indirect_ref_loc (loc
, arg1
);
9470 /* Proceed if a valid pointer type was passed in. */
9471 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1
)) == integer_type_node
)
9473 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg0
);
9479 /* For +-0, return (*exp = 0, +-0). */
9480 exp
= integer_zero_node
;
9485 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9486 return omit_one_operand_loc (loc
, rettype
, arg0
, arg1
);
9489 /* Since the frexp function always expects base 2, and in
9490 GCC normalized significands are already in the range
9491 [0.5, 1.0), we have exactly what frexp wants. */
9492 REAL_VALUE_TYPE frac_rvt
= *value
;
9493 SET_REAL_EXP (&frac_rvt
, 0);
9494 frac
= build_real (rettype
, frac_rvt
);
9495 exp
= build_int_cst (NULL_TREE
, REAL_EXP (value
));
9502 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9503 arg1
= fold_build2_loc (loc
, MODIFY_EXPR
, rettype
, arg1
, exp
);
9504 TREE_SIDE_EFFECTS (arg1
) = 1;
9505 return fold_build2_loc (loc
, COMPOUND_EXPR
, rettype
, arg1
, frac
);
9511 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9512 then we can assume the base is two. If it's false, then we have to
9513 check the mode of the TYPE parameter in certain cases. */
9516 fold_builtin_load_exponent (location_t loc
, tree arg0
, tree arg1
,
9517 tree type
, bool ldexp
)
9519 if (validate_arg (arg0
, REAL_TYPE
) && validate_arg (arg1
, INTEGER_TYPE
))
9524 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9525 if (real_zerop (arg0
) || integer_zerop (arg1
)
9526 || (TREE_CODE (arg0
) == REAL_CST
9527 && !real_isfinite (&TREE_REAL_CST (arg0
))))
9528 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
9530 /* If both arguments are constant, then try to evaluate it. */
9531 if ((ldexp
|| REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2)
9532 && TREE_CODE (arg0
) == REAL_CST
&& !TREE_OVERFLOW (arg0
)
9533 && host_integerp (arg1
, 0))
9535 /* Bound the maximum adjustment to twice the range of the
9536 mode's valid exponents. Use abs to ensure the range is
9537 positive as a sanity check. */
9538 const long max_exp_adj
= 2 *
9539 labs (REAL_MODE_FORMAT (TYPE_MODE (type
))->emax
9540 - REAL_MODE_FORMAT (TYPE_MODE (type
))->emin
);
9542 /* Get the user-requested adjustment. */
9543 const HOST_WIDE_INT req_exp_adj
= tree_low_cst (arg1
, 0);
9545 /* The requested adjustment must be inside this range. This
9546 is a preliminary cap to avoid things like overflow, we
9547 may still fail to compute the result for other reasons. */
9548 if (-max_exp_adj
< req_exp_adj
&& req_exp_adj
< max_exp_adj
)
9550 REAL_VALUE_TYPE initial_result
;
9552 real_ldexp (&initial_result
, &TREE_REAL_CST (arg0
), req_exp_adj
);
9554 /* Ensure we didn't overflow. */
9555 if (! real_isinf (&initial_result
))
9557 const REAL_VALUE_TYPE trunc_result
9558 = real_value_truncate (TYPE_MODE (type
), initial_result
);
9560 /* Only proceed if the target mode can hold the
9562 if (REAL_VALUES_EQUAL (initial_result
, trunc_result
))
9563 return build_real (type
, trunc_result
);
9572 /* Fold a call to builtin modf. */
9575 fold_builtin_modf (location_t loc
, tree arg0
, tree arg1
, tree rettype
)
9577 if (! validate_arg (arg0
, REAL_TYPE
) || ! validate_arg (arg1
, POINTER_TYPE
))
9582 if (!(TREE_CODE (arg0
) == REAL_CST
&& ! TREE_OVERFLOW (arg0
)))
9585 arg1
= build_fold_indirect_ref_loc (loc
, arg1
);
9587 /* Proceed if a valid pointer type was passed in. */
9588 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1
)) == TYPE_MAIN_VARIANT (rettype
))
9590 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg0
);
9591 REAL_VALUE_TYPE trunc
, frac
;
9597 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9598 trunc
= frac
= *value
;
9601 /* For +-Inf, return (*arg1 = arg0, +-0). */
9603 frac
.sign
= value
->sign
;
9607 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9608 real_trunc (&trunc
, VOIDmode
, value
);
9609 real_arithmetic (&frac
, MINUS_EXPR
, value
, &trunc
);
9610 /* If the original number was negative and already
9611 integral, then the fractional part is -0.0. */
9612 if (value
->sign
&& frac
.cl
== rvc_zero
)
9613 frac
.sign
= value
->sign
;
9617 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9618 arg1
= fold_build2_loc (loc
, MODIFY_EXPR
, rettype
, arg1
,
9619 build_real (rettype
, trunc
));
9620 TREE_SIDE_EFFECTS (arg1
) = 1;
9621 return fold_build2_loc (loc
, COMPOUND_EXPR
, rettype
, arg1
,
9622 build_real (rettype
, frac
));
9628 /* Given a location LOC, an interclass builtin function decl FNDECL
9629 and its single argument ARG, return an folded expression computing
9630 the same, or NULL_TREE if we either couldn't or didn't want to fold
9631 (the latter happen if there's an RTL instruction available). */
9634 fold_builtin_interclass_mathfn (location_t loc
, tree fndecl
, tree arg
)
9636 enum machine_mode mode
;
9638 if (!validate_arg (arg
, REAL_TYPE
))
9641 if (interclass_mathfn_icode (arg
, fndecl
) != CODE_FOR_nothing
)
9644 mode
= TYPE_MODE (TREE_TYPE (arg
));
9646 /* If there is no optab, try generic code. */
9647 switch (DECL_FUNCTION_CODE (fndecl
))
9651 CASE_FLT_FN (BUILT_IN_ISINF
):
9653 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9654 tree
const isgr_fn
= built_in_decls
[BUILT_IN_ISGREATER
];
9655 tree
const type
= TREE_TYPE (arg
);
9659 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
9660 real_from_string (&r
, buf
);
9661 result
= build_call_expr (isgr_fn
, 2,
9662 fold_build1_loc (loc
, ABS_EXPR
, type
, arg
),
9663 build_real (type
, r
));
9666 CASE_FLT_FN (BUILT_IN_FINITE
):
9667 case BUILT_IN_ISFINITE
:
9669 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9670 tree
const isle_fn
= built_in_decls
[BUILT_IN_ISLESSEQUAL
];
9671 tree
const type
= TREE_TYPE (arg
);
9675 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
9676 real_from_string (&r
, buf
);
9677 result
= build_call_expr (isle_fn
, 2,
9678 fold_build1_loc (loc
, ABS_EXPR
, type
, arg
),
9679 build_real (type
, r
));
9680 /*result = fold_build2_loc (loc, UNGT_EXPR,
9681 TREE_TYPE (TREE_TYPE (fndecl)),
9682 fold_build1_loc (loc, ABS_EXPR, type, arg),
9683 build_real (type, r));
9684 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9685 TREE_TYPE (TREE_TYPE (fndecl)),
9689 case BUILT_IN_ISNORMAL
:
9691 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9692 islessequal(fabs(x),DBL_MAX). */
9693 tree
const isle_fn
= built_in_decls
[BUILT_IN_ISLESSEQUAL
];
9694 tree
const isge_fn
= built_in_decls
[BUILT_IN_ISGREATEREQUAL
];
9695 tree
const type
= TREE_TYPE (arg
);
9696 REAL_VALUE_TYPE rmax
, rmin
;
9699 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
9700 real_from_string (&rmax
, buf
);
9701 sprintf (buf
, "0x1p%d", REAL_MODE_FORMAT (mode
)->emin
- 1);
9702 real_from_string (&rmin
, buf
);
9703 arg
= builtin_save_expr (fold_build1_loc (loc
, ABS_EXPR
, type
, arg
));
9704 result
= build_call_expr (isle_fn
, 2, arg
,
9705 build_real (type
, rmax
));
9706 result
= fold_build2 (BIT_AND_EXPR
, integer_type_node
, result
,
9707 build_call_expr (isge_fn
, 2, arg
,
9708 build_real (type
, rmin
)));
9718 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9719 ARG is the argument for the call. */
9722 fold_builtin_classify (location_t loc
, tree fndecl
, tree arg
, int builtin_index
)
9724 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9727 if (!validate_arg (arg
, REAL_TYPE
))
9730 switch (builtin_index
)
9732 case BUILT_IN_ISINF
:
9733 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg
))))
9734 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
9736 if (TREE_CODE (arg
) == REAL_CST
)
9738 r
= TREE_REAL_CST (arg
);
9739 if (real_isinf (&r
))
9740 return real_compare (GT_EXPR
, &r
, &dconst0
)
9741 ? integer_one_node
: integer_minus_one_node
;
9743 return integer_zero_node
;
9748 case BUILT_IN_ISINF_SIGN
:
9750 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9751 /* In a boolean context, GCC will fold the inner COND_EXPR to
9752 1. So e.g. "if (isinf_sign(x))" would be folded to just
9753 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9754 tree signbit_fn
= mathfn_built_in_1 (TREE_TYPE (arg
), BUILT_IN_SIGNBIT
, 0);
9755 tree isinf_fn
= built_in_decls
[BUILT_IN_ISINF
];
9756 tree tmp
= NULL_TREE
;
9758 arg
= builtin_save_expr (arg
);
9760 if (signbit_fn
&& isinf_fn
)
9762 tree signbit_call
= build_call_expr_loc (loc
, signbit_fn
, 1, arg
);
9763 tree isinf_call
= build_call_expr_loc (loc
, isinf_fn
, 1, arg
);
9765 signbit_call
= fold_build2_loc (loc
, NE_EXPR
, integer_type_node
,
9766 signbit_call
, integer_zero_node
);
9767 isinf_call
= fold_build2_loc (loc
, NE_EXPR
, integer_type_node
,
9768 isinf_call
, integer_zero_node
);
9770 tmp
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, signbit_call
,
9771 integer_minus_one_node
, integer_one_node
);
9772 tmp
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
,
9780 case BUILT_IN_ISFINITE
:
9781 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg
)))
9782 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg
))))
9783 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg
);
9785 if (TREE_CODE (arg
) == REAL_CST
)
9787 r
= TREE_REAL_CST (arg
);
9788 return real_isfinite (&r
) ? integer_one_node
: integer_zero_node
;
9793 case BUILT_IN_ISNAN
:
9794 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg
))))
9795 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
9797 if (TREE_CODE (arg
) == REAL_CST
)
9799 r
= TREE_REAL_CST (arg
);
9800 return real_isnan (&r
) ? integer_one_node
: integer_zero_node
;
9803 arg
= builtin_save_expr (arg
);
9804 return fold_build2_loc (loc
, UNORDERED_EXPR
, type
, arg
, arg
);
9811 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9812 This builtin will generate code to return the appropriate floating
9813 point classification depending on the value of the floating point
9814 number passed in. The possible return values must be supplied as
9815 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9816 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
9817 one floating point argument which is "type generic". */
9820 fold_builtin_fpclassify (location_t loc
, tree exp
)
9822 tree fp_nan
, fp_infinite
, fp_normal
, fp_subnormal
, fp_zero
,
9823 arg
, type
, res
, tmp
;
9824 enum machine_mode mode
;
9828 /* Verify the required arguments in the original call. */
9829 if (!validate_arglist (exp
, INTEGER_TYPE
, INTEGER_TYPE
,
9830 INTEGER_TYPE
, INTEGER_TYPE
,
9831 INTEGER_TYPE
, REAL_TYPE
, VOID_TYPE
))
9834 fp_nan
= CALL_EXPR_ARG (exp
, 0);
9835 fp_infinite
= CALL_EXPR_ARG (exp
, 1);
9836 fp_normal
= CALL_EXPR_ARG (exp
, 2);
9837 fp_subnormal
= CALL_EXPR_ARG (exp
, 3);
9838 fp_zero
= CALL_EXPR_ARG (exp
, 4);
9839 arg
= CALL_EXPR_ARG (exp
, 5);
9840 type
= TREE_TYPE (arg
);
9841 mode
= TYPE_MODE (type
);
9842 arg
= builtin_save_expr (fold_build1_loc (loc
, ABS_EXPR
, type
, arg
));
9846 (fabs(x) == Inf ? FP_INFINITE :
9847 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9848 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9850 tmp
= fold_build2_loc (loc
, EQ_EXPR
, integer_type_node
, arg
,
9851 build_real (type
, dconst0
));
9852 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
,
9853 tmp
, fp_zero
, fp_subnormal
);
9855 sprintf (buf
, "0x1p%d", REAL_MODE_FORMAT (mode
)->emin
- 1);
9856 real_from_string (&r
, buf
);
9857 tmp
= fold_build2_loc (loc
, GE_EXPR
, integer_type_node
,
9858 arg
, build_real (type
, r
));
9859 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, tmp
, fp_normal
, res
);
9861 if (HONOR_INFINITIES (mode
))
9864 tmp
= fold_build2_loc (loc
, EQ_EXPR
, integer_type_node
, arg
,
9865 build_real (type
, r
));
9866 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, tmp
,
9870 if (HONOR_NANS (mode
))
9872 tmp
= fold_build2_loc (loc
, ORDERED_EXPR
, integer_type_node
, arg
, arg
);
9873 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, tmp
, res
, fp_nan
);
9879 /* Fold a call to an unordered comparison function such as
9880 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9881 being called and ARG0 and ARG1 are the arguments for the call.
9882 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9883 the opposite of the desired result. UNORDERED_CODE is used
9884 for modes that can hold NaNs and ORDERED_CODE is used for
9888 fold_builtin_unordered_cmp (location_t loc
, tree fndecl
, tree arg0
, tree arg1
,
9889 enum tree_code unordered_code
,
9890 enum tree_code ordered_code
)
9892 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9893 enum tree_code code
;
9895 enum tree_code code0
, code1
;
9896 tree cmp_type
= NULL_TREE
;
9898 type0
= TREE_TYPE (arg0
);
9899 type1
= TREE_TYPE (arg1
);
9901 code0
= TREE_CODE (type0
);
9902 code1
= TREE_CODE (type1
);
9904 if (code0
== REAL_TYPE
&& code1
== REAL_TYPE
)
9905 /* Choose the wider of two real types. */
9906 cmp_type
= TYPE_PRECISION (type0
) >= TYPE_PRECISION (type1
)
9908 else if (code0
== REAL_TYPE
&& code1
== INTEGER_TYPE
)
9910 else if (code0
== INTEGER_TYPE
&& code1
== REAL_TYPE
)
9913 arg0
= fold_convert_loc (loc
, cmp_type
, arg0
);
9914 arg1
= fold_convert_loc (loc
, cmp_type
, arg1
);
9916 if (unordered_code
== UNORDERED_EXPR
)
9918 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))))
9919 return omit_two_operands_loc (loc
, type
, integer_zero_node
, arg0
, arg1
);
9920 return fold_build2_loc (loc
, UNORDERED_EXPR
, type
, arg0
, arg1
);
9923 code
= HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))) ? unordered_code
9925 return fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
,
9926 fold_build2_loc (loc
, code
, type
, arg0
, arg1
));
9929 /* Fold a call to built-in function FNDECL with 0 arguments.
9930 IGNORE is true if the result of the function call is ignored. This
9931 function returns NULL_TREE if no simplification was possible. */
9934 fold_builtin_0 (location_t loc
, tree fndecl
, bool ignore ATTRIBUTE_UNUSED
)
9936 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9937 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
9940 CASE_FLT_FN (BUILT_IN_INF
):
9941 case BUILT_IN_INFD32
:
9942 case BUILT_IN_INFD64
:
9943 case BUILT_IN_INFD128
:
9944 return fold_builtin_inf (loc
, type
, true);
9946 CASE_FLT_FN (BUILT_IN_HUGE_VAL
):
9947 return fold_builtin_inf (loc
, type
, false);
9949 case BUILT_IN_CLASSIFY_TYPE
:
9950 return fold_builtin_classify_type (NULL_TREE
);
9958 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9959 IGNORE is true if the result of the function call is ignored. This
9960 function returns NULL_TREE if no simplification was possible. */
9963 fold_builtin_1 (location_t loc
, tree fndecl
, tree arg0
, bool ignore
)
9965 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9966 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
9969 case BUILT_IN_CONSTANT_P
:
9971 tree val
= fold_builtin_constant_p (arg0
);
9973 /* Gimplification will pull the CALL_EXPR for the builtin out of
9974 an if condition. When not optimizing, we'll not CSE it back.
9975 To avoid link error types of regressions, return false now. */
9976 if (!val
&& !optimize
)
9977 val
= integer_zero_node
;
9982 case BUILT_IN_CLASSIFY_TYPE
:
9983 return fold_builtin_classify_type (arg0
);
9985 case BUILT_IN_STRLEN
:
9986 return fold_builtin_strlen (loc
, type
, arg0
);
9988 CASE_FLT_FN (BUILT_IN_FABS
):
9989 return fold_builtin_fabs (loc
, arg0
, type
);
9993 case BUILT_IN_LLABS
:
9994 case BUILT_IN_IMAXABS
:
9995 return fold_builtin_abs (loc
, arg0
, type
);
9997 CASE_FLT_FN (BUILT_IN_CONJ
):
9998 if (validate_arg (arg0
, COMPLEX_TYPE
)
9999 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10000 return fold_build1_loc (loc
, CONJ_EXPR
, type
, arg0
);
10003 CASE_FLT_FN (BUILT_IN_CREAL
):
10004 if (validate_arg (arg0
, COMPLEX_TYPE
)
10005 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10006 return non_lvalue_loc (loc
, fold_build1_loc (loc
, REALPART_EXPR
, type
, arg0
));;
10009 CASE_FLT_FN (BUILT_IN_CIMAG
):
10010 if (validate_arg (arg0
, COMPLEX_TYPE
)
10011 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10012 return non_lvalue_loc (loc
, fold_build1_loc (loc
, IMAGPART_EXPR
, type
, arg0
));
10015 CASE_FLT_FN (BUILT_IN_CCOS
):
10016 return fold_builtin_ccos(loc
, arg0
, type
, fndecl
, /*hyper=*/ false);
10018 CASE_FLT_FN (BUILT_IN_CCOSH
):
10019 return fold_builtin_ccos(loc
, arg0
, type
, fndecl
, /*hyper=*/ true);
10021 CASE_FLT_FN (BUILT_IN_CPROJ
):
10022 return fold_builtin_cproj(loc
, arg0
, type
);
10024 CASE_FLT_FN (BUILT_IN_CSIN
):
10025 if (validate_arg (arg0
, COMPLEX_TYPE
)
10026 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10027 return do_mpc_arg1 (arg0
, type
, mpc_sin
);
10030 CASE_FLT_FN (BUILT_IN_CSINH
):
10031 if (validate_arg (arg0
, COMPLEX_TYPE
)
10032 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10033 return do_mpc_arg1 (arg0
, type
, mpc_sinh
);
10036 CASE_FLT_FN (BUILT_IN_CTAN
):
10037 if (validate_arg (arg0
, COMPLEX_TYPE
)
10038 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10039 return do_mpc_arg1 (arg0
, type
, mpc_tan
);
10042 CASE_FLT_FN (BUILT_IN_CTANH
):
10043 if (validate_arg (arg0
, COMPLEX_TYPE
)
10044 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10045 return do_mpc_arg1 (arg0
, type
, mpc_tanh
);
10048 CASE_FLT_FN (BUILT_IN_CLOG
):
10049 if (validate_arg (arg0
, COMPLEX_TYPE
)
10050 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10051 return do_mpc_arg1 (arg0
, type
, mpc_log
);
10054 CASE_FLT_FN (BUILT_IN_CSQRT
):
10055 if (validate_arg (arg0
, COMPLEX_TYPE
)
10056 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10057 return do_mpc_arg1 (arg0
, type
, mpc_sqrt
);
10060 CASE_FLT_FN (BUILT_IN_CASIN
):
10061 if (validate_arg (arg0
, COMPLEX_TYPE
)
10062 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10063 return do_mpc_arg1 (arg0
, type
, mpc_asin
);
10066 CASE_FLT_FN (BUILT_IN_CACOS
):
10067 if (validate_arg (arg0
, COMPLEX_TYPE
)
10068 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10069 return do_mpc_arg1 (arg0
, type
, mpc_acos
);
10072 CASE_FLT_FN (BUILT_IN_CATAN
):
10073 if (validate_arg (arg0
, COMPLEX_TYPE
)
10074 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10075 return do_mpc_arg1 (arg0
, type
, mpc_atan
);
10078 CASE_FLT_FN (BUILT_IN_CASINH
):
10079 if (validate_arg (arg0
, COMPLEX_TYPE
)
10080 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10081 return do_mpc_arg1 (arg0
, type
, mpc_asinh
);
10084 CASE_FLT_FN (BUILT_IN_CACOSH
):
10085 if (validate_arg (arg0
, COMPLEX_TYPE
)
10086 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10087 return do_mpc_arg1 (arg0
, type
, mpc_acosh
);
10090 CASE_FLT_FN (BUILT_IN_CATANH
):
10091 if (validate_arg (arg0
, COMPLEX_TYPE
)
10092 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10093 return do_mpc_arg1 (arg0
, type
, mpc_atanh
);
10096 CASE_FLT_FN (BUILT_IN_CABS
):
10097 return fold_builtin_cabs (loc
, arg0
, type
, fndecl
);
10099 CASE_FLT_FN (BUILT_IN_CARG
):
10100 return fold_builtin_carg (loc
, arg0
, type
);
10102 CASE_FLT_FN (BUILT_IN_SQRT
):
10103 return fold_builtin_sqrt (loc
, arg0
, type
);
10105 CASE_FLT_FN (BUILT_IN_CBRT
):
10106 return fold_builtin_cbrt (loc
, arg0
, type
);
10108 CASE_FLT_FN (BUILT_IN_ASIN
):
10109 if (validate_arg (arg0
, REAL_TYPE
))
10110 return do_mpfr_arg1 (arg0
, type
, mpfr_asin
,
10111 &dconstm1
, &dconst1
, true);
10114 CASE_FLT_FN (BUILT_IN_ACOS
):
10115 if (validate_arg (arg0
, REAL_TYPE
))
10116 return do_mpfr_arg1 (arg0
, type
, mpfr_acos
,
10117 &dconstm1
, &dconst1
, true);
10120 CASE_FLT_FN (BUILT_IN_ATAN
):
10121 if (validate_arg (arg0
, REAL_TYPE
))
10122 return do_mpfr_arg1 (arg0
, type
, mpfr_atan
, NULL
, NULL
, 0);
10125 CASE_FLT_FN (BUILT_IN_ASINH
):
10126 if (validate_arg (arg0
, REAL_TYPE
))
10127 return do_mpfr_arg1 (arg0
, type
, mpfr_asinh
, NULL
, NULL
, 0);
10130 CASE_FLT_FN (BUILT_IN_ACOSH
):
10131 if (validate_arg (arg0
, REAL_TYPE
))
10132 return do_mpfr_arg1 (arg0
, type
, mpfr_acosh
,
10133 &dconst1
, NULL
, true);
10136 CASE_FLT_FN (BUILT_IN_ATANH
):
10137 if (validate_arg (arg0
, REAL_TYPE
))
10138 return do_mpfr_arg1 (arg0
, type
, mpfr_atanh
,
10139 &dconstm1
, &dconst1
, false);
10142 CASE_FLT_FN (BUILT_IN_SIN
):
10143 if (validate_arg (arg0
, REAL_TYPE
))
10144 return do_mpfr_arg1 (arg0
, type
, mpfr_sin
, NULL
, NULL
, 0);
10147 CASE_FLT_FN (BUILT_IN_COS
):
10148 return fold_builtin_cos (loc
, arg0
, type
, fndecl
);
10150 CASE_FLT_FN (BUILT_IN_TAN
):
10151 return fold_builtin_tan (arg0
, type
);
10153 CASE_FLT_FN (BUILT_IN_CEXP
):
10154 return fold_builtin_cexp (loc
, arg0
, type
);
10156 CASE_FLT_FN (BUILT_IN_CEXPI
):
10157 if (validate_arg (arg0
, REAL_TYPE
))
10158 return do_mpfr_sincos (arg0
, NULL_TREE
, NULL_TREE
);
10161 CASE_FLT_FN (BUILT_IN_SINH
):
10162 if (validate_arg (arg0
, REAL_TYPE
))
10163 return do_mpfr_arg1 (arg0
, type
, mpfr_sinh
, NULL
, NULL
, 0);
10166 CASE_FLT_FN (BUILT_IN_COSH
):
10167 return fold_builtin_cosh (loc
, arg0
, type
, fndecl
);
10169 CASE_FLT_FN (BUILT_IN_TANH
):
10170 if (validate_arg (arg0
, REAL_TYPE
))
10171 return do_mpfr_arg1 (arg0
, type
, mpfr_tanh
, NULL
, NULL
, 0);
10174 CASE_FLT_FN (BUILT_IN_ERF
):
10175 if (validate_arg (arg0
, REAL_TYPE
))
10176 return do_mpfr_arg1 (arg0
, type
, mpfr_erf
, NULL
, NULL
, 0);
10179 CASE_FLT_FN (BUILT_IN_ERFC
):
10180 if (validate_arg (arg0
, REAL_TYPE
))
10181 return do_mpfr_arg1 (arg0
, type
, mpfr_erfc
, NULL
, NULL
, 0);
10184 CASE_FLT_FN (BUILT_IN_TGAMMA
):
10185 if (validate_arg (arg0
, REAL_TYPE
))
10186 return do_mpfr_arg1 (arg0
, type
, mpfr_gamma
, NULL
, NULL
, 0);
10189 CASE_FLT_FN (BUILT_IN_EXP
):
10190 return fold_builtin_exponent (loc
, fndecl
, arg0
, mpfr_exp
);
10192 CASE_FLT_FN (BUILT_IN_EXP2
):
10193 return fold_builtin_exponent (loc
, fndecl
, arg0
, mpfr_exp2
);
10195 CASE_FLT_FN (BUILT_IN_EXP10
):
10196 CASE_FLT_FN (BUILT_IN_POW10
):
10197 return fold_builtin_exponent (loc
, fndecl
, arg0
, mpfr_exp10
);
10199 CASE_FLT_FN (BUILT_IN_EXPM1
):
10200 if (validate_arg (arg0
, REAL_TYPE
))
10201 return do_mpfr_arg1 (arg0
, type
, mpfr_expm1
, NULL
, NULL
, 0);
10204 CASE_FLT_FN (BUILT_IN_LOG
):
10205 return fold_builtin_logarithm (loc
, fndecl
, arg0
, mpfr_log
);
10207 CASE_FLT_FN (BUILT_IN_LOG2
):
10208 return fold_builtin_logarithm (loc
, fndecl
, arg0
, mpfr_log2
);
10210 CASE_FLT_FN (BUILT_IN_LOG10
):
10211 return fold_builtin_logarithm (loc
, fndecl
, arg0
, mpfr_log10
);
10213 CASE_FLT_FN (BUILT_IN_LOG1P
):
10214 if (validate_arg (arg0
, REAL_TYPE
))
10215 return do_mpfr_arg1 (arg0
, type
, mpfr_log1p
,
10216 &dconstm1
, NULL
, false);
10219 CASE_FLT_FN (BUILT_IN_J0
):
10220 if (validate_arg (arg0
, REAL_TYPE
))
10221 return do_mpfr_arg1 (arg0
, type
, mpfr_j0
,
10225 CASE_FLT_FN (BUILT_IN_J1
):
10226 if (validate_arg (arg0
, REAL_TYPE
))
10227 return do_mpfr_arg1 (arg0
, type
, mpfr_j1
,
10231 CASE_FLT_FN (BUILT_IN_Y0
):
10232 if (validate_arg (arg0
, REAL_TYPE
))
10233 return do_mpfr_arg1 (arg0
, type
, mpfr_y0
,
10234 &dconst0
, NULL
, false);
10237 CASE_FLT_FN (BUILT_IN_Y1
):
10238 if (validate_arg (arg0
, REAL_TYPE
))
10239 return do_mpfr_arg1 (arg0
, type
, mpfr_y1
,
10240 &dconst0
, NULL
, false);
10243 CASE_FLT_FN (BUILT_IN_NAN
):
10244 case BUILT_IN_NAND32
:
10245 case BUILT_IN_NAND64
:
10246 case BUILT_IN_NAND128
:
10247 return fold_builtin_nan (arg0
, type
, true);
10249 CASE_FLT_FN (BUILT_IN_NANS
):
10250 return fold_builtin_nan (arg0
, type
, false);
10252 CASE_FLT_FN (BUILT_IN_FLOOR
):
10253 return fold_builtin_floor (loc
, fndecl
, arg0
);
10255 CASE_FLT_FN (BUILT_IN_CEIL
):
10256 return fold_builtin_ceil (loc
, fndecl
, arg0
);
10258 CASE_FLT_FN (BUILT_IN_TRUNC
):
10259 return fold_builtin_trunc (loc
, fndecl
, arg0
);
10261 CASE_FLT_FN (BUILT_IN_ROUND
):
10262 return fold_builtin_round (loc
, fndecl
, arg0
);
10264 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
10265 CASE_FLT_FN (BUILT_IN_RINT
):
10266 return fold_trunc_transparent_mathfn (loc
, fndecl
, arg0
);
10268 CASE_FLT_FN (BUILT_IN_LCEIL
):
10269 CASE_FLT_FN (BUILT_IN_LLCEIL
):
10270 CASE_FLT_FN (BUILT_IN_LFLOOR
):
10271 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
10272 CASE_FLT_FN (BUILT_IN_LROUND
):
10273 CASE_FLT_FN (BUILT_IN_LLROUND
):
10274 return fold_builtin_int_roundingfn (loc
, fndecl
, arg0
);
10276 CASE_FLT_FN (BUILT_IN_LRINT
):
10277 CASE_FLT_FN (BUILT_IN_LLRINT
):
10278 return fold_fixed_mathfn (loc
, fndecl
, arg0
);
10280 case BUILT_IN_BSWAP32
:
10281 case BUILT_IN_BSWAP64
:
10282 return fold_builtin_bswap (fndecl
, arg0
);
10284 CASE_INT_FN (BUILT_IN_FFS
):
10285 CASE_INT_FN (BUILT_IN_CLZ
):
10286 CASE_INT_FN (BUILT_IN_CTZ
):
10287 CASE_INT_FN (BUILT_IN_POPCOUNT
):
10288 CASE_INT_FN (BUILT_IN_PARITY
):
10289 return fold_builtin_bitop (fndecl
, arg0
);
10291 CASE_FLT_FN (BUILT_IN_SIGNBIT
):
10292 return fold_builtin_signbit (loc
, arg0
, type
);
10294 CASE_FLT_FN (BUILT_IN_SIGNIFICAND
):
10295 return fold_builtin_significand (loc
, arg0
, type
);
10297 CASE_FLT_FN (BUILT_IN_ILOGB
):
10298 CASE_FLT_FN (BUILT_IN_LOGB
):
10299 return fold_builtin_logb (loc
, arg0
, type
);
10301 case BUILT_IN_ISASCII
:
10302 return fold_builtin_isascii (loc
, arg0
);
10304 case BUILT_IN_TOASCII
:
10305 return fold_builtin_toascii (loc
, arg0
);
10307 case BUILT_IN_ISDIGIT
:
10308 return fold_builtin_isdigit (loc
, arg0
);
10310 CASE_FLT_FN (BUILT_IN_FINITE
):
10311 case BUILT_IN_FINITED32
:
10312 case BUILT_IN_FINITED64
:
10313 case BUILT_IN_FINITED128
:
10314 case BUILT_IN_ISFINITE
:
10316 tree ret
= fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISFINITE
);
10319 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
10322 CASE_FLT_FN (BUILT_IN_ISINF
):
10323 case BUILT_IN_ISINFD32
:
10324 case BUILT_IN_ISINFD64
:
10325 case BUILT_IN_ISINFD128
:
10327 tree ret
= fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISINF
);
10330 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
10333 case BUILT_IN_ISNORMAL
:
10334 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
10336 case BUILT_IN_ISINF_SIGN
:
10337 return fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISINF_SIGN
);
10339 CASE_FLT_FN (BUILT_IN_ISNAN
):
10340 case BUILT_IN_ISNAND32
:
10341 case BUILT_IN_ISNAND64
:
10342 case BUILT_IN_ISNAND128
:
10343 return fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISNAN
);
10345 case BUILT_IN_PRINTF
:
10346 case BUILT_IN_PRINTF_UNLOCKED
:
10347 case BUILT_IN_VPRINTF
:
10348 return fold_builtin_printf (loc
, fndecl
, arg0
, NULL_TREE
, ignore
, fcode
);
10350 case BUILT_IN_FREE
:
10351 if (integer_zerop (arg0
))
10352 return build_empty_stmt (loc
);
10363 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10364 IGNORE is true if the result of the function call is ignored. This
10365 function returns NULL_TREE if no simplification was possible. */
10368 fold_builtin_2 (location_t loc
, tree fndecl
, tree arg0
, tree arg1
, bool ignore
)
10370 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
10371 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10375 CASE_FLT_FN (BUILT_IN_JN
):
10376 if (validate_arg (arg0
, INTEGER_TYPE
)
10377 && validate_arg (arg1
, REAL_TYPE
))
10378 return do_mpfr_bessel_n (arg0
, arg1
, type
, mpfr_jn
, NULL
, 0);
10381 CASE_FLT_FN (BUILT_IN_YN
):
10382 if (validate_arg (arg0
, INTEGER_TYPE
)
10383 && validate_arg (arg1
, REAL_TYPE
))
10384 return do_mpfr_bessel_n (arg0
, arg1
, type
, mpfr_yn
,
10388 CASE_FLT_FN (BUILT_IN_DREM
):
10389 CASE_FLT_FN (BUILT_IN_REMAINDER
):
10390 if (validate_arg (arg0
, REAL_TYPE
)
10391 && validate_arg(arg1
, REAL_TYPE
))
10392 return do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_remainder
);
10395 CASE_FLT_FN_REENT (BUILT_IN_GAMMA
): /* GAMMA_R */
10396 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA
): /* LGAMMA_R */
10397 if (validate_arg (arg0
, REAL_TYPE
)
10398 && validate_arg(arg1
, POINTER_TYPE
))
10399 return do_mpfr_lgamma_r (arg0
, arg1
, type
);
10402 CASE_FLT_FN (BUILT_IN_ATAN2
):
10403 if (validate_arg (arg0
, REAL_TYPE
)
10404 && validate_arg(arg1
, REAL_TYPE
))
10405 return do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_atan2
);
10408 CASE_FLT_FN (BUILT_IN_FDIM
):
10409 if (validate_arg (arg0
, REAL_TYPE
)
10410 && validate_arg(arg1
, REAL_TYPE
))
10411 return do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_dim
);
10414 CASE_FLT_FN (BUILT_IN_HYPOT
):
10415 return fold_builtin_hypot (loc
, fndecl
, arg0
, arg1
, type
);
10417 CASE_FLT_FN (BUILT_IN_CPOW
):
10418 if (validate_arg (arg0
, COMPLEX_TYPE
)
10419 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
10420 && validate_arg (arg1
, COMPLEX_TYPE
)
10421 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1
))) == REAL_TYPE
)
10422 return do_mpc_arg2 (arg0
, arg1
, type
, /*do_nonfinite=*/ 0, mpc_pow
);
10425 CASE_FLT_FN (BUILT_IN_LDEXP
):
10426 return fold_builtin_load_exponent (loc
, arg0
, arg1
, type
, /*ldexp=*/true);
10427 CASE_FLT_FN (BUILT_IN_SCALBN
):
10428 CASE_FLT_FN (BUILT_IN_SCALBLN
):
10429 return fold_builtin_load_exponent (loc
, arg0
, arg1
,
10430 type
, /*ldexp=*/false);
10432 CASE_FLT_FN (BUILT_IN_FREXP
):
10433 return fold_builtin_frexp (loc
, arg0
, arg1
, type
);
10435 CASE_FLT_FN (BUILT_IN_MODF
):
10436 return fold_builtin_modf (loc
, arg0
, arg1
, type
);
10438 case BUILT_IN_BZERO
:
10439 return fold_builtin_bzero (loc
, arg0
, arg1
, ignore
);
10441 case BUILT_IN_FPUTS
:
10442 return fold_builtin_fputs (loc
, arg0
, arg1
, ignore
, false, NULL_TREE
);
10444 case BUILT_IN_FPUTS_UNLOCKED
:
10445 return fold_builtin_fputs (loc
, arg0
, arg1
, ignore
, true, NULL_TREE
);
10447 case BUILT_IN_STRSTR
:
10448 return fold_builtin_strstr (loc
, arg0
, arg1
, type
);
10450 case BUILT_IN_STRCAT
:
10451 return fold_builtin_strcat (loc
, arg0
, arg1
);
10453 case BUILT_IN_STRSPN
:
10454 return fold_builtin_strspn (loc
, arg0
, arg1
);
10456 case BUILT_IN_STRCSPN
:
10457 return fold_builtin_strcspn (loc
, arg0
, arg1
);
10459 case BUILT_IN_STRCHR
:
10460 case BUILT_IN_INDEX
:
10461 return fold_builtin_strchr (loc
, arg0
, arg1
, type
);
10463 case BUILT_IN_STRRCHR
:
10464 case BUILT_IN_RINDEX
:
10465 return fold_builtin_strrchr (loc
, arg0
, arg1
, type
);
10467 case BUILT_IN_STRCPY
:
10468 return fold_builtin_strcpy (loc
, fndecl
, arg0
, arg1
, NULL_TREE
);
10470 case BUILT_IN_STPCPY
:
10473 tree fn
= implicit_built_in_decls
[BUILT_IN_STRCPY
];
10477 return build_call_expr_loc (loc
, fn
, 2, arg0
, arg1
);
10480 return fold_builtin_stpcpy (loc
, fndecl
, arg0
, arg1
);
10483 case BUILT_IN_STRCMP
:
10484 return fold_builtin_strcmp (loc
, arg0
, arg1
);
10486 case BUILT_IN_STRPBRK
:
10487 return fold_builtin_strpbrk (loc
, arg0
, arg1
, type
);
10489 case BUILT_IN_EXPECT
:
10490 return fold_builtin_expect (loc
, arg0
, arg1
);
10492 CASE_FLT_FN (BUILT_IN_POW
):
10493 return fold_builtin_pow (loc
, fndecl
, arg0
, arg1
, type
);
10495 CASE_FLT_FN (BUILT_IN_POWI
):
10496 return fold_builtin_powi (loc
, fndecl
, arg0
, arg1
, type
);
10498 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
10499 return fold_builtin_copysign (loc
, fndecl
, arg0
, arg1
, type
);
10501 CASE_FLT_FN (BUILT_IN_FMIN
):
10502 return fold_builtin_fmin_fmax (loc
, arg0
, arg1
, type
, /*max=*/false);
10504 CASE_FLT_FN (BUILT_IN_FMAX
):
10505 return fold_builtin_fmin_fmax (loc
, arg0
, arg1
, type
, /*max=*/true);
10507 case BUILT_IN_ISGREATER
:
10508 return fold_builtin_unordered_cmp (loc
, fndecl
,
10509 arg0
, arg1
, UNLE_EXPR
, LE_EXPR
);
10510 case BUILT_IN_ISGREATEREQUAL
:
10511 return fold_builtin_unordered_cmp (loc
, fndecl
,
10512 arg0
, arg1
, UNLT_EXPR
, LT_EXPR
);
10513 case BUILT_IN_ISLESS
:
10514 return fold_builtin_unordered_cmp (loc
, fndecl
,
10515 arg0
, arg1
, UNGE_EXPR
, GE_EXPR
);
10516 case BUILT_IN_ISLESSEQUAL
:
10517 return fold_builtin_unordered_cmp (loc
, fndecl
,
10518 arg0
, arg1
, UNGT_EXPR
, GT_EXPR
);
10519 case BUILT_IN_ISLESSGREATER
:
10520 return fold_builtin_unordered_cmp (loc
, fndecl
,
10521 arg0
, arg1
, UNEQ_EXPR
, EQ_EXPR
);
10522 case BUILT_IN_ISUNORDERED
:
10523 return fold_builtin_unordered_cmp (loc
, fndecl
,
10524 arg0
, arg1
, UNORDERED_EXPR
,
10527 /* We do the folding for va_start in the expander. */
10528 case BUILT_IN_VA_START
:
10531 case BUILT_IN_SPRINTF
:
10532 return fold_builtin_sprintf (loc
, arg0
, arg1
, NULL_TREE
, ignore
);
10534 case BUILT_IN_OBJECT_SIZE
:
10535 return fold_builtin_object_size (arg0
, arg1
);
10537 case BUILT_IN_PRINTF
:
10538 case BUILT_IN_PRINTF_UNLOCKED
:
10539 case BUILT_IN_VPRINTF
:
10540 return fold_builtin_printf (loc
, fndecl
, arg0
, arg1
, ignore
, fcode
);
10542 case BUILT_IN_PRINTF_CHK
:
10543 case BUILT_IN_VPRINTF_CHK
:
10544 if (!validate_arg (arg0
, INTEGER_TYPE
)
10545 || TREE_SIDE_EFFECTS (arg0
))
10548 return fold_builtin_printf (loc
, fndecl
,
10549 arg1
, NULL_TREE
, ignore
, fcode
);
10552 case BUILT_IN_FPRINTF
:
10553 case BUILT_IN_FPRINTF_UNLOCKED
:
10554 case BUILT_IN_VFPRINTF
:
10555 return fold_builtin_fprintf (loc
, fndecl
, arg0
, arg1
, NULL_TREE
,
10564 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10565 and ARG2. IGNORE is true if the result of the function call is ignored.
10566 This function returns NULL_TREE if no simplification was possible. */
10569 fold_builtin_3 (location_t loc
, tree fndecl
,
10570 tree arg0
, tree arg1
, tree arg2
, bool ignore
)
10572 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
10573 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10577 CASE_FLT_FN (BUILT_IN_SINCOS
):
10578 return fold_builtin_sincos (loc
, arg0
, arg1
, arg2
);
10580 CASE_FLT_FN (BUILT_IN_FMA
):
10581 return fold_builtin_fma (loc
, arg0
, arg1
, arg2
, type
);
10584 CASE_FLT_FN (BUILT_IN_REMQUO
):
10585 if (validate_arg (arg0
, REAL_TYPE
)
10586 && validate_arg(arg1
, REAL_TYPE
)
10587 && validate_arg(arg2
, POINTER_TYPE
))
10588 return do_mpfr_remquo (arg0
, arg1
, arg2
);
10591 case BUILT_IN_MEMSET
:
10592 return fold_builtin_memset (loc
, arg0
, arg1
, arg2
, type
, ignore
);
10594 case BUILT_IN_BCOPY
:
10595 return fold_builtin_memory_op (loc
, arg1
, arg0
, arg2
,
10596 void_type_node
, true, /*endp=*/3);
10598 case BUILT_IN_MEMCPY
:
10599 return fold_builtin_memory_op (loc
, arg0
, arg1
, arg2
,
10600 type
, ignore
, /*endp=*/0);
10602 case BUILT_IN_MEMPCPY
:
10603 return fold_builtin_memory_op (loc
, arg0
, arg1
, arg2
,
10604 type
, ignore
, /*endp=*/1);
10606 case BUILT_IN_MEMMOVE
:
10607 return fold_builtin_memory_op (loc
, arg0
, arg1
, arg2
,
10608 type
, ignore
, /*endp=*/3);
10610 case BUILT_IN_STRNCAT
:
10611 return fold_builtin_strncat (loc
, arg0
, arg1
, arg2
);
10613 case BUILT_IN_STRNCPY
:
10614 return fold_builtin_strncpy (loc
, fndecl
, arg0
, arg1
, arg2
, NULL_TREE
);
10616 case BUILT_IN_STRNCMP
:
10617 return fold_builtin_strncmp (loc
, arg0
, arg1
, arg2
);
10619 case BUILT_IN_MEMCHR
:
10620 return fold_builtin_memchr (loc
, arg0
, arg1
, arg2
, type
);
10622 case BUILT_IN_BCMP
:
10623 case BUILT_IN_MEMCMP
:
10624 return fold_builtin_memcmp (loc
, arg0
, arg1
, arg2
);;
10626 case BUILT_IN_SPRINTF
:
10627 return fold_builtin_sprintf (loc
, arg0
, arg1
, arg2
, ignore
);
10629 case BUILT_IN_STRCPY_CHK
:
10630 case BUILT_IN_STPCPY_CHK
:
10631 return fold_builtin_stxcpy_chk (loc
, fndecl
, arg0
, arg1
, arg2
, NULL_TREE
,
10634 case BUILT_IN_STRCAT_CHK
:
10635 return fold_builtin_strcat_chk (loc
, fndecl
, arg0
, arg1
, arg2
);
10637 case BUILT_IN_PRINTF_CHK
:
10638 case BUILT_IN_VPRINTF_CHK
:
10639 if (!validate_arg (arg0
, INTEGER_TYPE
)
10640 || TREE_SIDE_EFFECTS (arg0
))
10643 return fold_builtin_printf (loc
, fndecl
, arg1
, arg2
, ignore
, fcode
);
10646 case BUILT_IN_FPRINTF
:
10647 case BUILT_IN_FPRINTF_UNLOCKED
:
10648 case BUILT_IN_VFPRINTF
:
10649 return fold_builtin_fprintf (loc
, fndecl
, arg0
, arg1
, arg2
,
10652 case BUILT_IN_FPRINTF_CHK
:
10653 case BUILT_IN_VFPRINTF_CHK
:
10654 if (!validate_arg (arg1
, INTEGER_TYPE
)
10655 || TREE_SIDE_EFFECTS (arg1
))
10658 return fold_builtin_fprintf (loc
, fndecl
, arg0
, arg2
, NULL_TREE
,
10667 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10668 ARG2, and ARG3. IGNORE is true if the result of the function call is
10669 ignored. This function returns NULL_TREE if no simplification was
10673 fold_builtin_4 (location_t loc
, tree fndecl
,
10674 tree arg0
, tree arg1
, tree arg2
, tree arg3
, bool ignore
)
10676 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10680 case BUILT_IN_MEMCPY_CHK
:
10681 case BUILT_IN_MEMPCPY_CHK
:
10682 case BUILT_IN_MEMMOVE_CHK
:
10683 case BUILT_IN_MEMSET_CHK
:
10684 return fold_builtin_memory_chk (loc
, fndecl
, arg0
, arg1
, arg2
, arg3
,
10686 DECL_FUNCTION_CODE (fndecl
));
10688 case BUILT_IN_STRNCPY_CHK
:
10689 return fold_builtin_strncpy_chk (loc
, arg0
, arg1
, arg2
, arg3
, NULL_TREE
);
10691 case BUILT_IN_STRNCAT_CHK
:
10692 return fold_builtin_strncat_chk (loc
, fndecl
, arg0
, arg1
, arg2
, arg3
);
10694 case BUILT_IN_FPRINTF_CHK
:
10695 case BUILT_IN_VFPRINTF_CHK
:
10696 if (!validate_arg (arg1
, INTEGER_TYPE
)
10697 || TREE_SIDE_EFFECTS (arg1
))
10700 return fold_builtin_fprintf (loc
, fndecl
, arg0
, arg2
, arg3
,
10710 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10711 arguments, where NARGS <= 4. IGNORE is true if the result of the
10712 function call is ignored. This function returns NULL_TREE if no
10713 simplification was possible. Note that this only folds builtins with
10714 fixed argument patterns. Foldings that do varargs-to-varargs
10715 transformations, or that match calls with more than 4 arguments,
10716 need to be handled with fold_builtin_varargs instead. */
10718 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10721 fold_builtin_n (location_t loc
, tree fndecl
, tree
*args
, int nargs
, bool ignore
)
10723 tree ret
= NULL_TREE
;
10728 ret
= fold_builtin_0 (loc
, fndecl
, ignore
);
10731 ret
= fold_builtin_1 (loc
, fndecl
, args
[0], ignore
);
10734 ret
= fold_builtin_2 (loc
, fndecl
, args
[0], args
[1], ignore
);
10737 ret
= fold_builtin_3 (loc
, fndecl
, args
[0], args
[1], args
[2], ignore
);
10740 ret
= fold_builtin_4 (loc
, fndecl
, args
[0], args
[1], args
[2], args
[3],
10748 ret
= build1 (NOP_EXPR
, TREE_TYPE (ret
), ret
);
10749 SET_EXPR_LOCATION (ret
, loc
);
10750 TREE_NO_WARNING (ret
) = 1;
10756 /* Builtins with folding operations that operate on "..." arguments
10757 need special handling; we need to store the arguments in a convenient
10758 data structure before attempting any folding. Fortunately there are
10759 only a few builtins that fall into this category. FNDECL is the
10760 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10761 result of the function call is ignored. */
10764 fold_builtin_varargs (location_t loc
, tree fndecl
, tree exp
,
10765 bool ignore ATTRIBUTE_UNUSED
)
10767 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10768 tree ret
= NULL_TREE
;
10772 case BUILT_IN_SPRINTF_CHK
:
10773 case BUILT_IN_VSPRINTF_CHK
:
10774 ret
= fold_builtin_sprintf_chk (loc
, exp
, fcode
);
10777 case BUILT_IN_SNPRINTF_CHK
:
10778 case BUILT_IN_VSNPRINTF_CHK
:
10779 ret
= fold_builtin_snprintf_chk (loc
, exp
, NULL_TREE
, fcode
);
10782 case BUILT_IN_FPCLASSIFY
:
10783 ret
= fold_builtin_fpclassify (loc
, exp
);
10791 ret
= build1 (NOP_EXPR
, TREE_TYPE (ret
), ret
);
10792 SET_EXPR_LOCATION (ret
, loc
);
10793 TREE_NO_WARNING (ret
) = 1;
10799 /* Return true if FNDECL shouldn't be folded right now.
10800 If a built-in function has an inline attribute always_inline
10801 wrapper, defer folding it after always_inline functions have
10802 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10803 might not be performed. */
10806 avoid_folding_inline_builtin (tree fndecl
)
10808 return (DECL_DECLARED_INLINE_P (fndecl
)
10809 && DECL_DISREGARD_INLINE_LIMITS (fndecl
)
10811 && !cfun
->always_inline_functions_inlined
10812 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl
)));
10815 /* A wrapper function for builtin folding that prevents warnings for
10816 "statement without effect" and the like, caused by removing the
10817 call node earlier than the warning is generated. */
10820 fold_call_expr (location_t loc
, tree exp
, bool ignore
)
10822 tree ret
= NULL_TREE
;
10823 tree fndecl
= get_callee_fndecl (exp
);
10825 && TREE_CODE (fndecl
) == FUNCTION_DECL
10826 && DECL_BUILT_IN (fndecl
)
10827 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10828 yet. Defer folding until we see all the arguments
10829 (after inlining). */
10830 && !CALL_EXPR_VA_ARG_PACK (exp
))
10832 int nargs
= call_expr_nargs (exp
);
10834 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10835 instead last argument is __builtin_va_arg_pack (). Defer folding
10836 even in that case, until arguments are finalized. */
10837 if (nargs
&& TREE_CODE (CALL_EXPR_ARG (exp
, nargs
- 1)) == CALL_EXPR
)
10839 tree fndecl2
= get_callee_fndecl (CALL_EXPR_ARG (exp
, nargs
- 1));
10841 && TREE_CODE (fndecl2
) == FUNCTION_DECL
10842 && DECL_BUILT_IN_CLASS (fndecl2
) == BUILT_IN_NORMAL
10843 && DECL_FUNCTION_CODE (fndecl2
) == BUILT_IN_VA_ARG_PACK
)
10847 if (avoid_folding_inline_builtin (fndecl
))
10850 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
10851 return targetm
.fold_builtin (fndecl
, call_expr_nargs (exp
),
10852 CALL_EXPR_ARGP (exp
), ignore
);
10855 if (nargs
<= MAX_ARGS_TO_FOLD_BUILTIN
)
10857 tree
*args
= CALL_EXPR_ARGP (exp
);
10858 ret
= fold_builtin_n (loc
, fndecl
, args
, nargs
, ignore
);
10861 ret
= fold_builtin_varargs (loc
, fndecl
, exp
, ignore
);
10869 /* Conveniently construct a function call expression. FNDECL names the
10870 function to be called and N arguments are passed in the array
10874 build_call_expr_loc_array (location_t loc
, tree fndecl
, int n
, tree
*argarray
)
10876 tree fntype
= TREE_TYPE (fndecl
);
10877 tree fn
= build1 (ADDR_EXPR
, build_pointer_type (fntype
), fndecl
);
10879 return fold_builtin_call_array (loc
, TREE_TYPE (fntype
), fn
, n
, argarray
);
10882 /* Conveniently construct a function call expression. FNDECL names the
10883 function to be called and the arguments are passed in the vector
10887 build_call_expr_loc_vec (location_t loc
, tree fndecl
, VEC(tree
,gc
) *vec
)
10889 return build_call_expr_loc_array (loc
, fndecl
, VEC_length (tree
, vec
),
10890 VEC_address (tree
, vec
));
10894 /* Conveniently construct a function call expression. FNDECL names the
10895 function to be called, N is the number of arguments, and the "..."
10896 parameters are the argument expressions. */
10899 build_call_expr_loc (location_t loc
, tree fndecl
, int n
, ...)
10902 tree
*argarray
= XALLOCAVEC (tree
, n
);
10906 for (i
= 0; i
< n
; i
++)
10907 argarray
[i
] = va_arg (ap
, tree
);
10909 return build_call_expr_loc_array (loc
, fndecl
, n
, argarray
);
10912 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10913 varargs macros aren't supported by all bootstrap compilers. */
10916 build_call_expr (tree fndecl
, int n
, ...)
10919 tree
*argarray
= XALLOCAVEC (tree
, n
);
10923 for (i
= 0; i
< n
; i
++)
10924 argarray
[i
] = va_arg (ap
, tree
);
10926 return build_call_expr_loc_array (UNKNOWN_LOCATION
, fndecl
, n
, argarray
);
10929 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10930 N arguments are passed in the array ARGARRAY. */
10933 fold_builtin_call_array (location_t loc
, tree type
,
10938 tree ret
= NULL_TREE
;
10941 if (TREE_CODE (fn
) == ADDR_EXPR
)
10943 tree fndecl
= TREE_OPERAND (fn
, 0);
10944 if (TREE_CODE (fndecl
) == FUNCTION_DECL
10945 && DECL_BUILT_IN (fndecl
))
10947 /* If last argument is __builtin_va_arg_pack (), arguments to this
10948 function are not finalized yet. Defer folding until they are. */
10949 if (n
&& TREE_CODE (argarray
[n
- 1]) == CALL_EXPR
)
10951 tree fndecl2
= get_callee_fndecl (argarray
[n
- 1]);
10953 && TREE_CODE (fndecl2
) == FUNCTION_DECL
10954 && DECL_BUILT_IN_CLASS (fndecl2
) == BUILT_IN_NORMAL
10955 && DECL_FUNCTION_CODE (fndecl2
) == BUILT_IN_VA_ARG_PACK
)
10956 return build_call_array_loc (loc
, type
, fn
, n
, argarray
);
10958 if (avoid_folding_inline_builtin (fndecl
))
10959 return build_call_array_loc (loc
, type
, fn
, n
, argarray
);
10960 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
10962 ret
= targetm
.fold_builtin (fndecl
, n
, argarray
, false);
10966 return build_call_array_loc (loc
, type
, fn
, n
, argarray
);
10968 else if (n
<= MAX_ARGS_TO_FOLD_BUILTIN
)
10970 /* First try the transformations that don't require consing up
10972 ret
= fold_builtin_n (loc
, fndecl
, argarray
, n
, false);
10977 /* If we got this far, we need to build an exp. */
10978 exp
= build_call_array_loc (loc
, type
, fn
, n
, argarray
);
10979 ret
= fold_builtin_varargs (loc
, fndecl
, exp
, false);
10980 return ret
? ret
: exp
;
10984 return build_call_array_loc (loc
, type
, fn
, n
, argarray
);
10987 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10988 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10989 of arguments in ARGS to be omitted. OLDNARGS is the number of
10990 elements in ARGS. */
10993 rewrite_call_expr_valist (location_t loc
, int oldnargs
, tree
*args
,
10994 int skip
, tree fndecl
, int n
, va_list newargs
)
10996 int nargs
= oldnargs
- skip
+ n
;
11003 buffer
= XALLOCAVEC (tree
, nargs
);
11004 for (i
= 0; i
< n
; i
++)
11005 buffer
[i
] = va_arg (newargs
, tree
);
11006 for (j
= skip
; j
< oldnargs
; j
++, i
++)
11007 buffer
[i
] = args
[j
];
11010 buffer
= args
+ skip
;
11012 return build_call_expr_loc_array (loc
, fndecl
, nargs
, buffer
);
11015 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11016 list ARGS along with N new arguments specified as the "..."
11017 parameters. SKIP is the number of arguments in ARGS to be omitted.
11018 OLDNARGS is the number of elements in ARGS. */
11021 rewrite_call_expr_array (location_t loc
, int oldnargs
, tree
*args
,
11022 int skip
, tree fndecl
, int n
, ...)
11028 t
= rewrite_call_expr_valist (loc
, oldnargs
, args
, skip
, fndecl
, n
, ap
);
11034 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
11035 along with N new arguments specified as the "..." parameters. SKIP
11036 is the number of arguments in EXP to be omitted. This function is used
11037 to do varargs-to-varargs transformations. */
11040 rewrite_call_expr (location_t loc
, tree exp
, int skip
, tree fndecl
, int n
, ...)
11046 t
= rewrite_call_expr_valist (loc
, call_expr_nargs (exp
),
11047 CALL_EXPR_ARGP (exp
), skip
, fndecl
, n
, ap
);
11053 /* Validate a single argument ARG against a tree code CODE representing
11057 validate_arg (const_tree arg
, enum tree_code code
)
11061 else if (code
== POINTER_TYPE
)
11062 return POINTER_TYPE_P (TREE_TYPE (arg
));
11063 else if (code
== INTEGER_TYPE
)
11064 return INTEGRAL_TYPE_P (TREE_TYPE (arg
));
11065 return code
== TREE_CODE (TREE_TYPE (arg
));
11068 /* This function validates the types of a function call argument list
11069 against a specified list of tree_codes. If the last specifier is a 0,
11070 that represents an ellipses, otherwise the last specifier must be a
11073 This is the GIMPLE version of validate_arglist. Eventually we want to
11074 completely convert builtins.c to work from GIMPLEs and the tree based
11075 validate_arglist will then be removed. */
11078 validate_gimple_arglist (const_gimple call
, ...)
11080 enum tree_code code
;
11086 va_start (ap
, call
);
11091 code
= (enum tree_code
) va_arg (ap
, int);
11095 /* This signifies an ellipses, any further arguments are all ok. */
11099 /* This signifies an endlink, if no arguments remain, return
11100 true, otherwise return false. */
11101 res
= (i
== gimple_call_num_args (call
));
11104 /* If no parameters remain or the parameter's code does not
11105 match the specified code, return false. Otherwise continue
11106 checking any remaining arguments. */
11107 arg
= gimple_call_arg (call
, i
++);
11108 if (!validate_arg (arg
, code
))
11115 /* We need gotos here since we can only have one VA_CLOSE in a
11123 /* This function validates the types of a function call argument list
11124 against a specified list of tree_codes. If the last specifier is a 0,
11125 that represents an ellipses, otherwise the last specifier must be a
11129 validate_arglist (const_tree callexpr
, ...)
11131 enum tree_code code
;
11134 const_call_expr_arg_iterator iter
;
11137 va_start (ap
, callexpr
);
11138 init_const_call_expr_arg_iterator (callexpr
, &iter
);
11142 code
= (enum tree_code
) va_arg (ap
, int);
11146 /* This signifies an ellipses, any further arguments are all ok. */
11150 /* This signifies an endlink, if no arguments remain, return
11151 true, otherwise return false. */
11152 res
= !more_const_call_expr_args_p (&iter
);
11155 /* If no parameters remain or the parameter's code does not
11156 match the specified code, return false. Otherwise continue
11157 checking any remaining arguments. */
11158 arg
= next_const_call_expr_arg (&iter
);
11159 if (!validate_arg (arg
, code
))
11166 /* We need gotos here since we can only have one VA_CLOSE in a
11174 /* Default target-specific builtin expander that does nothing. */
11177 default_expand_builtin (tree exp ATTRIBUTE_UNUSED
,
11178 rtx target ATTRIBUTE_UNUSED
,
11179 rtx subtarget ATTRIBUTE_UNUSED
,
11180 enum machine_mode mode ATTRIBUTE_UNUSED
,
11181 int ignore ATTRIBUTE_UNUSED
)
11186 /* Returns true is EXP represents data that would potentially reside
11187 in a readonly section. */
11190 readonly_data_expr (tree exp
)
11194 if (TREE_CODE (exp
) != ADDR_EXPR
)
11197 exp
= get_base_address (TREE_OPERAND (exp
, 0));
11201 /* Make sure we call decl_readonly_section only for trees it
11202 can handle (since it returns true for everything it doesn't
11204 if (TREE_CODE (exp
) == STRING_CST
11205 || TREE_CODE (exp
) == CONSTRUCTOR
11206 || (TREE_CODE (exp
) == VAR_DECL
&& TREE_STATIC (exp
)))
11207 return decl_readonly_section (exp
, 0);
11212 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11213 to the call, and TYPE is its return type.
11215 Return NULL_TREE if no simplification was possible, otherwise return the
11216 simplified form of the call as a tree.
11218 The simplified form may be a constant or other expression which
11219 computes the same value, but in a more efficient manner (including
11220 calls to other builtin functions).
11222 The call may contain arguments which need to be evaluated, but
11223 which are not useful to determine the result of the call. In
11224 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11225 COMPOUND_EXPR will be an argument which must be evaluated.
11226 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11227 COMPOUND_EXPR in the chain will contain the tree for the simplified
11228 form of the builtin function call. */
11231 fold_builtin_strstr (location_t loc
, tree s1
, tree s2
, tree type
)
11233 if (!validate_arg (s1
, POINTER_TYPE
)
11234 || !validate_arg (s2
, POINTER_TYPE
))
11239 const char *p1
, *p2
;
11241 p2
= c_getstr (s2
);
11245 p1
= c_getstr (s1
);
11248 const char *r
= strstr (p1
, p2
);
11252 return build_int_cst (TREE_TYPE (s1
), 0);
11254 /* Return an offset into the constant string argument. */
11255 tem
= fold_build2_loc (loc
, POINTER_PLUS_EXPR
, TREE_TYPE (s1
),
11256 s1
, size_int (r
- p1
));
11257 return fold_convert_loc (loc
, type
, tem
);
11260 /* The argument is const char *, and the result is char *, so we need
11261 a type conversion here to avoid a warning. */
11263 return fold_convert_loc (loc
, type
, s1
);
11268 fn
= implicit_built_in_decls
[BUILT_IN_STRCHR
];
11272 /* New argument list transforming strstr(s1, s2) to
11273 strchr(s1, s2[0]). */
11274 return build_call_expr_loc (loc
, fn
, 2, s1
, build_int_cst (NULL_TREE
, p2
[0]));
11278 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11279 the call, and TYPE is its return type.
11281 Return NULL_TREE if no simplification was possible, otherwise return the
11282 simplified form of the call as a tree.
11284 The simplified form may be a constant or other expression which
11285 computes the same value, but in a more efficient manner (including
11286 calls to other builtin functions).
11288 The call may contain arguments which need to be evaluated, but
11289 which are not useful to determine the result of the call. In
11290 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11291 COMPOUND_EXPR will be an argument which must be evaluated.
11292 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11293 COMPOUND_EXPR in the chain will contain the tree for the simplified
11294 form of the builtin function call. */
11297 fold_builtin_strchr (location_t loc
, tree s1
, tree s2
, tree type
)
11299 if (!validate_arg (s1
, POINTER_TYPE
)
11300 || !validate_arg (s2
, INTEGER_TYPE
))
11306 if (TREE_CODE (s2
) != INTEGER_CST
)
11309 p1
= c_getstr (s1
);
11316 if (target_char_cast (s2
, &c
))
11319 r
= strchr (p1
, c
);
11322 return build_int_cst (TREE_TYPE (s1
), 0);
11324 /* Return an offset into the constant string argument. */
11325 tem
= fold_build2_loc (loc
, POINTER_PLUS_EXPR
, TREE_TYPE (s1
),
11326 s1
, size_int (r
- p1
));
11327 return fold_convert_loc (loc
, type
, tem
);
11333 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11334 the call, and TYPE is its return type.
11336 Return NULL_TREE if no simplification was possible, otherwise return the
11337 simplified form of the call as a tree.
11339 The simplified form may be a constant or other expression which
11340 computes the same value, but in a more efficient manner (including
11341 calls to other builtin functions).
11343 The call may contain arguments which need to be evaluated, but
11344 which are not useful to determine the result of the call. In
11345 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11346 COMPOUND_EXPR will be an argument which must be evaluated.
11347 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11348 COMPOUND_EXPR in the chain will contain the tree for the simplified
11349 form of the builtin function call. */
11352 fold_builtin_strrchr (location_t loc
, tree s1
, tree s2
, tree type
)
11354 if (!validate_arg (s1
, POINTER_TYPE
)
11355 || !validate_arg (s2
, INTEGER_TYPE
))
11362 if (TREE_CODE (s2
) != INTEGER_CST
)
11365 p1
= c_getstr (s1
);
11372 if (target_char_cast (s2
, &c
))
11375 r
= strrchr (p1
, c
);
11378 return build_int_cst (TREE_TYPE (s1
), 0);
11380 /* Return an offset into the constant string argument. */
11381 tem
= fold_build2_loc (loc
, POINTER_PLUS_EXPR
, TREE_TYPE (s1
),
11382 s1
, size_int (r
- p1
));
11383 return fold_convert_loc (loc
, type
, tem
);
11386 if (! integer_zerop (s2
))
11389 fn
= implicit_built_in_decls
[BUILT_IN_STRCHR
];
11393 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11394 return build_call_expr_loc (loc
, fn
, 2, s1
, s2
);
11398 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11399 to the call, and TYPE is its return type.
11401 Return NULL_TREE if no simplification was possible, otherwise return the
11402 simplified form of the call as a tree.
11404 The simplified form may be a constant or other expression which
11405 computes the same value, but in a more efficient manner (including
11406 calls to other builtin functions).
11408 The call may contain arguments which need to be evaluated, but
11409 which are not useful to determine the result of the call. In
11410 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11411 COMPOUND_EXPR will be an argument which must be evaluated.
11412 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11413 COMPOUND_EXPR in the chain will contain the tree for the simplified
11414 form of the builtin function call. */
11417 fold_builtin_strpbrk (location_t loc
, tree s1
, tree s2
, tree type
)
11419 if (!validate_arg (s1
, POINTER_TYPE
)
11420 || !validate_arg (s2
, POINTER_TYPE
))
11425 const char *p1
, *p2
;
11427 p2
= c_getstr (s2
);
11431 p1
= c_getstr (s1
);
11434 const char *r
= strpbrk (p1
, p2
);
11438 return build_int_cst (TREE_TYPE (s1
), 0);
11440 /* Return an offset into the constant string argument. */
11441 tem
= fold_build2_loc (loc
, POINTER_PLUS_EXPR
, TREE_TYPE (s1
),
11442 s1
, size_int (r
- p1
));
11443 return fold_convert_loc (loc
, type
, tem
);
11447 /* strpbrk(x, "") == NULL.
11448 Evaluate and ignore s1 in case it had side-effects. */
11449 return omit_one_operand_loc (loc
, TREE_TYPE (s1
), integer_zero_node
, s1
);
11452 return NULL_TREE
; /* Really call strpbrk. */
11454 fn
= implicit_built_in_decls
[BUILT_IN_STRCHR
];
11458 /* New argument list transforming strpbrk(s1, s2) to
11459 strchr(s1, s2[0]). */
11460 return build_call_expr_loc (loc
, fn
, 2, s1
, build_int_cst (NULL_TREE
, p2
[0]));
11464 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11467 Return NULL_TREE if no simplification was possible, otherwise return the
11468 simplified form of the call as a tree.
11470 The simplified form may be a constant or other expression which
11471 computes the same value, but in a more efficient manner (including
11472 calls to other builtin functions).
11474 The call may contain arguments which need to be evaluated, but
11475 which are not useful to determine the result of the call. In
11476 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11477 COMPOUND_EXPR will be an argument which must be evaluated.
11478 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11479 COMPOUND_EXPR in the chain will contain the tree for the simplified
11480 form of the builtin function call. */
11483 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED
, tree dst
, tree src
)
11485 if (!validate_arg (dst
, POINTER_TYPE
)
11486 || !validate_arg (src
, POINTER_TYPE
))
11490 const char *p
= c_getstr (src
);
11492 /* If the string length is zero, return the dst parameter. */
11493 if (p
&& *p
== '\0')
11496 if (optimize_insn_for_speed_p ())
11498 /* See if we can store by pieces into (dst + strlen(dst)). */
11500 tree strlen_fn
= implicit_built_in_decls
[BUILT_IN_STRLEN
];
11501 tree strcpy_fn
= implicit_built_in_decls
[BUILT_IN_STRCPY
];
11503 if (!strlen_fn
|| !strcpy_fn
)
11506 /* If we don't have a movstr we don't want to emit an strcpy
11507 call. We have to do that if the length of the source string
11508 isn't computable (in that case we can use memcpy probably
11509 later expanding to a sequence of mov instructions). If we
11510 have movstr instructions we can emit strcpy calls. */
11513 tree len
= c_strlen (src
, 1);
11514 if (! len
|| TREE_SIDE_EFFECTS (len
))
11518 /* Stabilize the argument list. */
11519 dst
= builtin_save_expr (dst
);
11521 /* Create strlen (dst). */
11522 newdst
= build_call_expr_loc (loc
, strlen_fn
, 1, dst
);
11523 /* Create (dst p+ strlen (dst)). */
11525 newdst
= fold_build2_loc (loc
, POINTER_PLUS_EXPR
,
11526 TREE_TYPE (dst
), dst
, newdst
);
11527 newdst
= builtin_save_expr (newdst
);
11529 call
= build_call_expr_loc (loc
, strcpy_fn
, 2, newdst
, src
);
11530 return build2 (COMPOUND_EXPR
, TREE_TYPE (dst
), call
, dst
);
11536 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11537 arguments to the call.
11539 Return NULL_TREE if no simplification was possible, otherwise return the
11540 simplified form of the call as a tree.
11542 The simplified form may be a constant or other expression which
11543 computes the same value, but in a more efficient manner (including
11544 calls to other builtin functions).
11546 The call may contain arguments which need to be evaluated, but
11547 which are not useful to determine the result of the call. In
11548 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11549 COMPOUND_EXPR will be an argument which must be evaluated.
11550 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11551 COMPOUND_EXPR in the chain will contain the tree for the simplified
11552 form of the builtin function call. */
11555 fold_builtin_strncat (location_t loc
, tree dst
, tree src
, tree len
)
11557 if (!validate_arg (dst
, POINTER_TYPE
)
11558 || !validate_arg (src
, POINTER_TYPE
)
11559 || !validate_arg (len
, INTEGER_TYPE
))
11563 const char *p
= c_getstr (src
);
11565 /* If the requested length is zero, or the src parameter string
11566 length is zero, return the dst parameter. */
11567 if (integer_zerop (len
) || (p
&& *p
== '\0'))
11568 return omit_two_operands_loc (loc
, TREE_TYPE (dst
), dst
, src
, len
);
11570 /* If the requested len is greater than or equal to the string
11571 length, call strcat. */
11572 if (TREE_CODE (len
) == INTEGER_CST
&& p
11573 && compare_tree_int (len
, strlen (p
)) >= 0)
11575 tree fn
= implicit_built_in_decls
[BUILT_IN_STRCAT
];
11577 /* If the replacement _DECL isn't initialized, don't do the
11582 return build_call_expr_loc (loc
, fn
, 2, dst
, src
);
11588 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11591 Return NULL_TREE if no simplification was possible, otherwise return the
11592 simplified form of the call as a tree.
11594 The simplified form may be a constant or other expression which
11595 computes the same value, but in a more efficient manner (including
11596 calls to other builtin functions).
11598 The call may contain arguments which need to be evaluated, but
11599 which are not useful to determine the result of the call. In
11600 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11601 COMPOUND_EXPR will be an argument which must be evaluated.
11602 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11603 COMPOUND_EXPR in the chain will contain the tree for the simplified
11604 form of the builtin function call. */
11607 fold_builtin_strspn (location_t loc
, tree s1
, tree s2
)
11609 if (!validate_arg (s1
, POINTER_TYPE
)
11610 || !validate_arg (s2
, POINTER_TYPE
))
11614 const char *p1
= c_getstr (s1
), *p2
= c_getstr (s2
);
11616 /* If both arguments are constants, evaluate at compile-time. */
11619 const size_t r
= strspn (p1
, p2
);
11620 return size_int (r
);
11623 /* If either argument is "", return NULL_TREE. */
11624 if ((p1
&& *p1
== '\0') || (p2
&& *p2
== '\0'))
11625 /* Evaluate and ignore both arguments in case either one has
11627 return omit_two_operands_loc (loc
, size_type_node
, size_zero_node
,
11633 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11636 Return NULL_TREE if no simplification was possible, otherwise return the
11637 simplified form of the call as a tree.
11639 The simplified form may be a constant or other expression which
11640 computes the same value, but in a more efficient manner (including
11641 calls to other builtin functions).
11643 The call may contain arguments which need to be evaluated, but
11644 which are not useful to determine the result of the call. In
11645 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11646 COMPOUND_EXPR will be an argument which must be evaluated.
11647 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11648 COMPOUND_EXPR in the chain will contain the tree for the simplified
11649 form of the builtin function call. */
11652 fold_builtin_strcspn (location_t loc
, tree s1
, tree s2
)
11654 if (!validate_arg (s1
, POINTER_TYPE
)
11655 || !validate_arg (s2
, POINTER_TYPE
))
11659 const char *p1
= c_getstr (s1
), *p2
= c_getstr (s2
);
11661 /* If both arguments are constants, evaluate at compile-time. */
11664 const size_t r
= strcspn (p1
, p2
);
11665 return size_int (r
);
11668 /* If the first argument is "", return NULL_TREE. */
11669 if (p1
&& *p1
== '\0')
11671 /* Evaluate and ignore argument s2 in case it has
11673 return omit_one_operand_loc (loc
, size_type_node
,
11674 size_zero_node
, s2
);
11677 /* If the second argument is "", return __builtin_strlen(s1). */
11678 if (p2
&& *p2
== '\0')
11680 tree fn
= implicit_built_in_decls
[BUILT_IN_STRLEN
];
11682 /* If the replacement _DECL isn't initialized, don't do the
11687 return build_call_expr_loc (loc
, fn
, 1, s1
);
11693 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11694 to the call. IGNORE is true if the value returned
11695 by the builtin will be ignored. UNLOCKED is true is true if this
11696 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11697 the known length of the string. Return NULL_TREE if no simplification
11701 fold_builtin_fputs (location_t loc
, tree arg0
, tree arg1
,
11702 bool ignore
, bool unlocked
, tree len
)
11704 /* If we're using an unlocked function, assume the other unlocked
11705 functions exist explicitly. */
11706 tree
const fn_fputc
= unlocked
? built_in_decls
[BUILT_IN_FPUTC_UNLOCKED
]
11707 : implicit_built_in_decls
[BUILT_IN_FPUTC
];
11708 tree
const fn_fwrite
= unlocked
? built_in_decls
[BUILT_IN_FWRITE_UNLOCKED
]
11709 : implicit_built_in_decls
[BUILT_IN_FWRITE
];
11711 /* If the return value is used, don't do the transformation. */
11715 /* Verify the arguments in the original call. */
11716 if (!validate_arg (arg0
, POINTER_TYPE
)
11717 || !validate_arg (arg1
, POINTER_TYPE
))
11721 len
= c_strlen (arg0
, 0);
11723 /* Get the length of the string passed to fputs. If the length
11724 can't be determined, punt. */
11726 || TREE_CODE (len
) != INTEGER_CST
)
11729 switch (compare_tree_int (len
, 1))
11731 case -1: /* length is 0, delete the call entirely . */
11732 return omit_one_operand_loc (loc
, integer_type_node
,
11733 integer_zero_node
, arg1
);;
11735 case 0: /* length is 1, call fputc. */
11737 const char *p
= c_getstr (arg0
);
11742 return build_call_expr_loc (loc
, fn_fputc
, 2,
11743 build_int_cst (NULL_TREE
, p
[0]), arg1
);
11749 case 1: /* length is greater than 1, call fwrite. */
11751 /* If optimizing for size keep fputs. */
11752 if (optimize_function_for_size_p (cfun
))
11754 /* New argument list transforming fputs(string, stream) to
11755 fwrite(string, 1, len, stream). */
11757 return build_call_expr_loc (loc
, fn_fwrite
, 4, arg0
,
11758 size_one_node
, len
, arg1
);
11763 gcc_unreachable ();
11768 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11769 produced. False otherwise. This is done so that we don't output the error
11770 or warning twice or three times. */
11773 fold_builtin_next_arg (tree exp
, bool va_start_p
)
11775 tree fntype
= TREE_TYPE (current_function_decl
);
11776 int nargs
= call_expr_nargs (exp
);
11779 if (!stdarg_p (fntype
))
11781 error ("%<va_start%> used in function with fixed args");
11787 if (va_start_p
&& (nargs
!= 2))
11789 error ("wrong number of arguments to function %<va_start%>");
11792 arg
= CALL_EXPR_ARG (exp
, 1);
11794 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11795 when we checked the arguments and if needed issued a warning. */
11800 /* Evidently an out of date version of <stdarg.h>; can't validate
11801 va_start's second argument, but can still work as intended. */
11802 warning (0, "%<__builtin_next_arg%> called without an argument");
11805 else if (nargs
> 1)
11807 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11810 arg
= CALL_EXPR_ARG (exp
, 0);
11813 if (TREE_CODE (arg
) == SSA_NAME
)
11814 arg
= SSA_NAME_VAR (arg
);
11816 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11817 or __builtin_next_arg (0) the first time we see it, after checking
11818 the arguments and if needed issuing a warning. */
11819 if (!integer_zerop (arg
))
11821 tree last_parm
= tree_last (DECL_ARGUMENTS (current_function_decl
));
11823 /* Strip off all nops for the sake of the comparison. This
11824 is not quite the same as STRIP_NOPS. It does more.
11825 We must also strip off INDIRECT_EXPR for C++ reference
11827 while (CONVERT_EXPR_P (arg
)
11828 || TREE_CODE (arg
) == INDIRECT_REF
)
11829 arg
= TREE_OPERAND (arg
, 0);
11830 if (arg
!= last_parm
)
11832 /* FIXME: Sometimes with the tree optimizers we can get the
11833 not the last argument even though the user used the last
11834 argument. We just warn and set the arg to be the last
11835 argument so that we will get wrong-code because of
11837 warning (0, "second parameter of %<va_start%> not last named argument");
11840 /* Undefined by C99 7.15.1.4p4 (va_start):
11841 "If the parameter parmN is declared with the register storage
11842 class, with a function or array type, or with a type that is
11843 not compatible with the type that results after application of
11844 the default argument promotions, the behavior is undefined."
11846 else if (DECL_REGISTER (arg
))
11847 warning (0, "undefined behaviour when second parameter of "
11848 "%<va_start%> is declared with %<register%> storage");
11850 /* We want to verify the second parameter just once before the tree
11851 optimizers are run and then avoid keeping it in the tree,
11852 as otherwise we could warn even for correct code like:
11853 void foo (int i, ...)
11854 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11856 CALL_EXPR_ARG (exp
, 1) = integer_zero_node
;
11858 CALL_EXPR_ARG (exp
, 0) = integer_zero_node
;
11864 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11865 ORIG may be null if this is a 2-argument call. We don't attempt to
11866 simplify calls with more than 3 arguments.
11868 Return NULL_TREE if no simplification was possible, otherwise return the
11869 simplified form of the call as a tree. If IGNORED is true, it means that
11870 the caller does not use the returned value of the function. */
11873 fold_builtin_sprintf (location_t loc
, tree dest
, tree fmt
,
11874 tree orig
, int ignored
)
11877 const char *fmt_str
= NULL
;
11879 /* Verify the required arguments in the original call. We deal with two
11880 types of sprintf() calls: 'sprintf (str, fmt)' and
11881 'sprintf (dest, "%s", orig)'. */
11882 if (!validate_arg (dest
, POINTER_TYPE
)
11883 || !validate_arg (fmt
, POINTER_TYPE
))
11885 if (orig
&& !validate_arg (orig
, POINTER_TYPE
))
11888 /* Check whether the format is a literal string constant. */
11889 fmt_str
= c_getstr (fmt
);
11890 if (fmt_str
== NULL
)
11894 retval
= NULL_TREE
;
11896 if (!init_target_chars ())
11899 /* If the format doesn't contain % args or %%, use strcpy. */
11900 if (strchr (fmt_str
, target_percent
) == NULL
)
11902 tree fn
= implicit_built_in_decls
[BUILT_IN_STRCPY
];
11907 /* Don't optimize sprintf (buf, "abc", ptr++). */
11911 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11912 'format' is known to contain no % formats. */
11913 call
= build_call_expr_loc (loc
, fn
, 2, dest
, fmt
);
11915 retval
= build_int_cst (NULL_TREE
, strlen (fmt_str
));
11918 /* If the format is "%s", use strcpy if the result isn't used. */
11919 else if (fmt_str
&& strcmp (fmt_str
, target_percent_s
) == 0)
11922 fn
= implicit_built_in_decls
[BUILT_IN_STRCPY
];
11927 /* Don't crash on sprintf (str1, "%s"). */
11931 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11934 retval
= c_strlen (orig
, 1);
11935 if (!retval
|| TREE_CODE (retval
) != INTEGER_CST
)
11938 call
= build_call_expr_loc (loc
, fn
, 2, dest
, orig
);
11941 if (call
&& retval
)
11943 retval
= fold_convert_loc
11944 (loc
, TREE_TYPE (TREE_TYPE (implicit_built_in_decls
[BUILT_IN_SPRINTF
])),
11946 return build2 (COMPOUND_EXPR
, TREE_TYPE (retval
), call
, retval
);
11952 /* Expand a call EXP to __builtin_object_size. */
11955 expand_builtin_object_size (tree exp
)
11958 int object_size_type
;
11959 tree fndecl
= get_callee_fndecl (exp
);
11961 if (!validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
11963 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11965 expand_builtin_trap ();
11969 ost
= CALL_EXPR_ARG (exp
, 1);
11972 if (TREE_CODE (ost
) != INTEGER_CST
11973 || tree_int_cst_sgn (ost
) < 0
11974 || compare_tree_int (ost
, 3) > 0)
11976 error ("%Klast argument of %D is not integer constant between 0 and 3",
11978 expand_builtin_trap ();
11982 object_size_type
= tree_low_cst (ost
, 0);
11984 return object_size_type
< 2 ? constm1_rtx
: const0_rtx
;
11987 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11988 FCODE is the BUILT_IN_* to use.
11989 Return NULL_RTX if we failed; the caller should emit a normal call,
11990 otherwise try to get the result in TARGET, if convenient (and in
11991 mode MODE if that's convenient). */
11994 expand_builtin_memory_chk (tree exp
, rtx target
, enum machine_mode mode
,
11995 enum built_in_function fcode
)
11997 tree dest
, src
, len
, size
;
11999 if (!validate_arglist (exp
,
12001 fcode
== BUILT_IN_MEMSET_CHK
12002 ? INTEGER_TYPE
: POINTER_TYPE
,
12003 INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
12006 dest
= CALL_EXPR_ARG (exp
, 0);
12007 src
= CALL_EXPR_ARG (exp
, 1);
12008 len
= CALL_EXPR_ARG (exp
, 2);
12009 size
= CALL_EXPR_ARG (exp
, 3);
12011 if (! host_integerp (size
, 1))
12014 if (host_integerp (len
, 1) || integer_all_onesp (size
))
12018 if (! integer_all_onesp (size
) && tree_int_cst_lt (size
, len
))
12020 warning_at (tree_nonartificial_location (exp
),
12021 0, "%Kcall to %D will always overflow destination buffer",
12022 exp
, get_callee_fndecl (exp
));
12027 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12028 mem{cpy,pcpy,move,set} is available. */
12031 case BUILT_IN_MEMCPY_CHK
:
12032 fn
= built_in_decls
[BUILT_IN_MEMCPY
];
12034 case BUILT_IN_MEMPCPY_CHK
:
12035 fn
= built_in_decls
[BUILT_IN_MEMPCPY
];
12037 case BUILT_IN_MEMMOVE_CHK
:
12038 fn
= built_in_decls
[BUILT_IN_MEMMOVE
];
12040 case BUILT_IN_MEMSET_CHK
:
12041 fn
= built_in_decls
[BUILT_IN_MEMSET
];
12050 fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fn
, 3, dest
, src
, len
);
12051 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
12052 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
12053 return expand_expr (fn
, target
, mode
, EXPAND_NORMAL
);
12055 else if (fcode
== BUILT_IN_MEMSET_CHK
)
12059 unsigned int dest_align
12060 = get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
12062 /* If DEST is not a pointer type, call the normal function. */
12063 if (dest_align
== 0)
12066 /* If SRC and DEST are the same (and not volatile), do nothing. */
12067 if (operand_equal_p (src
, dest
, 0))
12071 if (fcode
!= BUILT_IN_MEMPCPY_CHK
)
12073 /* Evaluate and ignore LEN in case it has side-effects. */
12074 expand_expr (len
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
12075 return expand_expr (dest
, target
, mode
, EXPAND_NORMAL
);
12078 expr
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (dest
), dest
, len
);
12079 return expand_expr (expr
, target
, mode
, EXPAND_NORMAL
);
12082 /* __memmove_chk special case. */
12083 if (fcode
== BUILT_IN_MEMMOVE_CHK
)
12085 unsigned int src_align
12086 = get_pointer_alignment (src
, BIGGEST_ALIGNMENT
);
12088 if (src_align
== 0)
12091 /* If src is categorized for a readonly section we can use
12092 normal __memcpy_chk. */
12093 if (readonly_data_expr (src
))
12095 tree fn
= built_in_decls
[BUILT_IN_MEMCPY_CHK
];
12098 fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fn
, 4,
12099 dest
, src
, len
, size
);
12100 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
12101 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
12102 return expand_expr (fn
, target
, mode
, EXPAND_NORMAL
);
12109 /* Emit warning if a buffer overflow is detected at compile time. */
12112 maybe_emit_chk_warning (tree exp
, enum built_in_function fcode
)
12116 location_t loc
= tree_nonartificial_location (exp
);
12120 case BUILT_IN_STRCPY_CHK
:
12121 case BUILT_IN_STPCPY_CHK
:
12122 /* For __strcat_chk the warning will be emitted only if overflowing
12123 by at least strlen (dest) + 1 bytes. */
12124 case BUILT_IN_STRCAT_CHK
:
12125 len
= CALL_EXPR_ARG (exp
, 1);
12126 size
= CALL_EXPR_ARG (exp
, 2);
12129 case BUILT_IN_STRNCAT_CHK
:
12130 case BUILT_IN_STRNCPY_CHK
:
12131 len
= CALL_EXPR_ARG (exp
, 2);
12132 size
= CALL_EXPR_ARG (exp
, 3);
12134 case BUILT_IN_SNPRINTF_CHK
:
12135 case BUILT_IN_VSNPRINTF_CHK
:
12136 len
= CALL_EXPR_ARG (exp
, 1);
12137 size
= CALL_EXPR_ARG (exp
, 3);
12140 gcc_unreachable ();
12146 if (! host_integerp (size
, 1) || integer_all_onesp (size
))
12151 len
= c_strlen (len
, 1);
12152 if (! len
|| ! host_integerp (len
, 1) || tree_int_cst_lt (len
, size
))
12155 else if (fcode
== BUILT_IN_STRNCAT_CHK
)
12157 tree src
= CALL_EXPR_ARG (exp
, 1);
12158 if (! src
|| ! host_integerp (len
, 1) || tree_int_cst_lt (len
, size
))
12160 src
= c_strlen (src
, 1);
12161 if (! src
|| ! host_integerp (src
, 1))
12163 warning_at (loc
, 0, "%Kcall to %D might overflow destination buffer",
12164 exp
, get_callee_fndecl (exp
));
12167 else if (tree_int_cst_lt (src
, size
))
12170 else if (! host_integerp (len
, 1) || ! tree_int_cst_lt (size
, len
))
12173 warning_at (loc
, 0, "%Kcall to %D will always overflow destination buffer",
12174 exp
, get_callee_fndecl (exp
));
12177 /* Emit warning if a buffer overflow is detected at compile time
12178 in __sprintf_chk/__vsprintf_chk calls. */
12181 maybe_emit_sprintf_chk_warning (tree exp
, enum built_in_function fcode
)
12183 tree size
, len
, fmt
;
12184 const char *fmt_str
;
12185 int nargs
= call_expr_nargs (exp
);
12187 /* Verify the required arguments in the original call. */
12191 size
= CALL_EXPR_ARG (exp
, 2);
12192 fmt
= CALL_EXPR_ARG (exp
, 3);
12194 if (! host_integerp (size
, 1) || integer_all_onesp (size
))
12197 /* Check whether the format is a literal string constant. */
12198 fmt_str
= c_getstr (fmt
);
12199 if (fmt_str
== NULL
)
12202 if (!init_target_chars ())
12205 /* If the format doesn't contain % args or %%, we know its size. */
12206 if (strchr (fmt_str
, target_percent
) == 0)
12207 len
= build_int_cstu (size_type_node
, strlen (fmt_str
));
12208 /* If the format is "%s" and first ... argument is a string literal,
12210 else if (fcode
== BUILT_IN_SPRINTF_CHK
12211 && strcmp (fmt_str
, target_percent_s
) == 0)
12217 arg
= CALL_EXPR_ARG (exp
, 4);
12218 if (! POINTER_TYPE_P (TREE_TYPE (arg
)))
12221 len
= c_strlen (arg
, 1);
12222 if (!len
|| ! host_integerp (len
, 1))
12228 if (! tree_int_cst_lt (len
, size
))
12229 warning_at (tree_nonartificial_location (exp
),
12230 0, "%Kcall to %D will always overflow destination buffer",
12231 exp
, get_callee_fndecl (exp
));
12234 /* Emit warning if a free is called with address of a variable. */
12237 maybe_emit_free_warning (tree exp
)
12239 tree arg
= CALL_EXPR_ARG (exp
, 0);
12242 if (TREE_CODE (arg
) != ADDR_EXPR
)
12245 arg
= get_base_address (TREE_OPERAND (arg
, 0));
12246 if (arg
== NULL
|| INDIRECT_REF_P (arg
) || TREE_CODE (arg
) == MEM_REF
)
12249 if (SSA_VAR_P (arg
))
12250 warning_at (tree_nonartificial_location (exp
),
12251 0, "%Kattempt to free a non-heap object %qD", exp
, arg
);
12253 warning_at (tree_nonartificial_location (exp
),
12254 0, "%Kattempt to free a non-heap object", exp
);
12257 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12261 fold_builtin_object_size (tree ptr
, tree ost
)
12263 unsigned HOST_WIDE_INT bytes
;
12264 int object_size_type
;
12266 if (!validate_arg (ptr
, POINTER_TYPE
)
12267 || !validate_arg (ost
, INTEGER_TYPE
))
12272 if (TREE_CODE (ost
) != INTEGER_CST
12273 || tree_int_cst_sgn (ost
) < 0
12274 || compare_tree_int (ost
, 3) > 0)
12277 object_size_type
= tree_low_cst (ost
, 0);
12279 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12280 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12281 and (size_t) 0 for types 2 and 3. */
12282 if (TREE_SIDE_EFFECTS (ptr
))
12283 return build_int_cst_type (size_type_node
, object_size_type
< 2 ? -1 : 0);
12285 if (TREE_CODE (ptr
) == ADDR_EXPR
)
12287 bytes
= compute_builtin_object_size (ptr
, object_size_type
);
12288 if (double_int_fits_to_tree_p (size_type_node
,
12289 uhwi_to_double_int (bytes
)))
12290 return build_int_cstu (size_type_node
, bytes
);
12292 else if (TREE_CODE (ptr
) == SSA_NAME
)
12294 /* If object size is not known yet, delay folding until
12295 later. Maybe subsequent passes will help determining
12297 bytes
= compute_builtin_object_size (ptr
, object_size_type
);
12298 if (bytes
!= (unsigned HOST_WIDE_INT
) (object_size_type
< 2 ? -1 : 0)
12299 && double_int_fits_to_tree_p (size_type_node
,
12300 uhwi_to_double_int (bytes
)))
12301 return build_int_cstu (size_type_node
, bytes
);
12307 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12308 DEST, SRC, LEN, and SIZE are the arguments to the call.
12309 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
12310 code of the builtin. If MAXLEN is not NULL, it is maximum length
12311 passed as third argument. */
12314 fold_builtin_memory_chk (location_t loc
, tree fndecl
,
12315 tree dest
, tree src
, tree len
, tree size
,
12316 tree maxlen
, bool ignore
,
12317 enum built_in_function fcode
)
12321 if (!validate_arg (dest
, POINTER_TYPE
)
12322 || !validate_arg (src
,
12323 (fcode
== BUILT_IN_MEMSET_CHK
12324 ? INTEGER_TYPE
: POINTER_TYPE
))
12325 || !validate_arg (len
, INTEGER_TYPE
)
12326 || !validate_arg (size
, INTEGER_TYPE
))
12329 /* If SRC and DEST are the same (and not volatile), return DEST
12330 (resp. DEST+LEN for __mempcpy_chk). */
12331 if (fcode
!= BUILT_IN_MEMSET_CHK
&& operand_equal_p (src
, dest
, 0))
12333 if (fcode
!= BUILT_IN_MEMPCPY_CHK
)
12334 return omit_one_operand_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)),
12338 tree temp
= fold_build2_loc (loc
, POINTER_PLUS_EXPR
, TREE_TYPE (dest
),
12340 return fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), temp
);
12344 if (! host_integerp (size
, 1))
12347 if (! integer_all_onesp (size
))
12349 if (! host_integerp (len
, 1))
12351 /* If LEN is not constant, try MAXLEN too.
12352 For MAXLEN only allow optimizing into non-_ocs function
12353 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12354 if (maxlen
== NULL_TREE
|| ! host_integerp (maxlen
, 1))
12356 if (fcode
== BUILT_IN_MEMPCPY_CHK
&& ignore
)
12358 /* (void) __mempcpy_chk () can be optimized into
12359 (void) __memcpy_chk (). */
12360 fn
= built_in_decls
[BUILT_IN_MEMCPY_CHK
];
12364 return build_call_expr_loc (loc
, fn
, 4, dest
, src
, len
, size
);
12372 if (tree_int_cst_lt (size
, maxlen
))
12377 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12378 mem{cpy,pcpy,move,set} is available. */
12381 case BUILT_IN_MEMCPY_CHK
:
12382 fn
= built_in_decls
[BUILT_IN_MEMCPY
];
12384 case BUILT_IN_MEMPCPY_CHK
:
12385 fn
= built_in_decls
[BUILT_IN_MEMPCPY
];
12387 case BUILT_IN_MEMMOVE_CHK
:
12388 fn
= built_in_decls
[BUILT_IN_MEMMOVE
];
12390 case BUILT_IN_MEMSET_CHK
:
12391 fn
= built_in_decls
[BUILT_IN_MEMSET
];
12400 return build_call_expr_loc (loc
, fn
, 3, dest
, src
, len
);
12403 /* Fold a call to the __st[rp]cpy_chk builtin.
12404 DEST, SRC, and SIZE are the arguments to the call.
12405 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12406 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12407 strings passed as second argument. */
12410 fold_builtin_stxcpy_chk (location_t loc
, tree fndecl
, tree dest
,
12411 tree src
, tree size
,
12412 tree maxlen
, bool ignore
,
12413 enum built_in_function fcode
)
12417 if (!validate_arg (dest
, POINTER_TYPE
)
12418 || !validate_arg (src
, POINTER_TYPE
)
12419 || !validate_arg (size
, INTEGER_TYPE
))
12422 /* If SRC and DEST are the same (and not volatile), return DEST. */
12423 if (fcode
== BUILT_IN_STRCPY_CHK
&& operand_equal_p (src
, dest
, 0))
12424 return fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), dest
);
12426 if (! host_integerp (size
, 1))
12429 if (! integer_all_onesp (size
))
12431 len
= c_strlen (src
, 1);
12432 if (! len
|| ! host_integerp (len
, 1))
12434 /* If LEN is not constant, try MAXLEN too.
12435 For MAXLEN only allow optimizing into non-_ocs function
12436 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12437 if (maxlen
== NULL_TREE
|| ! host_integerp (maxlen
, 1))
12439 if (fcode
== BUILT_IN_STPCPY_CHK
)
12444 /* If return value of __stpcpy_chk is ignored,
12445 optimize into __strcpy_chk. */
12446 fn
= built_in_decls
[BUILT_IN_STRCPY_CHK
];
12450 return build_call_expr_loc (loc
, fn
, 3, dest
, src
, size
);
12453 if (! len
|| TREE_SIDE_EFFECTS (len
))
12456 /* If c_strlen returned something, but not a constant,
12457 transform __strcpy_chk into __memcpy_chk. */
12458 fn
= built_in_decls
[BUILT_IN_MEMCPY_CHK
];
12462 len
= size_binop_loc (loc
, PLUS_EXPR
, len
, ssize_int (1));
12463 return fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)),
12464 build_call_expr_loc (loc
, fn
, 4,
12465 dest
, src
, len
, size
));
12471 if (! tree_int_cst_lt (maxlen
, size
))
12475 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12476 fn
= built_in_decls
[fcode
== BUILT_IN_STPCPY_CHK
12477 ? BUILT_IN_STPCPY
: BUILT_IN_STRCPY
];
12481 return build_call_expr_loc (loc
, fn
, 2, dest
, src
);
12484 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12485 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12486 length passed as third argument. */
12489 fold_builtin_strncpy_chk (location_t loc
, tree dest
, tree src
,
12490 tree len
, tree size
, tree maxlen
)
12494 if (!validate_arg (dest
, POINTER_TYPE
)
12495 || !validate_arg (src
, POINTER_TYPE
)
12496 || !validate_arg (len
, INTEGER_TYPE
)
12497 || !validate_arg (size
, INTEGER_TYPE
))
12500 if (! host_integerp (size
, 1))
12503 if (! integer_all_onesp (size
))
12505 if (! host_integerp (len
, 1))
12507 /* If LEN is not constant, try MAXLEN too.
12508 For MAXLEN only allow optimizing into non-_ocs function
12509 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12510 if (maxlen
== NULL_TREE
|| ! host_integerp (maxlen
, 1))
12516 if (tree_int_cst_lt (size
, maxlen
))
12520 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12521 fn
= built_in_decls
[BUILT_IN_STRNCPY
];
12525 return build_call_expr_loc (loc
, fn
, 3, dest
, src
, len
);
12528 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12529 are the arguments to the call. */
12532 fold_builtin_strcat_chk (location_t loc
, tree fndecl
, tree dest
,
12533 tree src
, tree size
)
12538 if (!validate_arg (dest
, POINTER_TYPE
)
12539 || !validate_arg (src
, POINTER_TYPE
)
12540 || !validate_arg (size
, INTEGER_TYPE
))
12543 p
= c_getstr (src
);
12544 /* If the SRC parameter is "", return DEST. */
12545 if (p
&& *p
== '\0')
12546 return omit_one_operand_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), dest
, src
);
12548 if (! host_integerp (size
, 1) || ! integer_all_onesp (size
))
12551 /* If __builtin_strcat_chk is used, assume strcat is available. */
12552 fn
= built_in_decls
[BUILT_IN_STRCAT
];
12556 return build_call_expr_loc (loc
, fn
, 2, dest
, src
);
12559 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12563 fold_builtin_strncat_chk (location_t loc
, tree fndecl
,
12564 tree dest
, tree src
, tree len
, tree size
)
12569 if (!validate_arg (dest
, POINTER_TYPE
)
12570 || !validate_arg (src
, POINTER_TYPE
)
12571 || !validate_arg (size
, INTEGER_TYPE
)
12572 || !validate_arg (size
, INTEGER_TYPE
))
12575 p
= c_getstr (src
);
12576 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12577 if (p
&& *p
== '\0')
12578 return omit_one_operand_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), dest
, len
);
12579 else if (integer_zerop (len
))
12580 return omit_one_operand_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), dest
, src
);
12582 if (! host_integerp (size
, 1))
12585 if (! integer_all_onesp (size
))
12587 tree src_len
= c_strlen (src
, 1);
12589 && host_integerp (src_len
, 1)
12590 && host_integerp (len
, 1)
12591 && ! tree_int_cst_lt (len
, src_len
))
12593 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12594 fn
= built_in_decls
[BUILT_IN_STRCAT_CHK
];
12598 return build_call_expr_loc (loc
, fn
, 3, dest
, src
, size
);
12603 /* If __builtin_strncat_chk is used, assume strncat is available. */
12604 fn
= built_in_decls
[BUILT_IN_STRNCAT
];
12608 return build_call_expr_loc (loc
, fn
, 3, dest
, src
, len
);
12611 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
12612 Return NULL_TREE if a normal call should be emitted rather than
12613 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
12614 or BUILT_IN_VSPRINTF_CHK. */
12617 fold_builtin_sprintf_chk_1 (location_t loc
, int nargs
, tree
*args
,
12618 enum built_in_function fcode
)
12620 tree dest
, size
, len
, fn
, fmt
, flag
;
12621 const char *fmt_str
;
12623 /* Verify the required arguments in the original call. */
12627 if (!validate_arg (dest
, POINTER_TYPE
))
12630 if (!validate_arg (flag
, INTEGER_TYPE
))
12633 if (!validate_arg (size
, INTEGER_TYPE
))
12636 if (!validate_arg (fmt
, POINTER_TYPE
))
12639 if (! host_integerp (size
, 1))
12644 if (!init_target_chars ())
12647 /* Check whether the format is a literal string constant. */
12648 fmt_str
= c_getstr (fmt
);
12649 if (fmt_str
!= NULL
)
12651 /* If the format doesn't contain % args or %%, we know the size. */
12652 if (strchr (fmt_str
, target_percent
) == 0)
12654 if (fcode
!= BUILT_IN_SPRINTF_CHK
|| nargs
== 4)
12655 len
= build_int_cstu (size_type_node
, strlen (fmt_str
));
12657 /* If the format is "%s" and first ... argument is a string literal,
12658 we know the size too. */
12659 else if (fcode
== BUILT_IN_SPRINTF_CHK
12660 && strcmp (fmt_str
, target_percent_s
) == 0)
12667 if (validate_arg (arg
, POINTER_TYPE
))
12669 len
= c_strlen (arg
, 1);
12670 if (! len
|| ! host_integerp (len
, 1))
12677 if (! integer_all_onesp (size
))
12679 if (! len
|| ! tree_int_cst_lt (len
, size
))
12683 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12684 or if format doesn't contain % chars or is "%s". */
12685 if (! integer_zerop (flag
))
12687 if (fmt_str
== NULL
)
12689 if (strchr (fmt_str
, target_percent
) != NULL
12690 && strcmp (fmt_str
, target_percent_s
))
12694 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12695 fn
= built_in_decls
[fcode
== BUILT_IN_VSPRINTF_CHK
12696 ? BUILT_IN_VSPRINTF
: BUILT_IN_SPRINTF
];
12700 return rewrite_call_expr_array (loc
, nargs
, args
, 4, fn
, 2, dest
, fmt
);
12703 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12704 a normal call should be emitted rather than expanding the function
12705 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12708 fold_builtin_sprintf_chk (location_t loc
, tree exp
,
12709 enum built_in_function fcode
)
12711 return fold_builtin_sprintf_chk_1 (loc
, call_expr_nargs (exp
),
12712 CALL_EXPR_ARGP (exp
), fcode
);
12715 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
12716 NULL_TREE if a normal call should be emitted rather than expanding
12717 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12718 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12719 passed as second argument. */
12722 fold_builtin_snprintf_chk_1 (location_t loc
, int nargs
, tree
*args
,
12723 tree maxlen
, enum built_in_function fcode
)
12725 tree dest
, size
, len
, fn
, fmt
, flag
;
12726 const char *fmt_str
;
12728 /* Verify the required arguments in the original call. */
12732 if (!validate_arg (dest
, POINTER_TYPE
))
12735 if (!validate_arg (len
, INTEGER_TYPE
))
12738 if (!validate_arg (flag
, INTEGER_TYPE
))
12741 if (!validate_arg (size
, INTEGER_TYPE
))
12744 if (!validate_arg (fmt
, POINTER_TYPE
))
12747 if (! host_integerp (size
, 1))
12750 if (! integer_all_onesp (size
))
12752 if (! host_integerp (len
, 1))
12754 /* If LEN is not constant, try MAXLEN too.
12755 For MAXLEN only allow optimizing into non-_ocs function
12756 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12757 if (maxlen
== NULL_TREE
|| ! host_integerp (maxlen
, 1))
12763 if (tree_int_cst_lt (size
, maxlen
))
12767 if (!init_target_chars ())
12770 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12771 or if format doesn't contain % chars or is "%s". */
12772 if (! integer_zerop (flag
))
12774 fmt_str
= c_getstr (fmt
);
12775 if (fmt_str
== NULL
)
12777 if (strchr (fmt_str
, target_percent
) != NULL
12778 && strcmp (fmt_str
, target_percent_s
))
12782 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12784 fn
= built_in_decls
[fcode
== BUILT_IN_VSNPRINTF_CHK
12785 ? BUILT_IN_VSNPRINTF
: BUILT_IN_SNPRINTF
];
12789 return rewrite_call_expr_array (loc
, nargs
, args
, 5, fn
, 3, dest
, len
, fmt
);
12792 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
12793 a normal call should be emitted rather than expanding the function
12794 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12795 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12796 passed as second argument. */
12799 fold_builtin_snprintf_chk (location_t loc
, tree exp
, tree maxlen
,
12800 enum built_in_function fcode
)
12802 return fold_builtin_snprintf_chk_1 (loc
, call_expr_nargs (exp
),
12803 CALL_EXPR_ARGP (exp
), maxlen
, fcode
);
12806 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12807 FMT and ARG are the arguments to the call; we don't fold cases with
12808 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12810 Return NULL_TREE if no simplification was possible, otherwise return the
12811 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12812 code of the function to be simplified. */
12815 fold_builtin_printf (location_t loc
, tree fndecl
, tree fmt
,
12816 tree arg
, bool ignore
,
12817 enum built_in_function fcode
)
12819 tree fn_putchar
, fn_puts
, newarg
, call
= NULL_TREE
;
12820 const char *fmt_str
= NULL
;
12822 /* If the return value is used, don't do the transformation. */
12826 /* Verify the required arguments in the original call. */
12827 if (!validate_arg (fmt
, POINTER_TYPE
))
12830 /* Check whether the format is a literal string constant. */
12831 fmt_str
= c_getstr (fmt
);
12832 if (fmt_str
== NULL
)
12835 if (fcode
== BUILT_IN_PRINTF_UNLOCKED
)
12837 /* If we're using an unlocked function, assume the other
12838 unlocked functions exist explicitly. */
12839 fn_putchar
= built_in_decls
[BUILT_IN_PUTCHAR_UNLOCKED
];
12840 fn_puts
= built_in_decls
[BUILT_IN_PUTS_UNLOCKED
];
12844 fn_putchar
= implicit_built_in_decls
[BUILT_IN_PUTCHAR
];
12845 fn_puts
= implicit_built_in_decls
[BUILT_IN_PUTS
];
12848 if (!init_target_chars ())
12851 if (strcmp (fmt_str
, target_percent_s
) == 0
12852 || strchr (fmt_str
, target_percent
) == NULL
)
12856 if (strcmp (fmt_str
, target_percent_s
) == 0)
12858 if (fcode
== BUILT_IN_VPRINTF
|| fcode
== BUILT_IN_VPRINTF_CHK
)
12861 if (!arg
|| !validate_arg (arg
, POINTER_TYPE
))
12864 str
= c_getstr (arg
);
12870 /* The format specifier doesn't contain any '%' characters. */
12871 if (fcode
!= BUILT_IN_VPRINTF
&& fcode
!= BUILT_IN_VPRINTF_CHK
12877 /* If the string was "", printf does nothing. */
12878 if (str
[0] == '\0')
12879 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl
)), 0);
12881 /* If the string has length of 1, call putchar. */
12882 if (str
[1] == '\0')
12884 /* Given printf("c"), (where c is any one character,)
12885 convert "c"[0] to an int and pass that to the replacement
12887 newarg
= build_int_cst (NULL_TREE
, str
[0]);
12889 call
= build_call_expr_loc (loc
, fn_putchar
, 1, newarg
);
12893 /* If the string was "string\n", call puts("string"). */
12894 size_t len
= strlen (str
);
12895 if ((unsigned char)str
[len
- 1] == target_newline
12896 && (size_t) (int) len
== len
12900 tree offset_node
, string_cst
;
12902 /* Create a NUL-terminated string that's one char shorter
12903 than the original, stripping off the trailing '\n'. */
12904 newarg
= build_string_literal (len
, str
);
12905 string_cst
= string_constant (newarg
, &offset_node
);
12906 gcc_checking_assert (string_cst
12907 && (TREE_STRING_LENGTH (string_cst
)
12909 && integer_zerop (offset_node
)
12911 TREE_STRING_POINTER (string_cst
)[len
- 1]
12912 == target_newline
);
12913 /* build_string_literal creates a new STRING_CST,
12914 modify it in place to avoid double copying. */
12915 newstr
= CONST_CAST (char *, TREE_STRING_POINTER (string_cst
));
12916 newstr
[len
- 1] = '\0';
12918 call
= build_call_expr_loc (loc
, fn_puts
, 1, newarg
);
12921 /* We'd like to arrange to call fputs(string,stdout) here,
12922 but we need stdout and don't have a way to get it yet. */
12927 /* The other optimizations can be done only on the non-va_list variants. */
12928 else if (fcode
== BUILT_IN_VPRINTF
|| fcode
== BUILT_IN_VPRINTF_CHK
)
12931 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12932 else if (strcmp (fmt_str
, target_percent_s_newline
) == 0)
12934 if (!arg
|| !validate_arg (arg
, POINTER_TYPE
))
12937 call
= build_call_expr_loc (loc
, fn_puts
, 1, arg
);
12940 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12941 else if (strcmp (fmt_str
, target_percent_c
) == 0)
12943 if (!arg
|| !validate_arg (arg
, INTEGER_TYPE
))
12946 call
= build_call_expr_loc (loc
, fn_putchar
, 1, arg
);
12952 return fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), call
);
12955 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
12956 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12957 more than 3 arguments, and ARG may be null in the 2-argument case.
12959 Return NULL_TREE if no simplification was possible, otherwise return the
12960 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12961 code of the function to be simplified. */
12964 fold_builtin_fprintf (location_t loc
, tree fndecl
, tree fp
,
12965 tree fmt
, tree arg
, bool ignore
,
12966 enum built_in_function fcode
)
12968 tree fn_fputc
, fn_fputs
, call
= NULL_TREE
;
12969 const char *fmt_str
= NULL
;
12971 /* If the return value is used, don't do the transformation. */
12975 /* Verify the required arguments in the original call. */
12976 if (!validate_arg (fp
, POINTER_TYPE
))
12978 if (!validate_arg (fmt
, POINTER_TYPE
))
12981 /* Check whether the format is a literal string constant. */
12982 fmt_str
= c_getstr (fmt
);
12983 if (fmt_str
== NULL
)
12986 if (fcode
== BUILT_IN_FPRINTF_UNLOCKED
)
12988 /* If we're using an unlocked function, assume the other
12989 unlocked functions exist explicitly. */
12990 fn_fputc
= built_in_decls
[BUILT_IN_FPUTC_UNLOCKED
];
12991 fn_fputs
= built_in_decls
[BUILT_IN_FPUTS_UNLOCKED
];
12995 fn_fputc
= implicit_built_in_decls
[BUILT_IN_FPUTC
];
12996 fn_fputs
= implicit_built_in_decls
[BUILT_IN_FPUTS
];
12999 if (!init_target_chars ())
13002 /* If the format doesn't contain % args or %%, use strcpy. */
13003 if (strchr (fmt_str
, target_percent
) == NULL
)
13005 if (fcode
!= BUILT_IN_VFPRINTF
&& fcode
!= BUILT_IN_VFPRINTF_CHK
13009 /* If the format specifier was "", fprintf does nothing. */
13010 if (fmt_str
[0] == '\0')
13012 /* If FP has side-effects, just wait until gimplification is
13014 if (TREE_SIDE_EFFECTS (fp
))
13017 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl
)), 0);
13020 /* When "string" doesn't contain %, replace all cases of
13021 fprintf (fp, string) with fputs (string, fp). The fputs
13022 builtin will take care of special cases like length == 1. */
13024 call
= build_call_expr_loc (loc
, fn_fputs
, 2, fmt
, fp
);
13027 /* The other optimizations can be done only on the non-va_list variants. */
13028 else if (fcode
== BUILT_IN_VFPRINTF
|| fcode
== BUILT_IN_VFPRINTF_CHK
)
13031 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
13032 else if (strcmp (fmt_str
, target_percent_s
) == 0)
13034 if (!arg
|| !validate_arg (arg
, POINTER_TYPE
))
13037 call
= build_call_expr_loc (loc
, fn_fputs
, 2, arg
, fp
);
13040 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
13041 else if (strcmp (fmt_str
, target_percent_c
) == 0)
13043 if (!arg
|| !validate_arg (arg
, INTEGER_TYPE
))
13046 call
= build_call_expr_loc (loc
, fn_fputc
, 2, arg
, fp
);
13051 return fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), call
);
13054 /* Initialize format string characters in the target charset. */
13057 init_target_chars (void)
13062 target_newline
= lang_hooks
.to_target_charset ('\n');
13063 target_percent
= lang_hooks
.to_target_charset ('%');
13064 target_c
= lang_hooks
.to_target_charset ('c');
13065 target_s
= lang_hooks
.to_target_charset ('s');
13066 if (target_newline
== 0 || target_percent
== 0 || target_c
== 0
13070 target_percent_c
[0] = target_percent
;
13071 target_percent_c
[1] = target_c
;
13072 target_percent_c
[2] = '\0';
13074 target_percent_s
[0] = target_percent
;
13075 target_percent_s
[1] = target_s
;
13076 target_percent_s
[2] = '\0';
13078 target_percent_s_newline
[0] = target_percent
;
13079 target_percent_s_newline
[1] = target_s
;
13080 target_percent_s_newline
[2] = target_newline
;
13081 target_percent_s_newline
[3] = '\0';
13088 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
13089 and no overflow/underflow occurred. INEXACT is true if M was not
13090 exactly calculated. TYPE is the tree type for the result. This
13091 function assumes that you cleared the MPFR flags and then
13092 calculated M to see if anything subsequently set a flag prior to
13093 entering this function. Return NULL_TREE if any checks fail. */
13096 do_mpfr_ckconv (mpfr_srcptr m
, tree type
, int inexact
)
13098 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13099 overflow/underflow occurred. If -frounding-math, proceed iff the
13100 result of calling FUNC was exact. */
13101 if (mpfr_number_p (m
) && !mpfr_overflow_p () && !mpfr_underflow_p ()
13102 && (!flag_rounding_math
|| !inexact
))
13104 REAL_VALUE_TYPE rr
;
13106 real_from_mpfr (&rr
, m
, type
, GMP_RNDN
);
13107 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
13108 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13109 but the mpft_t is not, then we underflowed in the
13111 if (real_isfinite (&rr
)
13112 && (rr
.cl
== rvc_zero
) == (mpfr_zero_p (m
) != 0))
13114 REAL_VALUE_TYPE rmode
;
13116 real_convert (&rmode
, TYPE_MODE (type
), &rr
);
13117 /* Proceed iff the specified mode can hold the value. */
13118 if (real_identical (&rmode
, &rr
))
13119 return build_real (type
, rmode
);
13125 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
13126 number and no overflow/underflow occurred. INEXACT is true if M
13127 was not exactly calculated. TYPE is the tree type for the result.
13128 This function assumes that you cleared the MPFR flags and then
13129 calculated M to see if anything subsequently set a flag prior to
13130 entering this function. Return NULL_TREE if any checks fail, if
13131 FORCE_CONVERT is true, then bypass the checks. */
13134 do_mpc_ckconv (mpc_srcptr m
, tree type
, int inexact
, int force_convert
)
13136 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13137 overflow/underflow occurred. If -frounding-math, proceed iff the
13138 result of calling FUNC was exact. */
13140 || (mpfr_number_p (mpc_realref (m
)) && mpfr_number_p (mpc_imagref (m
))
13141 && !mpfr_overflow_p () && !mpfr_underflow_p ()
13142 && (!flag_rounding_math
|| !inexact
)))
13144 REAL_VALUE_TYPE re
, im
;
13146 real_from_mpfr (&re
, mpc_realref (m
), TREE_TYPE (type
), GMP_RNDN
);
13147 real_from_mpfr (&im
, mpc_imagref (m
), TREE_TYPE (type
), GMP_RNDN
);
13148 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
13149 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13150 but the mpft_t is not, then we underflowed in the
13153 || (real_isfinite (&re
) && real_isfinite (&im
)
13154 && (re
.cl
== rvc_zero
) == (mpfr_zero_p (mpc_realref (m
)) != 0)
13155 && (im
.cl
== rvc_zero
) == (mpfr_zero_p (mpc_imagref (m
)) != 0)))
13157 REAL_VALUE_TYPE re_mode
, im_mode
;
13159 real_convert (&re_mode
, TYPE_MODE (TREE_TYPE (type
)), &re
);
13160 real_convert (&im_mode
, TYPE_MODE (TREE_TYPE (type
)), &im
);
13161 /* Proceed iff the specified mode can hold the value. */
13163 || (real_identical (&re_mode
, &re
)
13164 && real_identical (&im_mode
, &im
)))
13165 return build_complex (type
, build_real (TREE_TYPE (type
), re_mode
),
13166 build_real (TREE_TYPE (type
), im_mode
));
13172 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
13173 FUNC on it and return the resulting value as a tree with type TYPE.
13174 If MIN and/or MAX are not NULL, then the supplied ARG must be
13175 within those bounds. If INCLUSIVE is true, then MIN/MAX are
13176 acceptable values, otherwise they are not. The mpfr precision is
13177 set to the precision of TYPE. We assume that function FUNC returns
13178 zero if the result could be calculated exactly within the requested
13182 do_mpfr_arg1 (tree arg
, tree type
, int (*func
)(mpfr_ptr
, mpfr_srcptr
, mp_rnd_t
),
13183 const REAL_VALUE_TYPE
*min
, const REAL_VALUE_TYPE
*max
,
13186 tree result
= NULL_TREE
;
13190 /* To proceed, MPFR must exactly represent the target floating point
13191 format, which only happens when the target base equals two. */
13192 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
13193 && TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
13195 const REAL_VALUE_TYPE
*const ra
= &TREE_REAL_CST (arg
);
13197 if (real_isfinite (ra
)
13198 && (!min
|| real_compare (inclusive
? GE_EXPR
: GT_EXPR
, ra
, min
))
13199 && (!max
|| real_compare (inclusive
? LE_EXPR
: LT_EXPR
, ra
, max
)))
13201 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
13202 const int prec
= fmt
->p
;
13203 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
13207 mpfr_init2 (m
, prec
);
13208 mpfr_from_real (m
, ra
, GMP_RNDN
);
13209 mpfr_clear_flags ();
13210 inexact
= func (m
, m
, rnd
);
13211 result
= do_mpfr_ckconv (m
, type
, inexact
);
13219 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
13220 FUNC on it and return the resulting value as a tree with type TYPE.
13221 The mpfr precision is set to the precision of TYPE. We assume that
13222 function FUNC returns zero if the result could be calculated
13223 exactly within the requested precision. */
13226 do_mpfr_arg2 (tree arg1
, tree arg2
, tree type
,
13227 int (*func
)(mpfr_ptr
, mpfr_srcptr
, mpfr_srcptr
, mp_rnd_t
))
13229 tree result
= NULL_TREE
;
13234 /* To proceed, MPFR must exactly represent the target floating point
13235 format, which only happens when the target base equals two. */
13236 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
13237 && TREE_CODE (arg1
) == REAL_CST
&& !TREE_OVERFLOW (arg1
)
13238 && TREE_CODE (arg2
) == REAL_CST
&& !TREE_OVERFLOW (arg2
))
13240 const REAL_VALUE_TYPE
*const ra1
= &TREE_REAL_CST (arg1
);
13241 const REAL_VALUE_TYPE
*const ra2
= &TREE_REAL_CST (arg2
);
13243 if (real_isfinite (ra1
) && real_isfinite (ra2
))
13245 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
13246 const int prec
= fmt
->p
;
13247 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
13251 mpfr_inits2 (prec
, m1
, m2
, NULL
);
13252 mpfr_from_real (m1
, ra1
, GMP_RNDN
);
13253 mpfr_from_real (m2
, ra2
, GMP_RNDN
);
13254 mpfr_clear_flags ();
13255 inexact
= func (m1
, m1
, m2
, rnd
);
13256 result
= do_mpfr_ckconv (m1
, type
, inexact
);
13257 mpfr_clears (m1
, m2
, NULL
);
13264 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13265 FUNC on it and return the resulting value as a tree with type TYPE.
13266 The mpfr precision is set to the precision of TYPE. We assume that
13267 function FUNC returns zero if the result could be calculated
13268 exactly within the requested precision. */
13271 do_mpfr_arg3 (tree arg1
, tree arg2
, tree arg3
, tree type
,
13272 int (*func
)(mpfr_ptr
, mpfr_srcptr
, mpfr_srcptr
, mpfr_srcptr
, mp_rnd_t
))
13274 tree result
= NULL_TREE
;
13280 /* To proceed, MPFR must exactly represent the target floating point
13281 format, which only happens when the target base equals two. */
13282 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
13283 && TREE_CODE (arg1
) == REAL_CST
&& !TREE_OVERFLOW (arg1
)
13284 && TREE_CODE (arg2
) == REAL_CST
&& !TREE_OVERFLOW (arg2
)
13285 && TREE_CODE (arg3
) == REAL_CST
&& !TREE_OVERFLOW (arg3
))
13287 const REAL_VALUE_TYPE
*const ra1
= &TREE_REAL_CST (arg1
);
13288 const REAL_VALUE_TYPE
*const ra2
= &TREE_REAL_CST (arg2
);
13289 const REAL_VALUE_TYPE
*const ra3
= &TREE_REAL_CST (arg3
);
13291 if (real_isfinite (ra1
) && real_isfinite (ra2
) && real_isfinite (ra3
))
13293 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
13294 const int prec
= fmt
->p
;
13295 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
13299 mpfr_inits2 (prec
, m1
, m2
, m3
, NULL
);
13300 mpfr_from_real (m1
, ra1
, GMP_RNDN
);
13301 mpfr_from_real (m2
, ra2
, GMP_RNDN
);
13302 mpfr_from_real (m3
, ra3
, GMP_RNDN
);
13303 mpfr_clear_flags ();
13304 inexact
= func (m1
, m1
, m2
, m3
, rnd
);
13305 result
= do_mpfr_ckconv (m1
, type
, inexact
);
13306 mpfr_clears (m1
, m2
, m3
, NULL
);
13313 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13314 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13315 If ARG_SINP and ARG_COSP are NULL then the result is returned
13316 as a complex value.
13317 The type is taken from the type of ARG and is used for setting the
13318 precision of the calculation and results. */
13321 do_mpfr_sincos (tree arg
, tree arg_sinp
, tree arg_cosp
)
13323 tree
const type
= TREE_TYPE (arg
);
13324 tree result
= NULL_TREE
;
13328 /* To proceed, MPFR must exactly represent the target floating point
13329 format, which only happens when the target base equals two. */
13330 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
13331 && TREE_CODE (arg
) == REAL_CST
13332 && !TREE_OVERFLOW (arg
))
13334 const REAL_VALUE_TYPE
*const ra
= &TREE_REAL_CST (arg
);
13336 if (real_isfinite (ra
))
13338 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
13339 const int prec
= fmt
->p
;
13340 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
13341 tree result_s
, result_c
;
13345 mpfr_inits2 (prec
, m
, ms
, mc
, NULL
);
13346 mpfr_from_real (m
, ra
, GMP_RNDN
);
13347 mpfr_clear_flags ();
13348 inexact
= mpfr_sin_cos (ms
, mc
, m
, rnd
);
13349 result_s
= do_mpfr_ckconv (ms
, type
, inexact
);
13350 result_c
= do_mpfr_ckconv (mc
, type
, inexact
);
13351 mpfr_clears (m
, ms
, mc
, NULL
);
13352 if (result_s
&& result_c
)
13354 /* If we are to return in a complex value do so. */
13355 if (!arg_sinp
&& !arg_cosp
)
13356 return build_complex (build_complex_type (type
),
13357 result_c
, result_s
);
13359 /* Dereference the sin/cos pointer arguments. */
13360 arg_sinp
= build_fold_indirect_ref (arg_sinp
);
13361 arg_cosp
= build_fold_indirect_ref (arg_cosp
);
13362 /* Proceed if valid pointer type were passed in. */
13363 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp
)) == TYPE_MAIN_VARIANT (type
)
13364 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp
)) == TYPE_MAIN_VARIANT (type
))
13366 /* Set the values. */
13367 result_s
= fold_build2 (MODIFY_EXPR
, type
, arg_sinp
,
13369 TREE_SIDE_EFFECTS (result_s
) = 1;
13370 result_c
= fold_build2 (MODIFY_EXPR
, type
, arg_cosp
,
13372 TREE_SIDE_EFFECTS (result_c
) = 1;
13373 /* Combine the assignments into a compound expr. */
13374 result
= non_lvalue (fold_build2 (COMPOUND_EXPR
, type
,
13375 result_s
, result_c
));
13383 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13384 two-argument mpfr order N Bessel function FUNC on them and return
13385 the resulting value as a tree with type TYPE. The mpfr precision
13386 is set to the precision of TYPE. We assume that function FUNC
13387 returns zero if the result could be calculated exactly within the
13388 requested precision. */
13390 do_mpfr_bessel_n (tree arg1
, tree arg2
, tree type
,
13391 int (*func
)(mpfr_ptr
, long, mpfr_srcptr
, mp_rnd_t
),
13392 const REAL_VALUE_TYPE
*min
, bool inclusive
)
13394 tree result
= NULL_TREE
;
13399 /* To proceed, MPFR must exactly represent the target floating point
13400 format, which only happens when the target base equals two. */
13401 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
13402 && host_integerp (arg1
, 0)
13403 && TREE_CODE (arg2
) == REAL_CST
&& !TREE_OVERFLOW (arg2
))
13405 const HOST_WIDE_INT n
= tree_low_cst(arg1
, 0);
13406 const REAL_VALUE_TYPE
*const ra
= &TREE_REAL_CST (arg2
);
13409 && real_isfinite (ra
)
13410 && (!min
|| real_compare (inclusive
? GE_EXPR
: GT_EXPR
, ra
, min
)))
13412 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
13413 const int prec
= fmt
->p
;
13414 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
13418 mpfr_init2 (m
, prec
);
13419 mpfr_from_real (m
, ra
, GMP_RNDN
);
13420 mpfr_clear_flags ();
13421 inexact
= func (m
, n
, m
, rnd
);
13422 result
= do_mpfr_ckconv (m
, type
, inexact
);
13430 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13431 the pointer *(ARG_QUO) and return the result. The type is taken
13432 from the type of ARG0 and is used for setting the precision of the
13433 calculation and results. */
13436 do_mpfr_remquo (tree arg0
, tree arg1
, tree arg_quo
)
13438 tree
const type
= TREE_TYPE (arg0
);
13439 tree result
= NULL_TREE
;
13444 /* To proceed, MPFR must exactly represent the target floating point
13445 format, which only happens when the target base equals two. */
13446 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
13447 && TREE_CODE (arg0
) == REAL_CST
&& !TREE_OVERFLOW (arg0
)
13448 && TREE_CODE (arg1
) == REAL_CST
&& !TREE_OVERFLOW (arg1
))
13450 const REAL_VALUE_TYPE
*const ra0
= TREE_REAL_CST_PTR (arg0
);
13451 const REAL_VALUE_TYPE
*const ra1
= TREE_REAL_CST_PTR (arg1
);
13453 if (real_isfinite (ra0
) && real_isfinite (ra1
))
13455 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
13456 const int prec
= fmt
->p
;
13457 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
13462 mpfr_inits2 (prec
, m0
, m1
, NULL
);
13463 mpfr_from_real (m0
, ra0
, GMP_RNDN
);
13464 mpfr_from_real (m1
, ra1
, GMP_RNDN
);
13465 mpfr_clear_flags ();
13466 mpfr_remquo (m0
, &integer_quo
, m0
, m1
, rnd
);
13467 /* Remquo is independent of the rounding mode, so pass
13468 inexact=0 to do_mpfr_ckconv(). */
13469 result_rem
= do_mpfr_ckconv (m0
, type
, /*inexact=*/ 0);
13470 mpfr_clears (m0
, m1
, NULL
);
13473 /* MPFR calculates quo in the host's long so it may
13474 return more bits in quo than the target int can hold
13475 if sizeof(host long) > sizeof(target int). This can
13476 happen even for native compilers in LP64 mode. In
13477 these cases, modulo the quo value with the largest
13478 number that the target int can hold while leaving one
13479 bit for the sign. */
13480 if (sizeof (integer_quo
) * CHAR_BIT
> INT_TYPE_SIZE
)
13481 integer_quo
%= (long)(1UL << (INT_TYPE_SIZE
- 1));
13483 /* Dereference the quo pointer argument. */
13484 arg_quo
= build_fold_indirect_ref (arg_quo
);
13485 /* Proceed iff a valid pointer type was passed in. */
13486 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo
)) == integer_type_node
)
13488 /* Set the value. */
13489 tree result_quo
= fold_build2 (MODIFY_EXPR
,
13490 TREE_TYPE (arg_quo
), arg_quo
,
13491 build_int_cst (NULL
, integer_quo
));
13492 TREE_SIDE_EFFECTS (result_quo
) = 1;
13493 /* Combine the quo assignment with the rem. */
13494 result
= non_lvalue (fold_build2 (COMPOUND_EXPR
, type
,
13495 result_quo
, result_rem
));
13503 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13504 resulting value as a tree with type TYPE. The mpfr precision is
13505 set to the precision of TYPE. We assume that this mpfr function
13506 returns zero if the result could be calculated exactly within the
13507 requested precision. In addition, the integer pointer represented
13508 by ARG_SG will be dereferenced and set to the appropriate signgam
13512 do_mpfr_lgamma_r (tree arg
, tree arg_sg
, tree type
)
13514 tree result
= NULL_TREE
;
13518 /* To proceed, MPFR must exactly represent the target floating point
13519 format, which only happens when the target base equals two. Also
13520 verify ARG is a constant and that ARG_SG is an int pointer. */
13521 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
13522 && TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
)
13523 && TREE_CODE (TREE_TYPE (arg_sg
)) == POINTER_TYPE
13524 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg
))) == integer_type_node
)
13526 const REAL_VALUE_TYPE
*const ra
= TREE_REAL_CST_PTR (arg
);
13528 /* In addition to NaN and Inf, the argument cannot be zero or a
13529 negative integer. */
13530 if (real_isfinite (ra
)
13531 && ra
->cl
!= rvc_zero
13532 && !(real_isneg(ra
) && real_isinteger(ra
, TYPE_MODE (type
))))
13534 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
13535 const int prec
= fmt
->p
;
13536 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
13541 mpfr_init2 (m
, prec
);
13542 mpfr_from_real (m
, ra
, GMP_RNDN
);
13543 mpfr_clear_flags ();
13544 inexact
= mpfr_lgamma (m
, &sg
, m
, rnd
);
13545 result_lg
= do_mpfr_ckconv (m
, type
, inexact
);
13551 /* Dereference the arg_sg pointer argument. */
13552 arg_sg
= build_fold_indirect_ref (arg_sg
);
13553 /* Assign the signgam value into *arg_sg. */
13554 result_sg
= fold_build2 (MODIFY_EXPR
,
13555 TREE_TYPE (arg_sg
), arg_sg
,
13556 build_int_cst (NULL
, sg
));
13557 TREE_SIDE_EFFECTS (result_sg
) = 1;
13558 /* Combine the signgam assignment with the lgamma result. */
13559 result
= non_lvalue (fold_build2 (COMPOUND_EXPR
, type
,
13560 result_sg
, result_lg
));
13568 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
13569 function FUNC on it and return the resulting value as a tree with
13570 type TYPE. The mpfr precision is set to the precision of TYPE. We
13571 assume that function FUNC returns zero if the result could be
13572 calculated exactly within the requested precision. */
13575 do_mpc_arg1 (tree arg
, tree type
, int (*func
)(mpc_ptr
, mpc_srcptr
, mpc_rnd_t
))
13577 tree result
= NULL_TREE
;
13581 /* To proceed, MPFR must exactly represent the target floating point
13582 format, which only happens when the target base equals two. */
13583 if (TREE_CODE (arg
) == COMPLEX_CST
&& !TREE_OVERFLOW (arg
)
13584 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) == REAL_TYPE
13585 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg
))))->b
== 2)
13587 const REAL_VALUE_TYPE
*const re
= TREE_REAL_CST_PTR (TREE_REALPART (arg
));
13588 const REAL_VALUE_TYPE
*const im
= TREE_REAL_CST_PTR (TREE_IMAGPART (arg
));
13590 if (real_isfinite (re
) && real_isfinite (im
))
13592 const struct real_format
*const fmt
=
13593 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type
)));
13594 const int prec
= fmt
->p
;
13595 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
13596 const mpc_rnd_t crnd
= fmt
->round_towards_zero
? MPC_RNDZZ
: MPC_RNDNN
;
13600 mpc_init2 (m
, prec
);
13601 mpfr_from_real (mpc_realref(m
), re
, rnd
);
13602 mpfr_from_real (mpc_imagref(m
), im
, rnd
);
13603 mpfr_clear_flags ();
13604 inexact
= func (m
, m
, crnd
);
13605 result
= do_mpc_ckconv (m
, type
, inexact
, /*force_convert=*/ 0);
13613 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
13614 mpc function FUNC on it and return the resulting value as a tree
13615 with type TYPE. The mpfr precision is set to the precision of
13616 TYPE. We assume that function FUNC returns zero if the result
13617 could be calculated exactly within the requested precision. If
13618 DO_NONFINITE is true, then fold expressions containing Inf or NaN
13619 in the arguments and/or results. */
13622 do_mpc_arg2 (tree arg0
, tree arg1
, tree type
, int do_nonfinite
,
13623 int (*func
)(mpc_ptr
, mpc_srcptr
, mpc_srcptr
, mpc_rnd_t
))
13625 tree result
= NULL_TREE
;
13630 /* To proceed, MPFR must exactly represent the target floating point
13631 format, which only happens when the target base equals two. */
13632 if (TREE_CODE (arg0
) == COMPLEX_CST
&& !TREE_OVERFLOW (arg0
)
13633 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
13634 && TREE_CODE (arg1
) == COMPLEX_CST
&& !TREE_OVERFLOW (arg1
)
13635 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1
))) == REAL_TYPE
13636 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0
))))->b
== 2)
13638 const REAL_VALUE_TYPE
*const re0
= TREE_REAL_CST_PTR (TREE_REALPART (arg0
));
13639 const REAL_VALUE_TYPE
*const im0
= TREE_REAL_CST_PTR (TREE_IMAGPART (arg0
));
13640 const REAL_VALUE_TYPE
*const re1
= TREE_REAL_CST_PTR (TREE_REALPART (arg1
));
13641 const REAL_VALUE_TYPE
*const im1
= TREE_REAL_CST_PTR (TREE_IMAGPART (arg1
));
13644 || (real_isfinite (re0
) && real_isfinite (im0
)
13645 && real_isfinite (re1
) && real_isfinite (im1
)))
13647 const struct real_format
*const fmt
=
13648 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type
)));
13649 const int prec
= fmt
->p
;
13650 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
13651 const mpc_rnd_t crnd
= fmt
->round_towards_zero
? MPC_RNDZZ
: MPC_RNDNN
;
13655 mpc_init2 (m0
, prec
);
13656 mpc_init2 (m1
, prec
);
13657 mpfr_from_real (mpc_realref(m0
), re0
, rnd
);
13658 mpfr_from_real (mpc_imagref(m0
), im0
, rnd
);
13659 mpfr_from_real (mpc_realref(m1
), re1
, rnd
);
13660 mpfr_from_real (mpc_imagref(m1
), im1
, rnd
);
13661 mpfr_clear_flags ();
13662 inexact
= func (m0
, m0
, m1
, crnd
);
13663 result
= do_mpc_ckconv (m0
, type
, inexact
, do_nonfinite
);
13672 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
13673 a normal call should be emitted rather than expanding the function
13674 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13677 gimple_fold_builtin_sprintf_chk (gimple stmt
, enum built_in_function fcode
)
13679 int nargs
= gimple_call_num_args (stmt
);
13681 return fold_builtin_sprintf_chk_1 (gimple_location (stmt
), nargs
,
13683 ? gimple_call_arg_ptr (stmt
, 0)
13684 : &error_mark_node
), fcode
);
13687 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
13688 a normal call should be emitted rather than expanding the function
13689 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13690 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13691 passed as second argument. */
13694 gimple_fold_builtin_snprintf_chk (gimple stmt
, tree maxlen
,
13695 enum built_in_function fcode
)
13697 int nargs
= gimple_call_num_args (stmt
);
13699 return fold_builtin_snprintf_chk_1 (gimple_location (stmt
), nargs
,
13701 ? gimple_call_arg_ptr (stmt
, 0)
13702 : &error_mark_node
), maxlen
, fcode
);
13705 /* Builtins with folding operations that operate on "..." arguments
13706 need special handling; we need to store the arguments in a convenient
13707 data structure before attempting any folding. Fortunately there are
13708 only a few builtins that fall into this category. FNDECL is the
13709 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13710 result of the function call is ignored. */
13713 gimple_fold_builtin_varargs (tree fndecl
, gimple stmt
,
13714 bool ignore ATTRIBUTE_UNUSED
)
13716 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
13717 tree ret
= NULL_TREE
;
13721 case BUILT_IN_SPRINTF_CHK
:
13722 case BUILT_IN_VSPRINTF_CHK
:
13723 ret
= gimple_fold_builtin_sprintf_chk (stmt
, fcode
);
13726 case BUILT_IN_SNPRINTF_CHK
:
13727 case BUILT_IN_VSNPRINTF_CHK
:
13728 ret
= gimple_fold_builtin_snprintf_chk (stmt
, NULL_TREE
, fcode
);
13735 ret
= build1 (NOP_EXPR
, TREE_TYPE (ret
), ret
);
13736 TREE_NO_WARNING (ret
) = 1;
13742 /* A wrapper function for builtin folding that prevents warnings for
13743 "statement without effect" and the like, caused by removing the
13744 call node earlier than the warning is generated. */
13747 fold_call_stmt (gimple stmt
, bool ignore
)
13749 tree ret
= NULL_TREE
;
13750 tree fndecl
= gimple_call_fndecl (stmt
);
13751 location_t loc
= gimple_location (stmt
);
13753 && TREE_CODE (fndecl
) == FUNCTION_DECL
13754 && DECL_BUILT_IN (fndecl
)
13755 && !gimple_call_va_arg_pack_p (stmt
))
13757 int nargs
= gimple_call_num_args (stmt
);
13758 tree
*args
= (nargs
> 0
13759 ? gimple_call_arg_ptr (stmt
, 0)
13760 : &error_mark_node
);
13762 if (avoid_folding_inline_builtin (fndecl
))
13764 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
13766 return targetm
.fold_builtin (fndecl
, nargs
, args
, ignore
);
13770 if (nargs
<= MAX_ARGS_TO_FOLD_BUILTIN
)
13771 ret
= fold_builtin_n (loc
, fndecl
, args
, nargs
, ignore
);
13773 ret
= gimple_fold_builtin_varargs (fndecl
, stmt
, ignore
);
13776 /* Propagate location information from original call to
13777 expansion of builtin. Otherwise things like
13778 maybe_emit_chk_warning, that operate on the expansion
13779 of a builtin, will use the wrong location information. */
13780 if (gimple_has_location (stmt
))
13782 tree realret
= ret
;
13783 if (TREE_CODE (ret
) == NOP_EXPR
)
13784 realret
= TREE_OPERAND (ret
, 0);
13785 if (CAN_HAVE_LOCATION_P (realret
)
13786 && !EXPR_HAS_LOCATION (realret
))
13787 SET_EXPR_LOCATION (realret
, loc
);
13797 /* Look up the function in built_in_decls that corresponds to DECL
13798 and set ASMSPEC as its user assembler name. DECL must be a
13799 function decl that declares a builtin. */
13802 set_builtin_user_assembler_name (tree decl
, const char *asmspec
)
13805 gcc_assert (TREE_CODE (decl
) == FUNCTION_DECL
13806 && DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_NORMAL
13809 builtin
= built_in_decls
[DECL_FUNCTION_CODE (decl
)];
13810 set_user_assembler_name (builtin
, asmspec
);
13811 switch (DECL_FUNCTION_CODE (decl
))
13813 case BUILT_IN_MEMCPY
:
13814 init_block_move_fn (asmspec
);
13815 memcpy_libfunc
= set_user_assembler_libfunc ("memcpy", asmspec
);
13817 case BUILT_IN_MEMSET
:
13818 init_block_clear_fn (asmspec
);
13819 memset_libfunc
= set_user_assembler_libfunc ("memset", asmspec
);
13821 case BUILT_IN_MEMMOVE
:
13822 memmove_libfunc
= set_user_assembler_libfunc ("memmove", asmspec
);
13824 case BUILT_IN_MEMCMP
:
13825 memcmp_libfunc
= set_user_assembler_libfunc ("memcmp", asmspec
);
13827 case BUILT_IN_ABORT
:
13828 abort_libfunc
= set_user_assembler_libfunc ("abort", asmspec
);
13831 if (INT_TYPE_SIZE
< BITS_PER_WORD
)
13833 set_user_assembler_libfunc ("ffs", asmspec
);
13834 set_optab_libfunc (ffs_optab
, mode_for_size (INT_TYPE_SIZE
,
13835 MODE_INT
, 0), "ffs");
13843 /* Return true if DECL is a builtin that expands to a constant or similarly
13846 is_simple_builtin (tree decl
)
13848 if (decl
&& DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_NORMAL
)
13849 switch (DECL_FUNCTION_CODE (decl
))
13851 /* Builtins that expand to constants. */
13852 case BUILT_IN_CONSTANT_P
:
13853 case BUILT_IN_EXPECT
:
13854 case BUILT_IN_OBJECT_SIZE
:
13855 case BUILT_IN_UNREACHABLE
:
13856 /* Simple register moves or loads from stack. */
13857 case BUILT_IN_RETURN_ADDRESS
:
13858 case BUILT_IN_EXTRACT_RETURN_ADDR
:
13859 case BUILT_IN_FROB_RETURN_ADDR
:
13860 case BUILT_IN_RETURN
:
13861 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS
:
13862 case BUILT_IN_FRAME_ADDRESS
:
13863 case BUILT_IN_VA_END
:
13864 case BUILT_IN_STACK_SAVE
:
13865 case BUILT_IN_STACK_RESTORE
:
13866 /* Exception state returns or moves registers around. */
13867 case BUILT_IN_EH_FILTER
:
13868 case BUILT_IN_EH_POINTER
:
13869 case BUILT_IN_EH_COPY_VALUES
:
13879 /* Return true if DECL is a builtin that is not expensive, i.e., they are
13880 most probably expanded inline into reasonably simple code. This is a
13881 superset of is_simple_builtin. */
13883 is_inexpensive_builtin (tree decl
)
13887 else if (DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_MD
)
13889 else if (DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_NORMAL
)
13890 switch (DECL_FUNCTION_CODE (decl
))
13893 case BUILT_IN_ALLOCA
:
13894 case BUILT_IN_BSWAP32
:
13895 case BUILT_IN_BSWAP64
:
13897 case BUILT_IN_CLZIMAX
:
13898 case BUILT_IN_CLZL
:
13899 case BUILT_IN_CLZLL
:
13901 case BUILT_IN_CTZIMAX
:
13902 case BUILT_IN_CTZL
:
13903 case BUILT_IN_CTZLL
:
13905 case BUILT_IN_FFSIMAX
:
13906 case BUILT_IN_FFSL
:
13907 case BUILT_IN_FFSLL
:
13908 case BUILT_IN_IMAXABS
:
13909 case BUILT_IN_FINITE
:
13910 case BUILT_IN_FINITEF
:
13911 case BUILT_IN_FINITEL
:
13912 case BUILT_IN_FINITED32
:
13913 case BUILT_IN_FINITED64
:
13914 case BUILT_IN_FINITED128
:
13915 case BUILT_IN_FPCLASSIFY
:
13916 case BUILT_IN_ISFINITE
:
13917 case BUILT_IN_ISINF_SIGN
:
13918 case BUILT_IN_ISINF
:
13919 case BUILT_IN_ISINFF
:
13920 case BUILT_IN_ISINFL
:
13921 case BUILT_IN_ISINFD32
:
13922 case BUILT_IN_ISINFD64
:
13923 case BUILT_IN_ISINFD128
:
13924 case BUILT_IN_ISNAN
:
13925 case BUILT_IN_ISNANF
:
13926 case BUILT_IN_ISNANL
:
13927 case BUILT_IN_ISNAND32
:
13928 case BUILT_IN_ISNAND64
:
13929 case BUILT_IN_ISNAND128
:
13930 case BUILT_IN_ISNORMAL
:
13931 case BUILT_IN_ISGREATER
:
13932 case BUILT_IN_ISGREATEREQUAL
:
13933 case BUILT_IN_ISLESS
:
13934 case BUILT_IN_ISLESSEQUAL
:
13935 case BUILT_IN_ISLESSGREATER
:
13936 case BUILT_IN_ISUNORDERED
:
13937 case BUILT_IN_VA_ARG_PACK
:
13938 case BUILT_IN_VA_ARG_PACK_LEN
:
13939 case BUILT_IN_VA_COPY
:
13940 case BUILT_IN_TRAP
:
13941 case BUILT_IN_SAVEREGS
:
13942 case BUILT_IN_POPCOUNTL
:
13943 case BUILT_IN_POPCOUNTLL
:
13944 case BUILT_IN_POPCOUNTIMAX
:
13945 case BUILT_IN_POPCOUNT
:
13946 case BUILT_IN_PARITYL
:
13947 case BUILT_IN_PARITYLL
:
13948 case BUILT_IN_PARITYIMAX
:
13949 case BUILT_IN_PARITY
:
13950 case BUILT_IN_LABS
:
13951 case BUILT_IN_LLABS
:
13952 case BUILT_IN_PREFETCH
:
13956 return is_simple_builtin (decl
);