/* Expand builtin functions.
   Copyright (C) 1988-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "tree-object-size.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "typeclass.h"
#include "langhooks.h"
#include "tree-ssanames.h"
#include "value-prof.h"
#include "diagnostic-core.h"
static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
struct target_builtins default_target_builtins;
struct target_builtins *this_target_builtins = &default_target_builtins;
/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN
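
/* As an illustration of the X-macro above: an entry in builtins.def such
   as DEF_BUILTIN (BUILT_IN_MEMCPY, ...) expands to the string literal
   "BUILT_IN_MEMCPY", so built_in_names[] ends up holding the stringified
   enumerator of every builtin, indexed by its function code.  */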
/* Setup an array of _DECL trees, make sure each element is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info;

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;
static rtx c_readstr (const char *, enum machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
static rtx result_vector (int, rtx);
#endif
static void expand_builtin_update_setjmp_buf (rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static void expand_errno_check (tree, rtx);
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
                                        enum machine_mode, int);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx,
                                       enum machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
static rtx expand_builtin_alloca (tree, bool);
static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree fold_builtin_nan (tree, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static bool integer_valued_real_p (tree);
static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_sqrt (location_t, tree, tree);
static tree fold_builtin_cbrt (location_t, tree, tree);
static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
static tree fold_builtin_cos (location_t, tree, tree, tree);
static tree fold_builtin_cosh (location_t, tree, tree, tree);
static tree fold_builtin_tan (tree, tree);
static tree fold_builtin_trunc (location_t, tree, tree);
static tree fold_builtin_floor (location_t, tree, tree);
static tree fold_builtin_ceil (location_t, tree, tree);
static tree fold_builtin_round (location_t, tree, tree);
static tree fold_builtin_int_roundingfn (location_t, tree, tree);
static tree fold_builtin_bitop (tree, tree);
static tree fold_builtin_strchr (location_t, tree, tree, tree);
static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_strcmp (location_t, tree, tree);
static tree fold_builtin_strncmp (location_t, tree, tree, tree);
static tree fold_builtin_signbit (location_t, tree, tree);
static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree,
                                        enum tree_code, enum tree_code);
static tree fold_builtin_n (location_t, tree, tree *, int, bool);
static tree fold_builtin_0 (location_t, tree, bool);
static tree fold_builtin_1 (location_t, tree, tree, bool);
static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
static tree fold_builtin_varargs (location_t, tree, tree, bool);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strstr (location_t, tree, tree, tree);
static tree fold_builtin_strrchr (location_t, tree, tree, tree);
static tree fold_builtin_strncat (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
                                      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);
static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
static tree fold_builtin_printf (location_t, tree, tree, tree, bool,
                                 enum built_in_function);
static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
                                  enum built_in_function);

static unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
static char target_percent_c[3];
char target_percent_s[3];
static char target_percent_s_newline[4];
static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
                          const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *,
                          bool);
static tree do_mpfr_arg2 (tree, tree, tree,
                          int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr,
                                  mp_rnd_t));
static tree do_mpfr_arg3 (tree, tree, tree, tree,
                          int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr,
                                  mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_sincos (tree, tree, tree);
static tree do_mpfr_bessel_n (tree, tree, tree,
                              int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
                              const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);
/* Return true if NAME starts with __builtin_ or __sync_.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  if (flag_cilkplus
      && (!strcmp (name, "__cilkrts_detach")
          || !strcmp (name, "__cilkrts_pop_frame")))
    return true;
  return false;
}
/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}
/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

static bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}
/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in alignp and N in
   *BITPOSP and return true.  Otherwise return false and store BITS_PER_UNIT to
   *alignp and any bit-offset to *bitposp.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
                        unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode;
  int unsignedp, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                             &mode, &unsignedp, &volatilep, true);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
         alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
         allows the low bit to be used as a virtual bit, we know
         that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
        align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
        align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    {
      align = TYPE_ALIGN (TREE_TYPE (exp));
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
           || TREE_CODE (exp) == MEM_REF
           || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;

      if (TREE_CODE (addr) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
        {
          align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
                   & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
          align *= BITS_PER_UNIT;
          addr = TREE_OPERAND (addr, 0);
        }

      known_alignment
        = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* The alignment of the pointer operand in a TARGET_MEM_REF
         has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
        {
          if (TMR_INDEX (exp))
            {
              unsigned HOST_WIDE_INT step = 1;
              if (TMR_STEP (exp))
                step = TREE_INT_CST_LOW (TMR_STEP (exp));
              align = MIN (align, (step & -step) * BITS_PER_UNIT);
            }
          if (TMR_INDEX2 (exp))
            align = BITS_PER_UNIT;
          known_alignment = false;
        }

      /* When EXP is an actual memory reference then we can use
         TYPE_ALIGN of a pointer indirection to derive alignment.
         Do so only if get_pointer_alignment_1 did not reveal absolute
         alignment knowledge and if using that alignment would
         improve the situation.  */
      if (!addr_p && !known_alignment
          && TYPE_ALIGN (TREE_TYPE (exp)) > align)
        align = TYPE_ALIGN (TREE_TYPE (exp));
      else
        {
          /* Else adjust bitpos accordingly.  */
          bitpos += ptr_bitpos;
          if (TREE_CODE (exp) == MEM_REF
              || TREE_CODE (exp) == TARGET_MEM_REF)
            bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
        }
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
         wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
        align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
        {
          unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
          if (inner)
            align = MIN (align, inner);
        }
    }

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}
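
/* Worked example for the function above (illustrative values): for an
   access at byte offset 2 into a decl with DECL_ALIGN of 64 bits and no
   variable offset, we compute align = 64 and *bitposp = 16, i.e. the
   address is 16 bits past a 64-bit boundary, satisfying M = 64, N = 16,
   N < M.  */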
/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be determined,
   store M in alignp and N in *BITPOSP and return true.  Otherwise return false
   and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
                        unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}
/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);
  return align;
}
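
/* Note on the reduction above: bitpos & -bitpos isolates the lowest set
   bit in two's complement arithmetic.  E.g. with align = 64 and
   bitpos = 16, the alignment actually guaranteed for the object is
   16 bits, since the address is only known modulo 64.  */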
/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in alignp and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
                         unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
                                   alignp, bitposp, true);
  else if (TREE_CODE (exp) == SSA_NAME
           && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
        {
          *bitposp = ptr_misalign * BITS_PER_UNIT;
          *alignp = ptr_align * BITS_PER_UNIT;
          /* We cannot really tell whether this result is an approximation.  */
          return true;
        }
      else
        {
          *bitposp = 0;
          *alignp = BITS_PER_UNIT;
          return false;
        }
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
                  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}
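
/* Illustration of the SSA_NAME case above (assumed values): if points-to
   analysis recorded ptr_align = 8 and ptr_misalign = 4 for a pointer,
   the result is *alignp = 64 and *bitposp = 32 in bits, i.e. the pointer
   equals 4 (mod 8) in bytes.  */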
/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);

  return align;
}
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
        return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_LOC (src, input_location);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
         compute the offset to the following null if we don't know where to
         start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
        if (ptr[i] == 0)
          return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
         has no internal zero bytes.  We can assume that the offset falls
         within the bounds of the string; otherwise, the programmer deserves
         what he gets.  Subtract the offset from the length of the string,
         and return that.  This would perhaps not be valid if we were dealing
         with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! tree_fits_shwi_p (offset_node))
    offset = -1;
  else
    offset = tree_to_shwi (offset_node);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
          && !TREE_NO_WARNING (src))
        {
          warning_at (loc, 0, "offset outside bounds of constant string");
          TREE_NO_WARNING (src) = 1;
        }
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
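
/* Example for the cases above (illustrative): for src == "hello" with a
   constant offset of 2, strlen (ptr + 2) yields 3; with a non-constant
   offset and no embedded nul byte, the result is the symbolic difference
   size_diffop (size_int (5), offset_node).  */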
/* Return a char pointer for a C string if it is a string constant
   or sum of string constant and integer constant.  */

static const char *
c_getstr (tree src)
{
  tree offset_node;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  if (offset_node == 0)
    return TREE_STRING_POINTER (src);
  else if (!tree_fits_uhwi_p (offset_node)
           || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
    return 0;

  return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
}
/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, enum machine_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
        j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
          && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
        j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
        ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
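
/* Byte-order illustration for c_readstr (assuming common endian
   configurations): reading "ab" in a 16-bit integer mode yields 0x6261
   when the target is little-endian (str[0] lands in the low byte) and
   0x6162 when it is big-endian, matching what the target itself would
   read from memory.  */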
/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
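
/* Example for target_char_cast (assuming an 8-bit target char and 8-bit
   host char): a CST of 0x141 is masked to val == 0x41 == hostval, so *p
   is set to 0x41 and 0 is returned; a value that does not survive both
   truncations identically makes val != hostval and 1 is returned.  */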
/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
          && (TREE_CODE (exp) == PARM_DECL
              || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
         frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
                        plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
816 static alias_set_type setjmp_alias_set
= -1;
/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
                                           GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
                  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
                            plus_constant (Pmode, buf_addr,
                                           2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
#ifdef HAVE_builtin_setjmp_setup
  if (HAVE_builtin_setjmp_setup)
    emit_insn (gen_builtin_setjmp_setup (buf_addr));
#endif

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
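
/* Resulting layout of the setjmp buffer (as written above): word 0 holds
   the frame pointer value, word 1 the address of RECEIVER_LABEL, and the
   words from offset 2 * GET_MODE_SIZE (Pmode) on hold the SAVE_NONLOCAL
   stack save area.  expand_builtin_longjmp below reads the same slots
   back in the same order.  */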
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    {
      /* First adjust our frame pointer to its actual value.  It was
         previously set to the start of the virtual area corresponding to
         the stacked variables when we branched here and now needs to be
         adjusted to the actual hardware fp value.

         Assignments to virtual registers are converted by
         instantiate_virtual_regs into the corresponding assignment
         to the underlying register (fp in this case) that makes
         the original assignment true.
         So the following insn will actually be decrementing fp by
         STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
         Mark it used (so that the previous assignment remains live once
         the frame pointer is eliminated) and clobbered (to represent the
         implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

#if !HARD_FRAME_POINTER_IS_ARG_POINTER
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
         frame pointer, we don't need to restore it.  We assume here
         that if such an elimination is present, it can always be used.
         This is the case on all known machines; if we don't make this
         assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[]
        = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
        if (elim_regs[i].from == ARG_POINTER_REGNUM
            && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
          break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
        {
          /* Now restore our arg pointer from the address at which it
             was saved in our stack frame.  */
          emit_move_insn (crtl->args.internal_arg_pointer,
                          copy_to_reg (get_arg_pointer_save_area ()));
        }
    }
#endif

#ifdef HAVE_builtin_setjmp_receiver
  if (receiver_label != NULL && HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
                                               GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
                                                   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
         from expand_goto in stmt.c; see there for detailed comments.  */
#ifdef HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
        /* We have to pass a value to the nonlocal_goto pattern that will
           get copied into the static_chain pointer, but it does not matter
           what that value is, because builtin_setjmp does not use it.  */
        emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
        {
          lab = copy_to_reg (lab);

          emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
          emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

          emit_move_insn (hard_frame_pointer_rtx, fp);
          emit_stack_restore (SAVE_NONLOCAL, stack);

          emit_use (hard_frame_pointer_rtx);
          emit_use (stack_pointer_rtx);
          emit_indirect_jump (lab);
        }
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
        {
          add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
          break;
        }
      else if (CALL_P (insn))
        break;
    }
}
/* Return true if the const call expr arg iterator ITER has more
   arguments to visit.  */

static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipses, otherwise the last specifier must be a
   VOID_TYPE.  */

bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
        {
        case 0:
          /* This signifies an ellipses, any further arguments are all ok.  */
          res = true;
          goto end;
        case VOID_TYPE:
          /* This signifies an endlink, if no arguments remain, return
             true, otherwise return false.  */
          res = !more_const_call_expr_args_p (&iter);
          goto end;
        default:
          /* If no parameters remain or the parameter's code does not
             match the specified code, return false.  Otherwise continue
             checking any remaining arguments.  */
          arg = next_const_call_expr_arg (&iter);
          if (!validate_arg (arg, code))
            goto end;
          break;
        }
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end:
  va_end (ap);
  return res;
}
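
/* Typical use of validate_arglist (see e.g. expand_builtin_nonlocal_goto
   below):

     if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
       return NULL_RTX;

   which accepts exactly two pointer arguments; a trailing 0 instead of
   VOID_TYPE would allow any further arguments.  */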
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
                      plus_constant (Pmode, r_save_area,
                                     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

#ifdef HAVE_nonlocal_goto
  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (HAVE_nonlocal_goto)
    emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
#endif
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
         not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
         conservatively assume that the target function makes use of it.
         The prologue of functions with nonlocal gotos must therefore
         initialize the GP register to the appropriate value, and we
         must then make sure that this value is live at the point
         of the jump.  (Note that this doesn't necessarily apply
         to targets with a nonlocal_goto pattern; they are free
         to implement it in their own way.  Note also that this is
         a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
          && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
        emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
        {
          add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
          break;
        }
      else if (CALL_P (insn))
        break;
    }

  return const0_rtx;
}
1189 (not all will be used on all machines) that was passed to __builtin_setjmp.
1190 It updates the stack pointer in that block to correspond to the current
1194 expand_builtin_update_setjmp_buf (rtx buf_addr
)
1196 enum machine_mode sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
1198 = gen_rtx_MEM (sa_mode
,
1201 plus_constant (Pmode
, buf_addr
,
1202 2 * GET_MODE_SIZE (Pmode
))));
1204 emit_stack_save (SAVE_NONLOCAL
, &stack_save
);
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
               " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

#ifdef HAVE_prefetch
  if (HAVE_prefetch)
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
        return;
    }
#endif

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
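
/* Source-level example (illustrative): a call such as
   __builtin_prefetch (&a[i + 8], 0, 3) reaches the code above with
   read/write flag 0 (read) and locality 3; on targets without a prefetch
   pattern only the side effects of the address computation survive.  */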
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
         && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
                     build_array_type (char_type_node,
                                       build_range_type (sizetype,
                                                         size_one_node, len)),
                     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
           && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
                                                     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
                         build_array_type (char_type_node,
                                           build_range_type (sizetype,
                                                             size_zero_node,
                                                             NULL)),
                         exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}
/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)
/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
         passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
        size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (FUNCTION_ARG_REGNO_P (regno))
          {
            mode = targetm.calls.get_raw_arg_mode (regno);

            gcc_assert (mode != VOIDmode);

            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            size += GET_MODE_SIZE (mode);
            apply_args_mode[regno] = mode;
          }
        else
          apply_args_mode[regno] = VOIDmode;
    }
  return size;
}
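
/* Illustration of the rounding above: with size == 10 and a register
   mode that requires align == 4 bytes, CEIL (10, 4) * 4 == 12, so the
   slot for that register starts on the next 4-byte boundary before size
   is advanced by GET_MODE_SIZE (mode).  */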
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (targetm.calls.function_value_regno_p (regno))
          {
            mode = targetm.calls.get_raw_result_mode (regno);

            gcc_assert (mode != VOIDmode);

            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            size += GET_MODE_SIZE (mode);
            apply_result_mode[regno] = mode;
          }
        else
          apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
         the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
        mem = adjust_address (result, mode, size);
        savevec[nelts++] = (savep
                            ? gen_rtx_SET (VOIDmode, mem, reg)
                            : gen_rtx_SET (VOIDmode, reg, mem));
        size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  enum machine_mode mode;
  rtx struct_incoming_value
    = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;

        tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

        emit_move_insn (adjust_address (registers, mode, size), tem);
        size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
#ifdef STACK_GROWS_DOWNWARD
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  tem
    = force_operand (plus_constant (Pmode, tem, crtl->args.pretend_args_size),
                     NULL_RTX);
#endif
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
                      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;
    rtx_insn *seq;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
        && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx incoming_args, result, reg, dest, src;
  rtx_call_insn *call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value
    = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
                                       incoming_args, 0, OPTAB_LIB_WIDEN);
#endif

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
  else
#endif
    emit_stack_save (SAVE_BLOCK, &old_stack_level);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
#ifndef STACK_GROWS_DOWNWARD
  if (CONST_INT_P (argsize))
    dest = plus_constant (Pmode, dest, -INTVAL (argsize));
  else
    dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
#endif
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, regno);
        emit_move_insn (reg, adjust_address (arguments, mode, size));
        use_reg (&call_fusage, reg);
        size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
        use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
                                      result, result_vector (1, result)));
  else
#endif
#ifdef HAVE_call_value
  if (HAVE_call_value)
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
         express a call that sets more than one return register using
         call_value; use untyped_call for that.  In fact, untyped_call
         only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if ((mode = apply_result_mode[regno]) != VOIDmode)
          {
            gcc_assert (!valreg); /* HAVE_untyped_call required.  */

            valreg = gen_rtx_REG (mode, regno);
          }

      emit_call_insn (GEN_CALL_VALUE (valreg,
                                      gen_rtx_MEM (FUNCTION_MODE, function),
                                      const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
#endif
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
  else
#endif
    emit_stack_restore (SAVE_BLOCK, old_stack_level);
  fixup_args_size_notes (call_insn, get_last_insn (), 0);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
/* Perform an untyped return.  */

static void
expand_builtin_return (rtx result)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx reg;
  rtx_insn *call_fusage = 0;

  result = convert_memory_address (Pmode, result);

  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

#ifdef HAVE_untyped_return
  if (HAVE_untyped_return)
    {
      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
      emit_barrier ();
      return;
    }
#endif

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
        emit_move_insn (reg, adjust_address (result, mode, size));

        push_to_sequence (call_fusage);
        emit_use (reg);
        call_fusage = get_insns ();
        end_sequence ();
        size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insn (call_fusage);

  /* Return whatever value was restored by jumping directly to the end
     of the function.  */
  expand_naked_return ();
}
/* Used by expand_builtin_classify_type and fold_builtin_classify_type.  */

static enum type_class
type_to_class (tree type)
{
  switch (TREE_CODE (type))
    {
    case VOID_TYPE:        return void_type_class;
    case INTEGER_TYPE:     return integer_type_class;
    case ENUMERAL_TYPE:    return enumeral_type_class;
    case BOOLEAN_TYPE:     return boolean_type_class;
    case POINTER_TYPE:     return pointer_type_class;
    case REFERENCE_TYPE:   return reference_type_class;
    case OFFSET_TYPE:      return offset_type_class;
    case REAL_TYPE:        return real_type_class;
    case COMPLEX_TYPE:     return complex_type_class;
    case FUNCTION_TYPE:    return function_type_class;
    case METHOD_TYPE:      return method_type_class;
    case RECORD_TYPE:      return record_type_class;
    case UNION_TYPE:
    case QUAL_UNION_TYPE:  return union_type_class;
    case ARRAY_TYPE:       return (TYPE_STRING_FLAG (type)
                                   ? string_type_class : array_type_class);
    case LANG_TYPE:        return lang_type_class;
    default:               return no_type_class;
    }
}
/* Expand a call EXP to __builtin_classify_type.  */

static rtx
expand_builtin_classify_type (tree exp)
{
  if (call_expr_nargs (exp))
    return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
  return GEN_INT (no_type_class);
}
/* This helper macro, meant to be used in mathfn_built_in below,
   determines which among a set of three builtin math functions is
   appropriate for a given type mode.  The `F' and `L' cases are
   automatically generated from the `double' case.  */
#define CASE_MATHFN(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
  fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
  fcodel = BUILT_IN_MATHFN##L ; break;
/* Similar to above, but appends _R after any F/L suffix.  */
#define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
  fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
  fcodel = BUILT_IN_MATHFN##L_R ; break;
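
/* For reference, CASE_MATHFN (BUILT_IN_SIN) expands to

     case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
       fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
       fcodel = BUILT_IN_SINL; break;

   so each CASE_MATHFN line below handles the double, float and long
   double variants of one math builtin at once.  */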
/* Return the mathematical function equivalent to FN but operating directly
   on TYPE, if available.  If IMPLICIT is true use the implicit builtin
   declaration, otherwise use the explicit declaration.  If we can't do the
   conversion, return zero.  */

static tree
mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
{
  enum built_in_function fcode, fcodef, fcodel, fcode2;

  switch (fn)
    {
      CASE_MATHFN (BUILT_IN_ACOS)
      CASE_MATHFN (BUILT_IN_ACOSH)
      CASE_MATHFN (BUILT_IN_ASIN)
      CASE_MATHFN (BUILT_IN_ASINH)
      CASE_MATHFN (BUILT_IN_ATAN)
      CASE_MATHFN (BUILT_IN_ATAN2)
      CASE_MATHFN (BUILT_IN_ATANH)
      CASE_MATHFN (BUILT_IN_CBRT)
      CASE_MATHFN (BUILT_IN_CEIL)
      CASE_MATHFN (BUILT_IN_CEXPI)
      CASE_MATHFN (BUILT_IN_COPYSIGN)
      CASE_MATHFN (BUILT_IN_COS)
      CASE_MATHFN (BUILT_IN_COSH)
      CASE_MATHFN (BUILT_IN_DREM)
      CASE_MATHFN (BUILT_IN_ERF)
      CASE_MATHFN (BUILT_IN_ERFC)
      CASE_MATHFN (BUILT_IN_EXP)
      CASE_MATHFN (BUILT_IN_EXP10)
      CASE_MATHFN (BUILT_IN_EXP2)
      CASE_MATHFN (BUILT_IN_EXPM1)
      CASE_MATHFN (BUILT_IN_FABS)
      CASE_MATHFN (BUILT_IN_FDIM)
      CASE_MATHFN (BUILT_IN_FLOOR)
      CASE_MATHFN (BUILT_IN_FMA)
      CASE_MATHFN (BUILT_IN_FMAX)
      CASE_MATHFN (BUILT_IN_FMIN)
      CASE_MATHFN (BUILT_IN_FMOD)
      CASE_MATHFN (BUILT_IN_FREXP)
      CASE_MATHFN (BUILT_IN_GAMMA)
      CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
      CASE_MATHFN (BUILT_IN_HUGE_VAL)
      CASE_MATHFN (BUILT_IN_HYPOT)
      CASE_MATHFN (BUILT_IN_ILOGB)
      CASE_MATHFN (BUILT_IN_ICEIL)
      CASE_MATHFN (BUILT_IN_IFLOOR)
      CASE_MATHFN (BUILT_IN_INF)
      CASE_MATHFN (BUILT_IN_IRINT)
      CASE_MATHFN (BUILT_IN_IROUND)
      CASE_MATHFN (BUILT_IN_ISINF)
      CASE_MATHFN (BUILT_IN_J0)
      CASE_MATHFN (BUILT_IN_J1)
      CASE_MATHFN (BUILT_IN_JN)
      CASE_MATHFN (BUILT_IN_LCEIL)
      CASE_MATHFN (BUILT_IN_LDEXP)
      CASE_MATHFN (BUILT_IN_LFLOOR)
      CASE_MATHFN (BUILT_IN_LGAMMA)
      CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
      CASE_MATHFN (BUILT_IN_LLCEIL)
      CASE_MATHFN (BUILT_IN_LLFLOOR)
      CASE_MATHFN (BUILT_IN_LLRINT)
      CASE_MATHFN (BUILT_IN_LLROUND)
      CASE_MATHFN (BUILT_IN_LOG)
      CASE_MATHFN (BUILT_IN_LOG10)
      CASE_MATHFN (BUILT_IN_LOG1P)
      CASE_MATHFN (BUILT_IN_LOG2)
      CASE_MATHFN (BUILT_IN_LOGB)
      CASE_MATHFN (BUILT_IN_LRINT)
      CASE_MATHFN (BUILT_IN_LROUND)
      CASE_MATHFN (BUILT_IN_MODF)
      CASE_MATHFN (BUILT_IN_NAN)
      CASE_MATHFN (BUILT_IN_NANS)
      CASE_MATHFN (BUILT_IN_NEARBYINT)
      CASE_MATHFN (BUILT_IN_NEXTAFTER)
      CASE_MATHFN (BUILT_IN_NEXTTOWARD)
      CASE_MATHFN (BUILT_IN_POW)
      CASE_MATHFN (BUILT_IN_POWI)
      CASE_MATHFN (BUILT_IN_POW10)
      CASE_MATHFN (BUILT_IN_REMAINDER)
      CASE_MATHFN (BUILT_IN_REMQUO)
      CASE_MATHFN (BUILT_IN_RINT)
      CASE_MATHFN (BUILT_IN_ROUND)
      CASE_MATHFN (BUILT_IN_SCALB)
      CASE_MATHFN (BUILT_IN_SCALBLN)
      CASE_MATHFN (BUILT_IN_SCALBN)
      CASE_MATHFN (BUILT_IN_SIGNBIT)
      CASE_MATHFN (BUILT_IN_SIGNIFICAND)
      CASE_MATHFN (BUILT_IN_SIN)
      CASE_MATHFN (BUILT_IN_SINCOS)
      CASE_MATHFN (BUILT_IN_SINH)
      CASE_MATHFN (BUILT_IN_SQRT)
      CASE_MATHFN (BUILT_IN_TAN)
      CASE_MATHFN (BUILT_IN_TANH)
      CASE_MATHFN (BUILT_IN_TGAMMA)
      CASE_MATHFN (BUILT_IN_TRUNC)
      CASE_MATHFN (BUILT_IN_Y0)
      CASE_MATHFN (BUILT_IN_Y1)
      CASE_MATHFN (BUILT_IN_YN)

      default:
        return NULL_TREE;
    }

  if (TYPE_MAIN_VARIANT (type) == double_type_node)
    fcode2 = fcode;
  else if (TYPE_MAIN_VARIANT (type) == float_type_node)
    fcode2 = fcodef;
  else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
    fcode2 = fcodel;
  else
    return NULL_TREE;

  if (implicit_p && !builtin_decl_implicit_p (fcode2))
    return NULL_TREE;

  return builtin_decl_explicit (fcode2);
}
/* Like mathfn_built_in_1(), but always use the implicit array.  */

tree
mathfn_built_in (tree type, enum built_in_function fn)
{
  return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
}
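
/* Example use (illustrative): mathfn_built_in (float_type_node,
   BUILT_IN_SIN) maps the double builtin onto its float variant and
   returns the decl for BUILT_IN_SINF, provided the builtin is
   implicitly available.  The lceil/lfloor expanders below rely on this
   to pick a fallback decl matching the argument type.  */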
/* If errno must be maintained, expand the RTL to check if the result,
   TARGET, of a built-in function call, EXP, is NaN, and if so set
   errno to EDOM.  */

static void
expand_errno_check (tree exp, rtx target)
{
  rtx_code_label *lab = gen_label_rtx ();

  /* Test the result; if it is NaN, set errno=EDOM because
     the argument was not in the domain.  */
  do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
                           NULL_RTX, NULL_RTX, lab,
                           /* The jump is very likely.  */
                           REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));

#ifdef TARGET_EDOM
  /* If this built-in doesn't throw an exception, set errno directly.  */
  if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
    {
#ifdef GEN_ERRNO_RTX
      rtx errno_rtx = GEN_ERRNO_RTX;
#else
      rtx errno_rtx
        = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
#endif
      emit_move_insn (errno_rtx,
                      gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
      emit_label (lab);
      return;
    }
#endif

  /* Make sure the library call isn't expanded as a tail call.  */
  CALL_EXPR_TAILCALL (exp) = 0;

  /* We can't set errno=EDOM directly; let the library call do it.
     Pop the arguments right away in case the call gets deleted.  */
  NO_DEFER_POP;
  expand_call (exp, target, 0);
  OK_DEFER_POP;
  emit_label (lab);
}
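
/* The check above relies on the IEEE property that only a NaN compares
   unequal to itself: the EQ self-comparison of TARGET jumps to LAB for
   every non-NaN result, so the errno store runs only in the domain
   error case -- e.g. (sketch) sqrt (-1.0) yielding NaN and errno = EDOM
   when -fmath-errno is in effect.  */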
/* Expand a call to one of the builtin math functions (sqrt, exp, or log).
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  bool errno_set = false;
  bool try_widening = false;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SQRT):
      errno_set = ! tree_expr_nonnegative_p (arg);
      try_widening = true;
      builtin_optab = sqrt_optab;
      break;
    CASE_FLT_FN (BUILT_IN_EXP):
      errno_set = true; builtin_optab = exp_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
      errno_set = true; builtin_optab = exp10_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP2):
      errno_set = true; builtin_optab = exp2_optab; break;
    CASE_FLT_FN (BUILT_IN_EXPM1):
      errno_set = true; builtin_optab = expm1_optab; break;
    CASE_FLT_FN (BUILT_IN_LOGB):
      errno_set = true; builtin_optab = logb_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG):
      errno_set = true; builtin_optab = log_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG10):
      errno_set = true; builtin_optab = log10_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG2):
      errno_set = true; builtin_optab = log2_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG1P):
      errno_set = true; builtin_optab = log1p_optab; break;
    CASE_FLT_FN (BUILT_IN_ASIN):
      builtin_optab = asin_optab; break;
    CASE_FLT_FN (BUILT_IN_ACOS):
      builtin_optab = acos_optab; break;
    CASE_FLT_FN (BUILT_IN_TAN):
      builtin_optab = tan_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN):
      builtin_optab = atan_optab; break;
    CASE_FLT_FN (BUILT_IN_FLOOR):
      builtin_optab = floor_optab; break;
    CASE_FLT_FN (BUILT_IN_CEIL):
      builtin_optab = ceil_optab; break;
    CASE_FLT_FN (BUILT_IN_TRUNC):
      builtin_optab = btrunc_optab; break;
    CASE_FLT_FN (BUILT_IN_ROUND):
      builtin_optab = round_optab; break;
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
      builtin_optab = nearbyint_optab;
      if (flag_trapping_math)
        break;
      /* Else fallthrough and expand as rint.  */
    CASE_FLT_FN (BUILT_IN_RINT):
      builtin_optab = rint_optab; break;
    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
      builtin_optab = significand_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  /* Before working hard, check whether the instruction is available, but try
     to widen the mode for specific operations.  */
  if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
       || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
      && (!errno_set || !optimize_insn_for_size_p ()))
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
         need to expand the argument again.  This way, we will not perform
         side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into RESULT.
         Set RESULT to wherever the result comes back.  */
      result = expand_unop (mode, builtin_optab, op0, result, 0);

      if (result != 0)
        {
          if (errno_set)
            expand_errno_check (exp, result);

          /* Output the entire sequence.  */
          insns = get_insns ();
          end_sequence ();
          emit_insn (insns);
          return result;
        }

      /* If we were unable to expand via the builtin, stop the sequence
         (without outputting the insns) and call to the library function
         with the stabilized argument list.  */
      end_sequence ();
    }

  return expand_call (exp, target, target == const0_rtx);
}
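
/* Illustrative example (not part of the original sources): on a target
   providing a sqrtdf2 pattern, a call such as

     double d = __builtin_sqrt (x);

   is expanded by the routine above through sqrt_optab into a single
   square-root insn; without such a pattern, or when errno must be set
   and we optimize for size, the code falls through to the expand_call
   at the end and an ordinary sqrt libcall is emitted instead.  */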
/* Expand a call to the builtin binary math functions (pow and atan2).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, result;
  rtx_insn *insns;
  int op1_type = REAL_TYPE;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1;
  enum machine_mode mode;
  bool errno_set = true;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
    CASE_FLT_FN (BUILT_IN_LDEXP):
      op1_type = INTEGER_TYPE;
    default:
      break;
    }

  if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_POW):
      builtin_optab = pow_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN2):
      builtin_optab = atan2_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALB):
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
        return 0;
      builtin_optab = scalb_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
        return 0;
    /* Fall through... */
    CASE_FLT_FN (BUILT_IN_LDEXP):
      builtin_optab = ldexp_optab; break;
    CASE_FLT_FN (BUILT_IN_FMOD):
      builtin_optab = fmod_optab; break;
    CASE_FLT_FN (BUILT_IN_REMAINDER):
    CASE_FLT_FN (BUILT_IN_DREM):
      builtin_optab = remainder_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  result = gen_reg_rtx (mode);

  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  if (errno_set && optimize_insn_for_size_p ())
    return 0;

  /* Always stabilize the argument list.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);

  start_sequence ();

  /* Compute into RESULT.
     Set RESULT to wherever the result comes back.  */
  result = expand_binop (mode, builtin_optab, op0, op1,
                         result, 0, OPTAB_DIRECT);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (result == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  if (errno_set)
    expand_errno_check (exp, result);

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return result;
}
/* Expand a call to the builtin trinary math functions (fma).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, op2, result;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1, arg2;
  enum machine_mode mode;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  arg2 = CALL_EXPR_ARG (exp, 2);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_FMA):
      builtin_optab = fma_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  result = gen_reg_rtx (mode);

  /* Always stabilize the argument list.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
  CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);
  op2 = expand_normal (arg2);

  start_sequence ();

  /* Compute into RESULT.
     Set RESULT to wherever the result comes back.  */
  result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
                              result, 0);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (result == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return result;
}
/* Expand a call to the builtin sin and cos math functions.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      builtin_optab = sincos_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Check if sincos insn is available, otherwise fallback
     to sin or cos insn.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_FLT_FN (BUILT_IN_SIN):
        builtin_optab = sin_optab; break;
      CASE_FLT_FN (BUILT_IN_COS):
        builtin_optab = cos_optab; break;
      default:
        gcc_unreachable ();
      }

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
         need to expand the argument again.  This way, we will not perform
         side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into RESULT.
         Set RESULT to wherever the result comes back.  */
      if (builtin_optab == sincos_optab)
        {
          int ok;

          switch (DECL_FUNCTION_CODE (fndecl))
            {
            CASE_FLT_FN (BUILT_IN_SIN):
              ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
              break;
            CASE_FLT_FN (BUILT_IN_COS):
              ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
              break;
            default:
              gcc_unreachable ();
            }
          gcc_assert (ok);
        }
      else
        result = expand_unop (mode, builtin_optab, op0, result, 0);

      if (result != 0)
        {
          /* Output the entire sequence.  */
          insns = get_insns ();
          end_sequence ();
          emit_insn (insns);
          return result;
        }

      /* If we were unable to expand via the builtin, stop the sequence
         (without outputting the insns) and call to the library function
         with the stabilized argument list.  */
      end_sequence ();
    }

  return expand_call (exp, target, target == const0_rtx);
}
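
/* Sketch of the sincos trick above: for __builtin_sin the routine
   expands the two-output sincos pattern and passes 0 for the unwanted
   cosine output (and vice versa for __builtin_cos), so a target that
   only provides sincos<mode>3 still serves plain sin and cos calls.  */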
/* Given an interclass math builtin decl FNDECL and its argument ARG
   return an RTL instruction code that implements the functionality.
   If that isn't possible or available return CODE_FOR_nothing.  */

static enum insn_code
interclass_mathfn_icode (tree arg, tree fndecl)
{
  bool errno_set = false;
  optab builtin_optab = unknown_optab;
  enum machine_mode mode;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ILOGB):
      errno_set = true; builtin_optab = ilogb_optab; break;
    CASE_FLT_FN (BUILT_IN_ISINF):
      builtin_optab = isinf_optab; break;
    case BUILT_IN_ISNORMAL:
    case BUILT_IN_ISFINITE:
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      /* These builtins have no optabs (yet).  */
      break;
    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && errno_set)
    return CODE_FOR_nothing;

  /* Optab mode depends on the mode of the input argument.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (builtin_optab)
    return optab_handler (builtin_optab, mode);
  return CODE_FOR_nothing;
}
/* Expand a call to one of the builtin math functions that operate on
   floating point argument and output an integer result (ilogb, isinf,
   isnan, etc).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_interclass_mathfn (tree exp, rtx target)
{
  enum insn_code icode = CODE_FOR_nothing;
  rtx op0;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  icode = interclass_mathfn_icode (arg, fndecl);
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[1];
      rtx_insn *last = get_last_insn ();
      tree orig_arg = arg;

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
         need to expand the argument again.  This way, we will not perform
         side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      if (mode != GET_MODE (op0))
        op0 = convert_to_mode (mode, op0, 0);

      create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_legitimize_operands (icode, 0, 1, ops)
          && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
        return ops[0].value;

      delete_insns_since (last);
      CALL_EXPR_ARG (exp, 0) = orig_arg;
    }

  return NULL_RTX;
}
/* Expand a call to the builtin sincos math function.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function.  */

static rtx
expand_builtin_sincos (tree exp)
{
  rtx op0, op1, op2, target1, target2;
  enum machine_mode mode;
  tree arg, sinp, cosp;
  int result;
  location_t loc = EXPR_LOCATION (exp);
  tree alias_type, alias_off;

  if (!validate_arglist (exp, REAL_TYPE,
                         POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  sinp = CALL_EXPR_ARG (exp, 1);
  cosp = CALL_EXPR_ARG (exp, 2);

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Check if sincos insn is available, otherwise emit the call.  */
  if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  target1 = gen_reg_rtx (mode);
  target2 = gen_reg_rtx (mode);

  op0 = expand_normal (arg);
  alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
  alias_off = build_int_cst (alias_type, 0);
  op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
                                        sinp, alias_off));
  op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
                                        cosp, alias_off));

  /* Compute into target1 and target2.
     Set TARGET to wherever the result comes back.  */
  result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
  gcc_assert (result);

  /* Move target1 and target2 to the memory locations indicated
     by op1 and op2.  */
  emit_move_insn (op1, target1);
  emit_move_insn (op2, target2);

  return const0_rtx;
}
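
/* Illustrative source-level view (an assumption about typical use):
   a call

     sincos (x, &s, &c);

   expands, when sincos_optab is available, to one two-output insn
   whose results are then stored through the two pointer arguments via
   the MEM_REFs built above; no libcall is emitted in that case.  */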
/* Expand a call to the internal cexpi builtin to the sincos math function.
   EXP is the expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  */

static rtx
expand_builtin_cexpi (tree exp, rtx target)
{
  tree fndecl = get_callee_fndecl (exp);
  tree arg, type;
  enum machine_mode mode;
  rtx op0, op1, op2;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Try expanding via a sincos optab, fall back to emitting a libcall
     to sincos or cexp.  We are sure we have sincos or cexp because cexpi
     is only generated from sincos, cexp or if we have either of them.  */
  if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
    {
      op1 = gen_reg_rtx (mode);
      op2 = gen_reg_rtx (mode);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      /* Compute into op1 and op2.  */
      expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
    }
  else if (targetm.libc_has_function (function_sincos))
    {
      tree call, fn = NULL_TREE;
      tree top1, top2;
      rtx op1a, op2a;

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
        fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
        fn = builtin_decl_explicit (BUILT_IN_SINCOS);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
        fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
      else
        gcc_unreachable ();

      op1 = assign_temp (TREE_TYPE (arg), 1, 1);
      op2 = assign_temp (TREE_TYPE (arg), 1, 1);
      op1a = copy_addr_to_reg (XEXP (op1, 0));
      op2a = copy_addr_to_reg (XEXP (op2, 0));
      top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
      top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);

      /* Make sure not to fold the sincos call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
                                      call, 3, arg, top1, top2));
    }
  else
    {
      tree call, fn = NULL_TREE, narg;
      tree ctype = build_complex_type (type);

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
        fn = builtin_decl_explicit (BUILT_IN_CEXPF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
        fn = builtin_decl_explicit (BUILT_IN_CEXP);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
        fn = builtin_decl_explicit (BUILT_IN_CEXPL);
      else
        gcc_unreachable ();

      /* If we don't have a decl for cexp create one.  This is the
         friendliest fallback if the user calls __builtin_cexpi
         without full target C99 function support.  */
      if (fn == NULL_TREE)
        {
          tree fntype;
          const char *name = NULL;

          if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
            name = "cexpf";
          else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
            name = "cexp";
          else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
            name = "cexpl";

          fntype = build_function_type_list (ctype, ctype, NULL_TREE);
          fn = build_fn_decl (name, fntype);
        }

      narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
                              build_real (type, dconst0), arg);

      /* Make sure not to fold the cexp call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      return expand_expr (build_call_nary (ctype, call, 1, narg),
                          target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now build the proper return type.  */
  return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
                              make_tree (TREE_TYPE (arg), op2),
                              make_tree (TREE_TYPE (arg), op1)),
                      target, VOIDmode, EXPAND_NORMAL);
}
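
/* The fallback chain above, summarized (illustrative): __builtin_cexpi (x)
   first tries the sincos insn, then a sincos() libcall when the C
   library provides one, and finally builds a call equivalent to
   cexp (0 + x*i) -- creating a decl for "cexp" on the fly if the
   target never declared it.  */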
/* Conveniently construct a function call expression.  FNDECL names the
   function to be called, N is the number of arguments, and the "..."
   parameters are the argument expressions.  Unlike build_call_expr
   this doesn't fold the call, hence it will always return a CALL_EXPR.  */

static tree
build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
{
  va_list ap;
  tree fntype = TREE_TYPE (fndecl);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);

  va_start (ap, n);
  fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
  va_end (ap);
  SET_EXPR_LOCATION (fn, loc);
  return fn;
}
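
/* Example use from this file (see expand_builtin_stpcpy below): an
   ignored stpcpy result is rewritten as

     tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
     tree result = build_call_nofold_loc (loc, fn, 2, dst, src);

   and the unfolded CALL_EXPR is then handed straight to expand_expr.  */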
/* Expand a call to one of the builtin rounding functions gcc defines
   as an extension (lfloor and lceil).  As these are gcc extensions we
   do not need to worry about setting errno to EDOM.
   If expanding via optab fails, lower expression to (int)(floor(x)).
   EXP is the expression that is a call to the builtin function;
   if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, tmp;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fallback_fn;
  tree fallback_fndecl;
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
      builtin_optab = lceil_optab;
      fallback_fn = BUILT_IN_CEIL;
      break;

    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      builtin_optab = lfloor_optab;
      fallback_fn = BUILT_IN_FLOOR;
      break;

    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  /* Compute into TARGET.  */
  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns).  */
  end_sequence ();

  /* Fall back to floating point rounding optab.  */
  fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);

  /* For non-C99 targets we may end up without a fallback fndecl here
     if the user called __builtin_lfloor directly.  In this case emit
     a call to the floor/ceil variants nevertheless.  This should result
     in the best user experience for not full C99 targets.  */
  if (fallback_fndecl == NULL_TREE)
    {
      tree fntype;
      const char *name = NULL;

      switch (DECL_FUNCTION_CODE (fndecl))
        {
        case BUILT_IN_ICEIL:
        case BUILT_IN_LCEIL:
        case BUILT_IN_LLCEIL:
          name = "ceil";
          break;
        case BUILT_IN_ICEILF:
        case BUILT_IN_LCEILF:
        case BUILT_IN_LLCEILF:
          name = "ceilf";
          break;
        case BUILT_IN_ICEILL:
        case BUILT_IN_LCEILL:
        case BUILT_IN_LLCEILL:
          name = "ceill";
          break;
        case BUILT_IN_IFLOOR:
        case BUILT_IN_LFLOOR:
        case BUILT_IN_LLFLOOR:
          name = "floor";
          break;
        case BUILT_IN_IFLOORF:
        case BUILT_IN_LFLOORF:
        case BUILT_IN_LLFLOORF:
          name = "floorf";
          break;
        case BUILT_IN_IFLOORL:
        case BUILT_IN_LFLOORL:
        case BUILT_IN_LLFLOORL:
          name = "floorl";
          break;
        default:
          gcc_unreachable ();
        }

      fntype = build_function_type_list (TREE_TYPE (arg),
                                         TREE_TYPE (arg), NULL_TREE);
      fallback_fndecl = build_fn_decl (name, fntype);
    }

  exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);

  tmp = expand_normal (exp);
  tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));

  /* Truncate the result of floating point optab to integer
     via expand_fix ().  */
  target = gen_reg_rtx (mode);
  expand_fix (target, tmp, 0);

  return target;
}
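
/* Illustrative lowering (not part of the original sources): when the
   target lacks an lfloor pattern, a call such as

     long l = __builtin_lfloor (x);

   is rewritten above into floor (x) followed by expand_fix, i.e. the
   equivalent of (long) floor (x), with a bare "floor"/"floorf"/"floorl"
   decl synthesized for non-C99 targets.  */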
/* Expand a call to one of the builtin math functions doing integer
   conversion (lrint).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn_2 (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg;
  enum machine_mode mode;
  enum built_in_function fallback_fn = BUILT_IN_NONE;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_IRINT):
      fallback_fn = BUILT_IN_LRINT;
      /* FALLTHRU */
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      builtin_optab = lrint_optab;
      break;

    CASE_FLT_FN (BUILT_IN_IROUND):
      fallback_fn = BUILT_IN_LROUND;
      /* FALLTHRU */
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      builtin_optab = lround_optab;
      break;

    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
    return NULL_RTX;

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (!flag_errno_math)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
         need to expand the argument again.  This way, we will not perform
         side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      if (expand_sfix_optab (result, op0, builtin_optab))
        {
          /* Output the entire sequence.  */
          insns = get_insns ();
          end_sequence ();
          emit_insn (insns);
          return result;
        }

      /* If we were unable to expand via the builtin, stop the sequence
         (without outputting the insns) and call to the library function
         with the stabilized argument list.  */
      end_sequence ();
    }

  if (fallback_fn != BUILT_IN_NONE)
    {
      /* Fall back to rounding to long int.  Use implicit_p 0 - for non-C99
         targets, (int) round (x) should never be transformed into
         BUILT_IN_IROUND and if __builtin_iround is called directly, emit
         a call to lround in the hope that the target provides at least some
         C99 functions.  This should result in the best user experience for
         not full C99 targets.  */
      tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
                                                fallback_fn, 0);

      exp = build_call_nofold_loc (EXPR_LOCATION (exp),
                                   fallback_fndecl, 1, arg);

      target = expand_call (exp, NULL_RTX, target == const0_rtx);
      target = maybe_emit_group_store (target, TREE_TYPE (exp));
      return convert_to_mode (mode, target, 0);
    }

  return expand_call (exp, target, target == const0_rtx);
}
/* Expand a call to the powi built-in mathematical function.  Return NULL_RTX if
   a normal call should be emitted rather than expanding the function
   in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_powi (tree exp, rtx target)
{
  tree arg0, arg1;
  rtx op0, op1;
  enum machine_mode mode;
  enum machine_mode mode2;

  if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Emit a libcall to libgcc.  */

  /* Mode of the 2nd argument must match that of an int.  */
  mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);

  if (target == NULL_RTX)
    target = gen_reg_rtx (mode);

  op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != mode)
    op0 = convert_to_mode (mode, op0, 0);
  op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
  if (GET_MODE (op1) != mode2)
    op1 = convert_to_mode (mode2, op1, 0);

  target = emit_library_call_value (optab_libfunc (powi_optab, mode),
                                    target, LCT_CONST, mode, 2,
                                    op0, mode, op1, mode2);

  return target;
}
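
/* Sketch of the resulting libcall: for DFmode the optab_libfunc lookup
   yields libgcc's __powidf2, so __builtin_powi (x, n) becomes, in
   effect,

     double __powidf2 (double x, int n);
     target = __powidf2 (op0, op1);

   with op1 converted to the int-sized mode computed above.  */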
/* Expand expression EXP which is a call to the strlen builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strlen (tree exp, rtx target,
                       enum machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      struct expand_operand ops[4];
      rtx pat;
      tree len;
      tree src = CALL_EXPR_ARG (exp, 0);
      rtx src_reg;
      rtx_insn *before_strlen;
      enum machine_mode insn_mode = target_mode;
      enum insn_code icode = CODE_FOR_nothing;
      unsigned int align;

      /* If the length can be computed at compile-time, return it.  */
      len = c_strlen (src, 0);
      if (len)
        return expand_expr (len, target, target_mode, EXPAND_NORMAL);

      /* If the length can be computed at compile-time and is constant
         integer, but there are side-effects in src, evaluate
         src for side-effects, then return len.
         E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
         can be optimized into: i++; x = 3;  */
      len = c_strlen (src, 1);
      if (len && TREE_CODE (len) == INTEGER_CST)
        {
          expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
          return expand_expr (len, target, target_mode, EXPAND_NORMAL);
        }

      align = get_pointer_alignment (src) / BITS_PER_UNIT;

      /* If SRC is not a pointer type, don't do this operation inline.  */
      if (align == 0)
        return NULL_RTX;

      /* Bail out if we can't compute strlen in the right mode.  */
      while (insn_mode != VOIDmode)
        {
          icode = optab_handler (strlen_optab, insn_mode);
          if (icode != CODE_FOR_nothing)
            break;

          insn_mode = GET_MODE_WIDER_MODE (insn_mode);
        }
      if (insn_mode == VOIDmode)
        return NULL_RTX;

      /* Make a place to hold the source address.  We will not expand
         the actual source until we are sure that the expansion will
         not fail -- there are trees that cannot be expanded twice.  */
      src_reg = gen_reg_rtx (Pmode);

      /* Mark the beginning of the strlen sequence so we can emit the
         source operand later.  */
      before_strlen = get_last_insn ();

      create_output_operand (&ops[0], target, insn_mode);
      create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
      create_integer_operand (&ops[2], 0);
      create_integer_operand (&ops[3], align);
      if (!maybe_expand_insn (icode, 4, ops))
        return NULL_RTX;

      /* Now that we are assured of success, expand the source.  */
      start_sequence ();
      pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
      if (pat != src_reg)
        {
#ifdef POINTERS_EXTEND_UNSIGNED
          if (GET_MODE (pat) != Pmode)
            pat = convert_to_mode (Pmode, pat,
                                   POINTERS_EXTEND_UNSIGNED);
#endif
          emit_move_insn (src_reg, pat);
        }
      pat = get_insns ();
      end_sequence ();

      if (before_strlen)
        emit_insn_after (pat, before_strlen);
      else
        emit_insn_before (pat, get_insns ());

      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (ops[0].value) == target_mode)
        target = ops[0].value;
      else if (target != 0)
        convert_move (target, ops[0].value, 0);
      else
        target = convert_to_mode (target_mode, ops[0].value, 0);

      return target;
    }
}
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

static rtx
builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
                         enum machine_mode mode)
{
  const char *str = (const char *) data;

  gcc_assert (offset >= 0
              && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
                  <= strlen (str) + 1));

  return c_readstr (str + offset, mode);
}
/* LEN specifies the length of the block of memcpy/memset operation.
   Figure out its range and put it into MIN_SIZE/MAX_SIZE.
   In some cases we can make very likely guess on max size, then we
   set it into PROBABLE_MAX_SIZE.  */

static void
determine_block_size (tree len, rtx len_rtx,
                      unsigned HOST_WIDE_INT *min_size,
                      unsigned HOST_WIDE_INT *max_size,
                      unsigned HOST_WIDE_INT *probable_max_size)
{
  if (CONST_INT_P (len_rtx))
    {
      *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
      return;
    }
  else
    {
      wide_int min, max;
      enum value_range_type range_type = VR_UNDEFINED;

      /* Determine bounds from the type.  */
      if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
        *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
      else
        *min_size = 0;
      if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
        *probable_max_size = *max_size
          = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
      else
        *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));

      if (TREE_CODE (len) == SSA_NAME)
        range_type = get_range_info (len, &min, &max);
      if (range_type == VR_RANGE)
        {
          if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
            *min_size = min.to_uhwi ();
          if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
            *probable_max_size = *max_size = max.to_uhwi ();
        }
      else if (range_type == VR_ANTI_RANGE)
        {
          /* Anti range 0...N lets us determine the minimal size to be N+1.  */
          if (min == 0)
            {
              if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
                *min_size = max.to_uhwi () + 1;
            }
          /* Code like

             if (n < 100)
               memcpy (a, b, n)

             produces an anti-range allowing negative values of N.  We still
             can use the information and make a guess that N is not negative.
             */
          else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
            *probable_max_size = min.to_uhwi () - 1;
        }
    }
  gcc_checking_assert (*max_size <=
                       (unsigned HOST_WIDE_INT)
                       GET_MODE_MASK (GET_MODE (len_rtx)));
}
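
/* Worked example (illustrative): for code like

     if (n < 100)
       memcpy (a, b, n);

   value-range data gives an anti-range for the converted unsigned N,
   and the logic above turns that into probable_max_size = 99 while
   max_size stays at the full mode mask, letting the block-move
   expander pick a strategy for the likely small copy without being
   wrong for the (impossible) huge one.  */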
/* Expand a call EXP to the memcpy builtin.
   Return NULL_RTX if we failed, the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memcpy (tree exp, rtx target)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src);
      unsigned int dest_align = get_pointer_alignment (dest);
      rtx dest_mem, src_mem, dest_addr, len_rtx;
      HOST_WIDE_INT expected_size = -1;
      unsigned int expected_align = 0;
      unsigned HOST_WIDE_INT min_size;
      unsigned HOST_WIDE_INT max_size;
      unsigned HOST_WIDE_INT probable_max_size;

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
        return NULL_RTX;

      /* If either SRC is not a pointer type, don't do this
         operation in-line.  */
      if (src_align == 0)
        return NULL_RTX;

      if (currently_expanding_gimple_stmt)
        stringop_block_profile (currently_expanding_gimple_stmt,
                                &expected_align, &expected_size);

      if (expected_align < dest_align)
        expected_align = dest_align;
      dest_mem = get_memory_rtx (dest, len);
      set_mem_align (dest_mem, dest_align);
      len_rtx = expand_normal (len);
      determine_block_size (len, len_rtx, &min_size, &max_size,
                            &probable_max_size);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
         by pieces, we can avoid loading the string from memory
         and only store the computed constants.  */
      if (src_str
          && CONST_INT_P (len_rtx)
          && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
          && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
                                  CONST_CAST (char *, src_str),
                                  dest_align, false))
        {
          dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
                                      builtin_memcpy_read_str,
                                      CONST_CAST (char *, src_str),
                                      dest_align, false, 0);
          dest_mem = force_operand (XEXP (dest_mem, 0), target);
          dest_mem = convert_memory_address (ptr_mode, dest_mem);
          return dest_mem;
        }

      src_mem = get_memory_rtx (src, len);
      set_mem_align (src_mem, src_align);

      /* Copy word part most expediently.  */
      dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
                                         CALL_EXPR_TAILCALL (exp)
                                         ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
                                         expected_align, expected_size,
                                         min_size, max_size, probable_max_size);

      if (dest_addr == 0)
        {
          dest_addr = force_operand (XEXP (dest_mem, 0), target);
          dest_addr = convert_memory_address (ptr_mode, dest_addr);
        }
      return dest_addr;
    }
}
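
/* Illustrative example (not part of the original sources): for

     memcpy (buf, "hi", 3);

   c_getstr sees the string constant, can_store_by_pieces accepts the
   3-byte length, and store_by_pieces emits immediate stores built by
   builtin_memcpy_read_str -- the string is never loaded from its own
   storage.  Non-constant sources go through emit_block_move_hints
   instead.  */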
/* Expand a call EXP to the mempcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  If ENDP is 0 return the
   destination pointer, if ENDP is 1 return the end pointer ala
   mempcpy, and if ENDP is 2 return the end pointer minus one ala
   stpcpy.  */

static rtx
expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      return expand_builtin_mempcpy_args (dest, src, len,
                                          target, mode, /*endp=*/ 1);
    }
}
/* Helper function to do the actual work for expand_builtin_mempcpy.  The
   arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_mempcpy.  */

static rtx
expand_builtin_mempcpy_args (tree dest, tree src, tree len,
                             rtx target, enum machine_mode mode, int endp)
{
  /* If return value is ignored, transform mempcpy into memcpy.  */
  if (target == const0_rtx && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
      tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
                                           dest, src, len);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src);
      unsigned int dest_align = get_pointer_alignment (dest);
      rtx dest_mem, src_mem, len_rtx;

      /* If either SRC or DEST is not a pointer type, don't do this
         operation in-line.  */
      if (dest_align == 0 || src_align == 0)
        return NULL_RTX;

      /* If LEN is not constant, call the normal function.  */
      if (! tree_fits_uhwi_p (len))
        return NULL_RTX;

      len_rtx = expand_normal (len);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
         by pieces, we can avoid loading the string from memory
         and only store the computed constants.  */
      if (src_str
          && CONST_INT_P (len_rtx)
          && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
          && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
                                  CONST_CAST (char *, src_str),
                                  dest_align, false))
        {
          dest_mem = get_memory_rtx (dest, len);
          set_mem_align (dest_mem, dest_align);
          dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
                                      builtin_memcpy_read_str,
                                      CONST_CAST (char *, src_str),
                                      dest_align, false, endp);
          dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
          dest_mem = convert_memory_address (ptr_mode, dest_mem);
          return dest_mem;
        }

      if (CONST_INT_P (len_rtx)
          && can_move_by_pieces (INTVAL (len_rtx),
                                 MIN (dest_align, src_align)))
        {
          dest_mem = get_memory_rtx (dest, len);
          set_mem_align (dest_mem, dest_align);
          src_mem = get_memory_rtx (src, len);
          set_mem_align (src_mem, src_align);
          dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
                                     MIN (dest_align, src_align), endp);
          dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
          dest_mem = convert_memory_address (ptr_mode, dest_mem);
          return dest_mem;
        }

      return NULL_RTX;
    }
}
#ifndef HAVE_movstr
# define HAVE_movstr 0
# define CODE_FOR_movstr CODE_FOR_nothing
#endif

/* Expand into a movstr instruction, if one is available.  Return NULL_RTX if
   we failed, the caller should emit a normal call, otherwise try to
   get the result in TARGET, if convenient.  If ENDP is 0 return the
   destination pointer, if ENDP is 1 return the end pointer ala
   mempcpy, and if ENDP is 2 return the end pointer minus one ala
   stpcpy.  */

static rtx
expand_movstr (tree dest, tree src, rtx target, int endp)
{
  struct expand_operand ops[3];
  rtx dest_mem;
  rtx src_mem;

  if (!HAVE_movstr)
    return NULL_RTX;

  dest_mem = get_memory_rtx (dest, NULL);
  src_mem = get_memory_rtx (src, NULL);
  if (!endp)
    {
      target = force_reg (Pmode, XEXP (dest_mem, 0));
      dest_mem = replace_equiv_address (dest_mem, target);
    }

  create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
  create_fixed_operand (&ops[1], dest_mem);
  create_fixed_operand (&ops[2], src_mem);
  if (!maybe_expand_insn (CODE_FOR_movstr, 3, ops))
    return NULL_RTX;

  if (endp && target != const0_rtx)
    {
      target = ops[0].value;
      /* movstr is supposed to set end to the address of the NUL
         terminator.  If the caller requested a mempcpy-like return value,
         adjust it.  */
      if (endp == 1 && target != const0_rtx)
        {
          rtx tem = plus_constant (GET_MODE (target),
                                   gen_lowpart (GET_MODE (target), target), 1);
          emit_move_insn (target, force_operand (tem, NULL_RTX));
        }
    }
  return target;
}
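
/* Note on the ENDP adjustment above (sketch): a movstr insn leaves the
   output pointing at the NUL terminator, which is exactly the stpcpy
   (ENDP == 2) convention; for the mempcpy convention (ENDP == 1) the
   code adds 1 so the result points one past the terminator.  */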
/* Expand expression EXP, which is a call to the strcpy builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient (and in mode MODE if that's
   convenient).  */

static rtx
expand_builtin_strcpy (tree exp, rtx target)
{
  if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
   {
     tree dest = CALL_EXPR_ARG (exp, 0);
     tree src = CALL_EXPR_ARG (exp, 1);
     return expand_builtin_strcpy_args (dest, src, target);
   }
   return NULL_RTX;
}

/* Helper function to do the actual work for expand_builtin_strcpy.  The
   arguments to the builtin_strcpy call DEST and SRC are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_strcpy.  */

static rtx
expand_builtin_strcpy_args (tree dest, tree src, rtx target)
{
  return expand_movstr (dest, src, target, /*endp=*/0);
}
/* Expand a call EXP to the stpcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
{
  tree dst, src;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* If return value is ignored, transform stpcpy into strcpy.  */
  if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      tree len, lenp1;
      rtx ret;

      /* Ensure we get an actual string whose length can be evaluated at
         compile-time, not an expression containing a string.  This is
         because the latter will potentially produce pessimized code
         when used to produce the return value.  */
      if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
        return expand_movstr (dst, src, target, /*endp=*/2);

      lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
      ret = expand_builtin_mempcpy_args (dst, src, lenp1,
                                         target, mode, /*endp=*/2);

      if (ret)
        return ret;

      if (TREE_CODE (len) == INTEGER_CST)
        {
          rtx len_rtx = expand_normal (len);

          if (CONST_INT_P (len_rtx))
            {
              ret = expand_builtin_strcpy_args (dst, src, target);

              if (ret)
                {
                  if (! target)
                    {
                      if (mode != VOIDmode)
                        target = gen_reg_rtx (mode);
                      else
                        target = gen_reg_rtx (GET_MODE (ret));
                    }
                  if (GET_MODE (target) != GET_MODE (ret))
                    ret = gen_lowpart (GET_MODE (target), ret);

                  ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
                  ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
                  gcc_assert (ret);

                  return target;
                }
            }
        }

      return expand_movstr (dst, src, target, /*endp=*/2);
    }
}
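
/* Illustrative lowering: with a constant source,

     char *p = stpcpy (d, "abc");

   is handled above as mempcpy (d, "abc", 4) with ENDP == 2, so P ends
   up as D + 3, pointing at the copied NUL; only sources whose length
   is unknown at compile time fall back to expand_movstr.  */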
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

static rtx
builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
                          enum machine_mode mode)
{
  const char *str = (const char *) data;

  if ((unsigned HOST_WIDE_INT) offset > strlen (str))
    return const0_rtx;

  return c_readstr (str + offset, mode);
}
/* Expand expression EXP, which is a call to the strncpy builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  */

static rtx
expand_builtin_strncpy (tree exp, rtx target)
{
  location_t loc = EXPR_LOCATION (exp);

  if (validate_arglist (exp,
                        POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      tree slen = c_strlen (src, 1);

      /* We must be passed a constant len and src parameter.  */
      if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
        return NULL_RTX;

      slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

      /* We're required to pad with trailing zeros if the requested
         len is greater than strlen(s2)+1.  In that case try to
         use store_by_pieces, if it fails, punt.  */
      if (tree_int_cst_lt (slen, len))
        {
          unsigned int dest_align = get_pointer_alignment (dest);
          const char *p = c_getstr (src);
          rtx dest_mem;

          if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
              || !can_store_by_pieces (tree_to_uhwi (len),
                                       builtin_strncpy_read_str,
                                       CONST_CAST (char *, p),
                                       dest_align, false))
            return NULL_RTX;

          dest_mem = get_memory_rtx (dest, len);
          store_by_pieces (dest_mem, tree_to_uhwi (len),
                           builtin_strncpy_read_str,
                           CONST_CAST (char *, p), dest_align, false, 0);
          dest_mem = force_operand (XEXP (dest_mem, 0), target);
          dest_mem = convert_memory_address (ptr_mode, dest_mem);
          return dest_mem;
        }
    }
  return NULL_RTX;
}
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

static rtx
builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                         enum machine_mode mode)
{
  const char *c = (const char *) data;
  char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));

  memset (p, *c, GET_MODE_SIZE (mode));

  return c_readstr (p, mode);
}
/* Callback routine for store_by_pieces.  Return the RTL of a register
   containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
   char value given in the RTL register data.  For example, if mode is
   4 bytes wide, return the RTL for 0x01010101*data.  */

static rtx
builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                        enum machine_mode mode)
{
  rtx target, coeff;
  size_t size;
  char *p;

  size = GET_MODE_SIZE (mode);
  if (size == 1)
    return (rtx) data;

  p = XALLOCAVEC (char, size);
  memset (p, 1, size);
  coeff = c_readstr (p, mode);

  target = convert_to_mode (mode, (rtx) data, 1);
  target = expand_mult (mode, target, coeff, NULL_RTX, 1);
  return force_reg (mode, target);
}
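
/* Worked example for the callback above (illustrative): for SImode the
   XALLOCAVEC buffer holds { 1, 1, 1, 1 }, c_readstr turns it into the
   constant 0x01010101, and the expand_mult call yields

     target = data * 0x01010101

   i.e. four copies of the byte value -- the run-time counterpart of
   what builtin_memset_read_str produces for compile-time-constant
   bytes.  */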
/* Expand expression EXP, which is a call to the memset builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient (and in mode MODE if that's
   convenient).  */

static rtx
expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree val = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      return expand_builtin_memset_args (dest, val, len, target, mode, exp);
    }
}
/* Helper function to do the actual work for expand_builtin_memset.  The
   arguments to the builtin_memset call DEST, VAL, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_memset.  */

static rtx
expand_builtin_memset_args (tree dest, tree val, tree len,
			    rtx target, enum machine_mode mode, tree orig_exp)
{
  tree fndecl, fn;
  enum built_in_function fcode;
  enum machine_mode val_mode;
  char c;
  unsigned int dest_align;
  rtx dest_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;
  unsigned HOST_WIDE_INT min_size;
  unsigned HOST_WIDE_INT max_size;
  unsigned HOST_WIDE_INT probable_max_size;

  dest_align = get_pointer_alignment (dest);

  /* If DEST is not a pointer type, don't do this operation in-line.  */
  if (dest_align == 0)
    return NULL_RTX;

  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
			    &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Evaluate and ignore VAL in case it has side-effects.  */
      expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (dest, target, mode, EXPAND_NORMAL);
    }

  /* Stabilize the arguments in case we fail.  */
  dest = builtin_save_expr (dest);
  val = builtin_save_expr (val);
  len = builtin_save_expr (len);

  len_rtx = expand_normal (len);
  determine_block_size (len, len_rtx, &min_size, &max_size,
			&probable_max_size);
  dest_mem = get_memory_rtx (dest, len);
  val_mode = TYPE_MODE (unsigned_char_type_node);

  if (TREE_CODE (val) != INTEGER_CST)
    {
      rtx val_rtx;

      val_rtx = expand_normal (val);
      val_rtx = convert_to_mode (val_mode, val_rtx, 0);

      /* Assume that we can memset by pieces if we can store
       * the coefficients by pieces (in the required modes).
       * We can't pass builtin_memset_gen_str as that emits RTL.  */
      c = 1;
      if (tree_fits_uhwi_p (len)
	  && can_store_by_pieces (tree_to_uhwi (len),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	{
	  val_rtx = force_reg (val_mode, val_rtx);
	  store_by_pieces (dest_mem, tree_to_uhwi (len),
			   builtin_memset_gen_str, val_rtx, dest_align,
			   true, 0);
	}
      else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
					dest_align, expected_align,
					expected_size, min_size, max_size,
					probable_max_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  if (target_char_cast (val, &c))
    goto do_libcall;

  if (c)
    {
      if (tree_fits_uhwi_p (len)
	  && can_store_by_pieces (tree_to_uhwi (len),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	store_by_pieces (dest_mem, tree_to_uhwi (len),
			 builtin_memset_read_str, &c, dest_align, true, 0);
      else if (!set_storage_via_setmem (dest_mem, len_rtx,
					gen_int_mode (c, val_mode),
					dest_align, expected_align,
					expected_size, min_size, max_size,
					probable_max_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  set_mem_align (dest_mem, dest_align);
  dest_addr = clear_storage_hints (dest_mem, len_rtx,
				   CALL_EXPR_TAILCALL (orig_exp)
				   ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
				   expected_align, expected_size,
				   min_size, max_size,
				   probable_max_size);

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  return dest_addr;

 do_libcall:
  fndecl = get_callee_fndecl (orig_exp);
  fcode = DECL_FUNCTION_CODE (fndecl);
  if (fcode == BUILT_IN_MEMSET)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
				dest, val, len);
  else if (fcode == BUILT_IN_BZERO)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
				dest, len);
  else
    gcc_unreachable ();
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
  return expand_call (fn, target, target == const0_rtx);
}
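/* Illustrative example (not from the original source): with a constant fill
   byte and a small constant length, e.g.

       char buf[16];
       memset (buf, 'x', 16);

   the INTEGER_CST branch above lets store_by_pieces emit word stores of
   0x78787878 directly; only when neither by-pieces expansion nor a setmem
   pattern applies do we fall through to the do_libcall path.  */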
/* Expand expression EXP, which is a call to the bzero builtin.  Return
   NULL_RTX if we failed and the caller should emit a normal call.  */

static rtx
expand_builtin_bzero (tree exp)
{
  tree dest, size;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dest = CALL_EXPR_ARG (exp, 0);
  size = CALL_EXPR_ARG (exp, 1);

  /* New argument list transforming bzero(ptr x, int y) to
     memset(ptr x, int 0, size_t y).  This is done this way
     so that if it isn't expanded inline, we fall back to
     calling bzero instead of memset.  */

  return expand_builtin_memset_args (dest, integer_zero_node,
				     fold_convert_loc (loc,
						       size_type_node, size),
				     const0_rtx, VOIDmode, exp);
}
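/* Illustrative example (not from the original source): the call

       bzero (p, n);

   is expanded here exactly as

       memset (p, 0, (size_t) n);

   but with EXP kept as the original bzero CALL_EXPR, so a failed inline
   expansion emits a call to bzero, not memset.  */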
/* Expand expression EXP, which is a call to the memcmp built-in function.
   Return NULL_RTX if we failed and the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in mode
   MODE, if that's convenient).  */

static rtx
expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
		       ATTRIBUTE_UNUSED enum machine_mode mode)
{
  location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
     implementing memcmp because it will stop if it encounters two
     zero bytes.  */
#if defined HAVE_cmpmemsi
  {
    rtx arg1_rtx, arg2_rtx, arg3_rtx;
    rtx result;
    rtx insn;
    tree arg1 = CALL_EXPR_ARG (exp, 0);
    tree arg2 = CALL_EXPR_ARG (exp, 1);
    tree len = CALL_EXPR_ARG (exp, 2);

    unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
    unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
    enum machine_mode insn_mode;

    insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;

    /* If we don't have POINTER_TYPE, call the function.  */
    if (arg1_align == 0 || arg2_align == 0)
      return NULL_RTX;

    /* Make a place to write the result of the instruction.  */
    result = target;
    if (! (result != 0
	   && REG_P (result) && GET_MODE (result) == insn_mode
	   && REGNO (result) >= FIRST_PSEUDO_REGISTER))
      result = gen_reg_rtx (insn_mode);

    arg1_rtx = get_memory_rtx (arg1, len);
    arg2_rtx = get_memory_rtx (arg2, len);
    arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));

    /* Set MEM_SIZE as appropriate.  */
    if (CONST_INT_P (arg3_rtx))
      {
	set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
	set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
      }

    insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			 GEN_INT (MIN (arg1_align, arg2_align)));
    if (insn)
      emit_insn (insn);
    else
      emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
			       TYPE_MODE (integer_type_node), 3,
			       XEXP (arg1_rtx, 0), Pmode,
			       XEXP (arg2_rtx, 0), Pmode,
			       convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
						TYPE_UNSIGNED (sizetype)),
			       TYPE_MODE (sizetype));

    /* Return the value in the proper mode for this function.  */
    mode = TYPE_MODE (TREE_TYPE (exp));
    if (GET_MODE (result) == mode)
      return result;
    else if (target != 0)
      {
	convert_move (target, result, 0);
	return target;
      }
    else
      return convert_to_mode (mode, result, 0);
  }
#endif /* HAVE_cmpmemsi.  */

  return NULL_RTX;
}
/* Expand expression EXP, which is a call to the strcmp builtin.  Return
   NULL_RTX if we failed and the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

#if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
  if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
      || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
    {
      rtx arg1_rtx, arg2_rtx;
      rtx result, insn = NULL_RTX;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);

      unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
      unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
	return NULL_RTX;

      /* Stabilize the arguments in case gen_cmpstr(n)si fail.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);

      arg1_rtx = get_memory_rtx (arg1, NULL);
      arg2_rtx = get_memory_rtx (arg2, NULL);

#ifdef HAVE_cmpstrsi
      /* Try to call cmpstrsi.  */
      if (HAVE_cmpstrsi)
	{
	  enum machine_mode insn_mode
	    = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;

	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && REG_P (result) && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
			       GEN_INT (MIN (arg1_align, arg2_align)));
	}
#endif
#ifdef HAVE_cmpstrnsi
      /* Try to determine at least one length and call cmpstrnsi.  */
      if (!insn && HAVE_cmpstrnsi)
	{
	  tree len;
	  rtx arg3_rtx;

	  enum machine_mode insn_mode
	    = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
	  tree len1 = c_strlen (arg1, 1);
	  tree len2 = c_strlen (arg2, 1);

	  if (len1)
	    len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
	  if (len2)
	    len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

	  /* If we don't have a constant length for the first, use the length
	     of the second, if we know it.  We don't require a constant for
	     this case; some cost analysis could be done if both are available
	     but neither is constant.  For now, assume they're equally cheap,
	     unless one has side effects.  If both strings have constant lengths,
	     use the smaller.  */

	  if (!len1)
	    len = len2;
	  else if (!len2)
	    len = len1;
	  else if (TREE_SIDE_EFFECTS (len1))
	    len = len2;
	  else if (TREE_SIDE_EFFECTS (len2))
	    len = len1;
	  else if (TREE_CODE (len1) != INTEGER_CST)
	    len = len2;
	  else if (TREE_CODE (len2) != INTEGER_CST)
	    len = len1;
	  else if (tree_int_cst_lt (len1, len2))
	    len = len1;
	  else
	    len = len2;

	  /* If both arguments have side effects, we cannot optimize.  */
	  if (!len || TREE_SIDE_EFFECTS (len))
	    goto do_libcall;

	  arg3_rtx = expand_normal (len);

	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && REG_P (result) && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
				GEN_INT (MIN (arg1_align, arg2_align)));
	}
#endif

      if (insn)
	{
	  enum machine_mode mode;
	  emit_insn (insn);

	  /* Return the value in the proper mode for this function.  */
	  mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* Expand the library call ourselves using a stabilized argument
	 list to avoid re-evaluating the function's arguments twice.  */
#ifdef HAVE_cmpstrnsi
    do_libcall:
#endif
      fndecl = get_callee_fndecl (exp);
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
#endif
  return NULL_RTX;
}
/* Expand expression EXP, which is a call to the strncmp builtin.  Return
   NULL_RTX if we failed and the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
			ATTRIBUTE_UNUSED enum machine_mode mode)
{
  location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* If c_strlen can determine an expression for one of the string
     lengths, and it doesn't have side effects, then emit cmpstrnsi
     using length MIN(strlen(string)+1, arg3).  */
#ifdef HAVE_cmpstrnsi
  if (HAVE_cmpstrnsi)
  {
    tree len, len1, len2;
    rtx arg1_rtx, arg2_rtx, arg3_rtx;
    rtx result, insn;
    tree fndecl, fn;
    tree arg1 = CALL_EXPR_ARG (exp, 0);
    tree arg2 = CALL_EXPR_ARG (exp, 1);
    tree arg3 = CALL_EXPR_ARG (exp, 2);

    unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
    unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
    enum machine_mode insn_mode
      = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;

    len1 = c_strlen (arg1, 1);
    len2 = c_strlen (arg2, 1);

    if (len1)
      len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
    if (len2)
      len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);

    /* If we don't have a constant length for the first, use the length
       of the second, if we know it.  We don't require a constant for
       this case; some cost analysis could be done if both are available
       but neither is constant.  For now, assume they're equally cheap,
       unless one has side effects.  If both strings have constant lengths,
       use the smaller.  */

    if (!len1)
      len = len2;
    else if (!len2)
      len = len1;
    else if (TREE_SIDE_EFFECTS (len1))
      len = len2;
    else if (TREE_SIDE_EFFECTS (len2))
      len = len1;
    else if (TREE_CODE (len1) != INTEGER_CST)
      len = len2;
    else if (TREE_CODE (len2) != INTEGER_CST)
      len = len1;
    else if (tree_int_cst_lt (len1, len2))
      len = len1;
    else
      len = len2;

    /* If both arguments have side effects, we cannot optimize.  */
    if (!len || TREE_SIDE_EFFECTS (len))
      return NULL_RTX;

    /* The actual new length parameter is MIN(len,arg3).  */
    len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
			   fold_convert_loc (loc, TREE_TYPE (len), arg3));

    /* If we don't have POINTER_TYPE, call the function.  */
    if (arg1_align == 0 || arg2_align == 0)
      return NULL_RTX;

    /* Make a place to write the result of the instruction.  */
    result = target;
    if (! (result != 0
	   && REG_P (result) && GET_MODE (result) == insn_mode
	   && REGNO (result) >= FIRST_PSEUDO_REGISTER))
      result = gen_reg_rtx (insn_mode);

    /* Stabilize the arguments in case gen_cmpstrnsi fails.  */
    arg1 = builtin_save_expr (arg1);
    arg2 = builtin_save_expr (arg2);
    len = builtin_save_expr (len);

    arg1_rtx = get_memory_rtx (arg1, len);
    arg2_rtx = get_memory_rtx (arg2, len);
    arg3_rtx = expand_normal (len);
    insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			  GEN_INT (MIN (arg1_align, arg2_align)));
    if (insn)
      {
	emit_insn (insn);

	/* Return the value in the proper mode for this function.  */
	mode = TYPE_MODE (TREE_TYPE (exp));
	if (GET_MODE (result) == mode)
	  return result;
	if (target == 0)
	  return convert_to_mode (mode, result, 0);
	convert_move (target, result, 0);
	return target;
      }

    /* Expand the library call ourselves using a stabilized argument
       list to avoid re-evaluating the function's arguments twice.  */
    fndecl = get_callee_fndecl (exp);
    fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
				arg1, arg2, len);
    gcc_assert (TREE_CODE (fn) == CALL_EXPR);
    CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
    return expand_call (fn, target, target == const0_rtx);
  }
#endif
  return NULL_RTX;
}
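/* Illustrative example (not from the original source): for

       strncmp (s, "abc", n);

   c_strlen gives len2 = 3, so the comparison length above becomes
   MIN (3 + 1, n).  The +1 includes the terminating NUL, which is what
   lets a byte-wise cmpstrnsi pattern stop at the right place.  */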
/* Expand a call to __builtin_saveregs, generating the result in TARGET,
   if that's convenient.  */

rtx
expand_builtin_saveregs (void)
{
  rtx val;
  rtx_insn *seq;

  /* Don't do __builtin_saveregs more than once in a function.
     Save the result of the first call and reuse it.  */
  if (saveregs_value != 0)
    return saveregs_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the call to the
     first insn of this function.  */

  start_sequence ();

  /* Do whatever the machine needs done in this case.  */
  val = targetm.calls.expand_builtin_saveregs ();

  seq = get_insns ();
  end_sequence ();

  saveregs_value = val;

  /* Put the insns after the NOTE that starts the function.  If this
     is inside a start_sequence, make the outer-level insn chain current, so
     the code is placed at the start of the function.  */
  push_topmost_sequence ();
  emit_insn_after (seq, entry_of_function ());
  pop_topmost_sequence ();

  return val;
}
/* Expand a call to __builtin_next_arg.  */

static rtx
expand_builtin_next_arg (void)
{
  /* Checking arguments is already done in fold_builtin_next_arg
     that must be called before this function.  */
  return expand_binop (ptr_mode, add_optab,
		       crtl->args.internal_arg_pointer,
		       crtl->args.arg_offset_rtx,
		       NULL_RTX, 0, OPTAB_LIB_WIDEN);
}
/* Make it easier for the backends by protecting the valist argument
   from multiple evaluations.  */

static tree
stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
{
  tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));

  /* The current way of determining the type of valist is completely
     bogus.  We should have the information on the va builtin instead.  */
  if (!vatype)
    vatype = targetm.fn_abi_va_list (cfun->decl);

  if (TREE_CODE (vatype) == ARRAY_TYPE)
    {
      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);

      /* For this case, the backends will be expecting a pointer to
	 vatype, but it's possible we've actually been given an array
	 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
	 So fix it.  */
      if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	{
	  tree p1 = build_pointer_type (TREE_TYPE (vatype));
	  valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
	}
    }
  else
    {
      tree pt = build_pointer_type (vatype);

      if (! needs_lvalue)
	{
	  if (! TREE_SIDE_EFFECTS (valist))
	    return valist;

	  valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
	  TREE_SIDE_EFFECTS (valist) = 1;
	}

      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);
      valist = fold_build2_loc (loc, MEM_REF,
				vatype, valist, build_int_cst (pt, 0));
    }

  return valist;
}
4263 /* The "standard" definition of va_list is void*. */
4266 std_build_builtin_va_list (void)
4268 return ptr_type_node
;
4271 /* The "standard" abi va_list is va_list_type_node. */
4274 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED
)
4276 return va_list_type_node
;
4279 /* The "standard" type of va_list is va_list_type_node. */
4282 std_canonical_va_list_type (tree type
)
4286 if (INDIRECT_REF_P (type
))
4287 type
= TREE_TYPE (type
);
4288 else if (POINTER_TYPE_P (type
) && POINTER_TYPE_P (TREE_TYPE (type
)))
4289 type
= TREE_TYPE (type
);
4290 wtype
= va_list_type_node
;
4292 /* Treat structure va_list types. */
4293 if (TREE_CODE (wtype
) == RECORD_TYPE
&& POINTER_TYPE_P (htype
))
4294 htype
= TREE_TYPE (htype
);
4295 else if (TREE_CODE (wtype
) == ARRAY_TYPE
)
4297 /* If va_list is an array type, the argument may have decayed
4298 to a pointer type, e.g. by being passed to another function.
4299 In that case, unwrap both types so that we can compare the
4300 underlying records. */
4301 if (TREE_CODE (htype
) == ARRAY_TYPE
4302 || POINTER_TYPE_P (htype
))
4304 wtype
= TREE_TYPE (wtype
);
4305 htype
= TREE_TYPE (htype
);
4308 if (TYPE_MAIN_VARIANT (wtype
) == TYPE_MAIN_VARIANT (htype
))
4309 return va_list_type_node
;
4314 /* The "standard" implementation of va_start: just assign `nextarg' to
4318 std_expand_builtin_va_start (tree valist
, rtx nextarg
)
4320 rtx va_r
= expand_expr (valist
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
4321 convert_move (va_r
, nextarg
, 0);
/* Expand EXP, a call to __builtin_va_start.  */

static rtx
expand_builtin_va_start (tree exp)
{
  rtx nextarg;
  tree valist;
  location_t loc = EXPR_LOCATION (exp);

  if (call_expr_nargs (exp) < 2)
    {
      error_at (loc, "too few arguments to function %<va_start%>");
      return const0_rtx;
    }

  if (fold_builtin_next_arg (exp, true))
    return const0_rtx;

  nextarg = expand_builtin_next_arg ();
  valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);

  if (targetm.expand_builtin_va_start)
    targetm.expand_builtin_va_start (valist, nextarg);
  else
    std_expand_builtin_va_start (valist, nextarg);

  return const0_rtx;
}
/* Expand EXP, a call to __builtin_va_end.  */

static rtx
expand_builtin_va_end (tree exp)
{
  tree valist = CALL_EXPR_ARG (exp, 0);

  /* Evaluate for side effects, if needed.  I hate macros that don't
     do that.  */
  if (TREE_SIDE_EFFECTS (valist))
    expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);

  return const0_rtx;
}
/* Expand EXP, a call to __builtin_va_copy.  We do this as a
   builtin rather than just as an assignment in stdarg.h because of the
   nastiness of array-type va_list types.  */

static rtx
expand_builtin_va_copy (tree exp)
{
  tree dst, src, t;
  location_t loc = EXPR_LOCATION (exp);

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  dst = stabilize_va_list_loc (loc, dst, 1);
  src = stabilize_va_list_loc (loc, src, 0);

  gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);

  if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
    {
      t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
  else
    {
      rtx dstb, srcb, size;

      /* Evaluate to pointers.  */
      dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
      srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
      size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
			  NULL_RTX, VOIDmode, EXPAND_NORMAL);

      dstb = convert_memory_address (Pmode, dstb);
      srcb = convert_memory_address (Pmode, srcb);

      /* "Dereference" to BLKmode memories.  */
      dstb = gen_rtx_MEM (BLKmode, dstb);
      set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
      set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
      srcb = gen_rtx_MEM (BLKmode, srcb);
      set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
      set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));

      /* Copy.  */
      emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
    }

  return const0_rtx;
}
/* Expand a call to one of the builtin functions __builtin_frame_address or
   __builtin_return_address.  */

static rtx
expand_builtin_frame_address (tree fndecl, tree exp)
{
  /* The argument must be a nonnegative integer constant.
     It counts the number of frames to scan up the stack.
     The value is the return address saved in that frame.  */
  if (call_expr_nargs (exp) == 0)
    /* Warning about missing arg was already issued.  */
    return const0_rtx;
  else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
    {
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	error ("invalid argument to %<__builtin_frame_address%>");
      else
	error ("invalid argument to %<__builtin_return_address%>");
      return const0_rtx;
    }
  else
    {
      rtx tem
	= expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
				      tree_to_uhwi (CALL_EXPR_ARG (exp, 0)));

      /* Some ports cannot access arbitrary stack frames.  */
      if (tem == NULL)
	{
	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	    warning (0, "unsupported argument to %<__builtin_frame_address%>");
	  else
	    warning (0, "unsupported argument to %<__builtin_return_address%>");
	  return const0_rtx;
	}

      /* For __builtin_frame_address, return what we've got.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	return tem;

      if (!REG_P (tem)
	  && ! CONSTANT_P (tem))
	tem = copy_addr_to_reg (tem);
      return tem;
    }
}
/* Expand EXP, a call to the alloca builtin.  Return NULL_RTX if we
   failed and the caller should emit a normal call.  CANNOT_ACCUMULATE
   is the same as for allocate_dynamic_stack_space.  */

static rtx
expand_builtin_alloca (tree exp, bool cannot_accumulate)
{
  rtx op0;
  rtx result;
  bool valid_arglist;
  unsigned int align;
  bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
			    == BUILT_IN_ALLOCA_WITH_ALIGN);

  valid_arglist
    = (alloca_with_align
       ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
       : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));

  if (!valid_arglist)
    return NULL_RTX;

  /* Compute the argument.  */
  op0 = expand_normal (CALL_EXPR_ARG (exp, 0));

  /* Compute the alignment.  */
  align = (alloca_with_align
	   ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
	   : BIGGEST_ALIGNMENT);

  /* Allocate the desired space.  */
  result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
  result = convert_memory_address (ptr_mode, result);

  return result;
}
/* Expand a call to bswap builtin in EXP.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_bswap (enum machine_mode target_mode, tree exp, rtx target,
		      rtx subtarget)
{
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  op0 = expand_expr (arg,
		     subtarget && GET_MODE (subtarget) == target_mode
		     ? subtarget : NULL_RTX,
		     target_mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != target_mode)
    op0 = convert_to_mode (target_mode, op0, 1);

  target = expand_unop (target_mode, bswap_optab, op0, target, 1);

  gcc_assert (target);

  return convert_to_mode (target_mode, target, 1);
}
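/* Illustrative example (not from the original source):

       __builtin_bswap32 (0x11223344)   ==>  0x44332211

   On targets where bswap_optab has a handler (e.g. the x86 "bswap"
   instruction) this expands to a single insn; otherwise expand_unop
   synthesizes the swap from shifts and masks.  */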
/* Expand a call to a unary builtin in EXP.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
		     rtx subtarget, optab op_optab)
{
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Compute the argument.  */
  op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
		     (subtarget
		      && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
			  == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
		     VOIDmode, EXPAND_NORMAL);
  /* Compute op, into TARGET if possible.
     Set TARGET to wherever the result comes back.  */
  target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
			op_optab, op0, target, op_optab != clrsb_optab);
  gcc_assert (target);

  return convert_to_mode (target_mode, target, 0);
}
/* Expand a call to __builtin_expect.  We just return our argument
   as the builtin_expect semantic should've been already executed by
   tree branch prediction pass.  */

static rtx
expand_builtin_expect (tree exp, rtx target)
{
  tree arg;

  if (call_expr_nargs (exp) < 2)
    return const0_rtx;
  arg = CALL_EXPR_ARG (exp, 0);

  target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
  /* When guessing was done, the hints should be already stripped away.  */
  gcc_assert (!flag_guess_branch_prob
	      || optimize == 0 || seen_error ());
  return target;
}
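/* Usage example (illustrative, not from the original source):

       if (__builtin_expect (err != 0, 0))
	 handle_error ();

   By the time we get here, the branch-prediction hint has already been
   consumed by the tree passes; RTL expansion merely evaluates and
   returns the first operand.  */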
/* Expand a call to __builtin_assume_aligned.  We just return our first
   argument as the builtin_assume_aligned semantic should've been already
   executed by CCP.  */

static rtx
expand_builtin_assume_aligned (tree exp, rtx target)
{
  if (call_expr_nargs (exp) < 2)
    return const0_rtx;
  target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
			EXPAND_NORMAL);
  gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
	      && (call_expr_nargs (exp) < 3
		  || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
  return target;
}
static void
expand_builtin_trap (void)
{
#ifdef HAVE_trap
  if (HAVE_trap)
    {
      rtx insn = emit_insn (gen_trap ());
      /* For trap insns when not accumulating outgoing args force
	 REG_ARGS_SIZE note to prevent crossjumping of calls with
	 different args sizes.  */
      if (!ACCUMULATE_OUTGOING_ARGS)
	add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
    }
  else
#endif
    emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
  emit_barrier ();
}
/* Expand a call to __builtin_unreachable.  We do nothing except emit
   a barrier saying that control flow will not pass here.

   It is the responsibility of the program being compiled to ensure
   that control flow does never reach __builtin_unreachable.  */

static void
expand_builtin_unreachable (void)
{
  emit_barrier ();
}
/* Expand EXP, a call to fabs, fabsf or fabsl.
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function inline.  If convenient, the result should be placed
   in TARGET.  SUBTARGET may be used as the target for computing
   the operand.  */

static rtx
expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
{
  enum machine_mode mode;
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
  return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
}
/* Expand EXP, a call to copysign, copysignf, or copysignl.
   Return NULL if a normal call should be emitted rather than expanding the
   function inline.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing the operand.  */

static rtx
expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
{
  rtx op0, op1;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

  arg = CALL_EXPR_ARG (exp, 1);
  op1 = expand_normal (arg);

  return expand_copysign (op0, op1, target);
}
/* Expand a call to __builtin___clear_cache.  */

static rtx
expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
{
#ifndef HAVE_clear_cache
#ifdef CLEAR_INSN_CACHE
  /* There is no "clear_cache" insn, and __clear_cache() in libgcc
     does something.  Just do the default expansion to a call to
     __clear_cache().  */
  return NULL_RTX;
#else
  /* There is no "clear_cache" insn, and __clear_cache() in libgcc
     does nothing.  There is no need to call it.  Do nothing.  */
  return const0_rtx;
#endif /* CLEAR_INSN_CACHE */
#else
  /* We have a "clear_cache" insn, and it will handle everything.  */
  tree begin, end;
  rtx begin_rtx, end_rtx;

  /* We must not expand to a library call.  If we did, any
     fallback library function in libgcc that might contain a call to
     __builtin___clear_cache() would recurse infinitely.  */
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    {
      error ("both arguments to %<__builtin___clear_cache%> must be pointers");
      return const0_rtx;
    }

  if (HAVE_clear_cache)
    {
      struct expand_operand ops[2];

      begin = CALL_EXPR_ARG (exp, 0);
      begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);

      end = CALL_EXPR_ARG (exp, 1);
      end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);

      create_address_operand (&ops[0], begin_rtx);
      create_address_operand (&ops[1], end_rtx);
      if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
	return const0_rtx;
    }
  return const0_rtx;
#endif /* HAVE_clear_cache */
}
/* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT.  */

static rtx
round_trampoline_addr (rtx tramp)
{
  rtx temp, addend, mask;

  /* If we don't need too much alignment, we'll have been guaranteed
     proper alignment by get_trampoline_type.  */
  if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
    return tramp;

  /* Round address up to desired boundary.  */
  temp = gen_reg_rtx (Pmode);
  addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
  mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);

  temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
			      temp, 0, OPTAB_LIB_WIDEN);
  tramp = expand_simple_binop (Pmode, AND, temp, mask,
			       temp, 0, OPTAB_LIB_WIDEN);

  return tramp;
}
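/* Illustrative example (not from the original source): with a
   TRAMPOLINE_ALIGNMENT of 64 bits (8 bytes), addend = 7 and mask = -8,
   so an address of 0x1003 rounds up as

       (0x1003 + 7) & -8  ==  0x1008

   which is the usual add-then-mask idiom for rounding up to a power
   of two.  */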
static rtx
expand_builtin_init_trampoline (tree exp, bool onstack)
{
  tree t_tramp, t_func, t_chain;
  rtx m_tramp, r_tramp, r_chain, tmp;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
			 POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_tramp = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  r_tramp = expand_normal (t_tramp);
  m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
  MEM_NOTRAP_P (m_tramp) = 1;

  /* If ONSTACK, the TRAMP argument should be the address of a field
     within the local function's FRAME decl.  Either way, let's see if
     we can fill in the MEM_ATTRs for this memory.  */
  if (TREE_CODE (t_tramp) == ADDR_EXPR)
    set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);

  /* Creator of a heap trampoline is responsible for making sure the
     address is aligned to at least STACK_BOUNDARY.  Normally malloc
     will ensure this anyhow.  */
  tmp = round_trampoline_addr (r_tramp);
  if (tmp != r_tramp)
    {
      m_tramp = change_address (m_tramp, BLKmode, tmp);
      set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
      set_mem_size (m_tramp, TRAMPOLINE_SIZE);
    }

  /* The FUNC argument should be the address of the nested function.
     Extract the actual function decl to pass to the hook.  */
  gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
  t_func = TREE_OPERAND (t_func, 0);
  gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);

  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the trampoline.  */
  targetm.calls.trampoline_init (m_tramp, t_func, r_chain);

  if (onstack)
    {
      trampolines_created = 1;

      warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
		  "trampoline generated for nested function %qD", t_func);
    }

  return const0_rtx;
}
static rtx
expand_builtin_adjust_trampoline (tree exp)
{
  rtx tramp;

  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
  tramp = round_trampoline_addr (tramp);
  if (targetm.calls.trampoline_adjust_address)
    tramp = targetm.calls.trampoline_adjust_address (tramp);

  return tramp;
}
/* Expand the call EXP to the built-in signbit, signbitf or signbitl
   function.  The function first checks whether the back end provides
   an insn to implement signbit for the respective mode.  If not, it
   checks whether the floating point format of the value is such that
   the sign bit can be extracted.  If that is not the case, the
   function returns NULL_RTX to indicate that a normal call should be
   emitted rather than expanding the function in-line.  EXP is the
   expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  */
static rtx
expand_builtin_signbit (tree exp, rtx target)
{
  const struct real_format *fmt;
  enum machine_mode fmode, imode, rmode;
  tree arg;
  int word, bitpos;
  enum insn_code icode;
  rtx temp;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  fmode = TYPE_MODE (TREE_TYPE (arg));
  rmode = TYPE_MODE (TREE_TYPE (exp));
  fmt = REAL_MODE_FORMAT (fmode);

  arg = builtin_save_expr (arg);

  /* Expand the argument yielding a RTX expression.  */
  temp = expand_normal (arg);

  /* Check if the back end provides an insn that handles signbit for the
     argument's mode.  */
  icode = optab_handler (signbit_optab, fmode);
  if (icode != CODE_FOR_nothing)
    {
      rtx_insn *last = get_last_insn ();
      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
	return target;
      delete_insns_since (last);
    }

  /* For floating point formats without a sign bit, implement signbit
     as "ARG < 0.0".  */
  bitpos = fmt->signbit_ro;
  if (bitpos < 0)
    {
      /* But we can't do this if the format supports signed zero.  */
      if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
	return NULL_RTX;

      arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
			     build_real (TREE_TYPE (arg), dconst0));
      return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
    }

  if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
    {
      imode = int_mode_for_mode (fmode);
      if (imode == BLKmode)
	return NULL_RTX;
      temp = gen_lowpart (imode, temp);
    }
  else
    {
      imode = word_mode;
      /* Handle targets with different FP word orders.  */
      if (FLOAT_WORDS_BIG_ENDIAN)
	word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
      else
	word = bitpos / BITS_PER_WORD;
      temp = operand_subword_force (temp, word, fmode);
      bitpos = bitpos % BITS_PER_WORD;
    }

  /* Force the intermediate word_mode (or narrower) result into a
     register.  This avoids attempting to create paradoxical SUBREGs
     of floating point modes below.  */
  temp = force_reg (imode, temp);

  /* If the bitpos is within the "result mode" lowpart, the operation
     can be implemented with a single bitwise AND.  Otherwise, we need
     a right shift and an AND.  */

  if (bitpos < GET_MODE_BITSIZE (rmode))
    {
      wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));

      if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
	temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp,
			   immed_wide_int_const (mask, rmode),
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }
  else
    {
      /* Perform a logical right shift to place the signbit in the least
	 significant bit, then truncate the result to the desired mode
	 and mask just this bit.  */
      temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
      temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp, const1_rtx,
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }

  return temp;
}
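/* Illustrative example (not from the original source): for float
   (SFmode, bitpos = 31) with a 32-bit int result, the first branch
   applies and computes  bits & 0x80000000.  For double (DFmode,
   bitpos = 63) with a 32-bit result, bitpos >= 32, so the second
   branch shifts first:  (bits >> 63) & 1.  */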
/* Expand fork or exec calls.  TARGET is the desired target of the
   call.  EXP is the call.  FN is the
   identifier of the actual function.  IGNORE is nonzero if the
   value is to be ignored.  */

static rtx
expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
{
  tree id, decl;
  tree call;

  /* If we are not profiling, just call the function.  */
  if (!profile_arc_flag)
    return NULL_RTX;

  /* Otherwise call the wrapper.  This should be equivalent for the rest of
     compiler, so the code does not diverge, and the wrapper may run the
     code necessary for keeping the profiling sane.  */

  switch (DECL_FUNCTION_CODE (fn))
    {
    case BUILT_IN_FORK:
      id = get_identifier ("__gcov_fork");
      break;

    case BUILT_IN_EXECL:
      id = get_identifier ("__gcov_execl");
      break;

    case BUILT_IN_EXECV:
      id = get_identifier ("__gcov_execv");
      break;

    case BUILT_IN_EXECLP:
      id = get_identifier ("__gcov_execlp");
      break;

    case BUILT_IN_EXECLE:
      id = get_identifier ("__gcov_execle");
      break;

    case BUILT_IN_EXECVP:
      id = get_identifier ("__gcov_execvp");
      break;

    case BUILT_IN_EXECVE:
      id = get_identifier ("__gcov_execve");
      break;

    default:
      gcc_unreachable ();
    }

  decl = build_decl (DECL_SOURCE_LOCATION (fn),
		     FUNCTION_DECL, id, TREE_TYPE (fn));
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  TREE_NOTHROW (decl) = 1;
  DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
  DECL_VISIBILITY_SPECIFIED (decl) = 1;
  call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
  return expand_call (call, target, ignore);
}
/* Reconstitute a mode for a __sync intrinsic operation.  Since the type of
   the pointer in these functions is void*, the tree optimizers may remove
   casts.  The mode computed in expand_builtin isn't reliable either, due
   to __sync_bool_compare_and_swap.

   FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
   group of builtins.  This gives us log2 of the mode size.  */

static inline enum machine_mode
get_builtin_sync_mode (int fcode_diff)
{
  /* The size is not negotiable, so ask not to get BLKmode in return
     if the target indicates that a smaller size would be better.  */
  return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
}
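/* Illustrative example (not from the original source): for
   __sync_fetch_and_add_4, FCODE_DIFF is 2 (the _4 variant is two entries
   past the _1 base), so the mode is

       mode_for_size (8 << 2, MODE_INT, 0)  ==  SImode (32 bits).  */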
/* Expand the memory expression LOC and return the appropriate memory operand
   for the builtin_sync operations.  */

static rtx
get_builtin_sync_mem (tree loc, enum machine_mode mode)
{
  rtx addr, mem;

  addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
  addr = convert_memory_address (Pmode, addr);

  /* Note that we explicitly do not want any alias information for this
     memory, so that we kill all other live memories.  Otherwise we don't
     satisfy the full barrier semantics of the intrinsic.  */
  mem = validize_mem (gen_rtx_MEM (mode, addr));

  /* The alignment needs to be at least according to that of the mode.  */
  set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
			   get_pointer_alignment (loc)));
  set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
  MEM_VOLATILE_P (mem) = 1;

  return mem;
}
/* Make sure an argument is in the right mode.
   EXP is the tree argument.
   MODE is the mode it should be in.  */

static rtx
expand_expr_force_mode (tree exp, enum machine_mode mode)
{
  rtx val;
  enum machine_mode old_mode;

  val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
  /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
     of CONST_INTs, where we know the old_mode only from the call argument.  */

  old_mode = GET_MODE (val);
  if (old_mode == VOIDmode)
    old_mode = TYPE_MODE (TREE_TYPE (exp));
  val = convert_modes (mode, old_mode, val, 1);
  return val;
}
/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
   EXP is the CALL_EXPR.  CODE is the rtx code
   that corresponds to the arithmetic or logical operation from the name;
   an exception here is that NOT actually means NAND.  TARGET is an optional
   place for us to store the results; AFTER is true if this is the
   fetch_and_xxx form.  */

static rtx
expand_builtin_sync_operation (enum machine_mode mode, tree exp,
			       enum rtx_code code, bool after,
			       rtx target)
{
  rtx val, mem;
  location_t loc = EXPR_LOCATION (exp);

  if (code == NOT && warn_sync_nand)
    {
      tree fndecl = get_callee_fndecl (exp);
      enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

      static bool warned_f_a_n, warned_n_a_f;

      switch (fcode)
	{
	case BUILT_IN_SYNC_FETCH_AND_NAND_1:
	case BUILT_IN_SYNC_FETCH_AND_NAND_2:
	case BUILT_IN_SYNC_FETCH_AND_NAND_4:
	case BUILT_IN_SYNC_FETCH_AND_NAND_8:
	case BUILT_IN_SYNC_FETCH_AND_NAND_16:
	  if (warned_f_a_n)
	    break;

	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_f_a_n = true;
	  break;

	case BUILT_IN_SYNC_NAND_AND_FETCH_1:
	case BUILT_IN_SYNC_NAND_AND_FETCH_2:
	case BUILT_IN_SYNC_NAND_AND_FETCH_4:
	case BUILT_IN_SYNC_NAND_AND_FETCH_8:
	case BUILT_IN_SYNC_NAND_AND_FETCH_16:
	  if (warned_n_a_f)
	    break;

	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_n_a_f = true;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SEQ_CST,
				 after);
}
/* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
   intrinsics.  EXP is the CALL_EXPR.  IS_BOOL is
   true if this is the boolean form.  TARGET is a place for us to store the
   results; this is NOT optional if IS_BOOL is true.  */

static rtx
expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
				 bool is_bool, rtx target)
{
  rtx old_val, new_val, mem;
  rtx *pbool, *poval;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
  new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);

  pbool = poval = NULL;
  if (target != const0_rtx)
    {
      if (is_bool)
	pbool = &target;
      else
	poval = &target;
    }
  if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
				       false, MEMMODEL_SEQ_CST,
				       MEMMODEL_SEQ_CST))
    return NULL_RTX;

  return target;
}
/* Expand the __sync_lock_test_and_set intrinsic.  Note that the most
   general form is actually an atomic exchange, and some targets only
   support a reduced form with the second argument being a constant 1.
   EXP is the CALL_EXPR; TARGET is an optional place for us to store
   the results.  */

static rtx
expand_builtin_sync_lock_test_and_set (enum machine_mode mode, tree exp,
				       rtx target)
{
  rtx val, mem;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_sync_lock_test_and_set (target, mem, val);
}
/* Expand the __sync_lock_release intrinsic.  EXP is the CALL_EXPR.  */

static void
expand_builtin_sync_lock_release (enum machine_mode mode, tree exp)
{
  rtx mem;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  expand_atomic_store (mem, const0_rtx, MEMMODEL_RELEASE, true);
}
/* Given an integer representing an ``enum memmodel'', verify its
   correctness and return the memory model enum.  */

static enum memmodel
get_memmodel (tree exp)
{
  rtx op;
  unsigned HOST_WIDE_INT val;

  /* If the parameter is not a constant, it's a run time value so we'll just
     convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking.  */
  if (TREE_CODE (exp) != INTEGER_CST)
    return MEMMODEL_SEQ_CST;

  op = expand_normal (exp);

  val = INTVAL (op);
  if (targetm.memmodel_check)
    val = targetm.memmodel_check (val);
  else if (val & ~MEMMODEL_MASK)
    {
      warning (OPT_Winvalid_memory_model,
	       "Unknown architecture specifier in memory model to builtin.");
      return MEMMODEL_SEQ_CST;
    }

  if ((INTVAL (op) & MEMMODEL_MASK) >= MEMMODEL_LAST)
    {
      warning (OPT_Winvalid_memory_model,
	       "invalid memory model argument to builtin");
      return MEMMODEL_SEQ_CST;
    }

  return (enum memmodel) val;
}
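/* Illustrative example (not from the original source): a call such as

       __atomic_load_n (p, __ATOMIC_ACQUIRE);

   arrives here with an INTEGER_CST of 2 (MEMMODEL_ACQUIRE).  A model
   read from a variable at run time is conservatively treated as
   MEMMODEL_SEQ_CST above.  */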
/* Expand the __atomic_exchange intrinsic:
   	TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_exchange (enum machine_mode mode, tree exp, rtx target)
{
  rtx val, mem;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));
  if ((model & MEMMODEL_MASK) == MEMMODEL_CONSUME)
    {
      error ("invalid memory model for %<__atomic_exchange%>");
      return NULL_RTX;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_exchange (target, mem, val, model);
}
/* Expand the __atomic_compare_exchange intrinsic:
   	bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
					TYPE desired, BOOL weak,
					enum memmodel success,
					enum memmodel failure)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_compare_exchange (enum machine_mode mode, tree exp,
					rtx target)
{
  rtx expect, desired, mem, oldval;
  rtx_code_label *label;
  enum memmodel success, failure;
  tree weak;
  bool is_weak;

  success = get_memmodel (CALL_EXPR_ARG (exp, 4));
  failure = get_memmodel (CALL_EXPR_ARG (exp, 5));

  if ((failure & MEMMODEL_MASK) == MEMMODEL_RELEASE
      || (failure & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
    {
      error ("invalid failure memory model for %<__atomic_compare_exchange%>");
      return NULL_RTX;
    }

  if (failure > success)
    {
      error ("failure memory model cannot be stronger than success "
	     "memory model for %<__atomic_compare_exchange%>");
      return NULL_RTX;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  expect = expand_normal (CALL_EXPR_ARG (exp, 1));
  expect = convert_memory_address (Pmode, expect);
  expect = gen_rtx_MEM (mode, expect);
  desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);

  weak = CALL_EXPR_ARG (exp, 3);
  is_weak = false;
  if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
    is_weak = true;

  if (target == const0_rtx)
    target = NULL;

  /* Lest the rtl backend create a race condition with an improper store
     to memory, always create a new pseudo for OLDVAL.  */
  oldval = NULL;

  if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
				       is_weak, success, failure))
    return NULL_RTX;

  /* Conditionally store back to EXPECT, lest we create a race condition
     with an improper store to memory.  */
  /* ??? With a rearrangement of atomics at the gimple level, we can handle
     the normal case where EXPECT is totally private, i.e. a register.  At
     which point the store can be unconditional.  */
  label = gen_label_rtx ();
  emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL, VOIDmode, 1, label);
  emit_move_insn (expect, oldval);
  emit_label (label);

  return target;
}
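/* Usage example (illustrative, not from the original source): the
   typical CAS loop

       int expected = *p;
       while (!__atomic_compare_exchange_n (p, &expected, expected + 1,
					    0, __ATOMIC_SEQ_CST,
					    __ATOMIC_SEQ_CST))
	 ;

   relies on the conditional store-back emitted above: on failure,
   EXPECTED is reloaded with the value actually observed in *p.  */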
/* Expand the __atomic_load intrinsic:
   	TYPE __atomic_load (TYPE *object, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_load (enum machine_mode mode, tree exp, rtx target)
{
  rtx mem;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 1));
  if ((model & MEMMODEL_MASK) == MEMMODEL_RELEASE
      || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
    {
      error ("invalid memory model for %<__atomic_load%>");
      return NULL_RTX;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operand.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  return expand_atomic_load (target, mem, model);
}
/* Expand the __atomic_store intrinsic:
   	void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_store (enum machine_mode mode, tree exp)
{
  rtx mem, val;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));
  if ((model & MEMMODEL_MASK) != MEMMODEL_RELAXED
      && (model & MEMMODEL_MASK) != MEMMODEL_SEQ_CST
      && (model & MEMMODEL_MASK) != MEMMODEL_RELEASE)
    {
      error ("invalid memory model for %<__atomic_store%>");
      return NULL_RTX;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_store (mem, val, model, false);
}
/* Expand the __atomic_fetch_XXX intrinsic:
   	TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.
   CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
   FETCH_AFTER is true if returning the result of the operation.
   FETCH_AFTER is false if returning the value before the operation.
   IGNORE is true if the result is not used.
   EXT_CALL is the correct builtin for an external call if this cannot be
   resolved to an instruction sequence.  */

static rtx
expand_builtin_atomic_fetch_op (enum machine_mode mode, tree exp, rtx target,
				enum rtx_code code, bool fetch_after,
				bool ignore, enum built_in_function ext_call)
{
  rtx val, mem, ret;
  enum memmodel model;
  tree fndecl;
  tree addr;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  /* Only try generating instructions if inlining is turned on.  */
  if (flag_inline_atomics)
    {
      ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
      if (ret)
	return ret;
    }

  /* Return if a different routine isn't needed for the library call.  */
  if (ext_call == BUILT_IN_NONE)
    return NULL_RTX;

  /* Change the call to the specified function.  */
  fndecl = get_callee_fndecl (exp);
  addr = CALL_EXPR_FN (exp);
  STRIP_NOPS (addr);

  gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
  TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);

  /* Expand the call here so we can emit trailing code.  */
  ret = expand_call (exp, target, ignore);

  /* Replace the original function just in case it matters.  */
  TREE_OPERAND (addr, 0) = fndecl;

  /* Then issue the arithmetic correction to return the right result.  */
  if (!ignore)
    {
      if (code == NOT)
	{
	  ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
				     OPTAB_LIB_WIDEN);
	  ret = expand_simple_unop (mode, NOT, ret, target, true);
	}
      else
	ret = expand_simple_binop (mode, code, ret, val, target, true,
				   OPTAB_LIB_WIDEN);
    }
  return ret;
}
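/* Illustrative example (not from the original source): when
   __atomic_nand_fetch falls back to a library routine that only returns
   the old value, the correction above recomputes the post-operation
   value as

       ret = ~(old & val);

   which matches the NAND semantics of the _fetch form (recall that NOT
   means NAND here).  */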
#ifndef HAVE_atomic_clear
# define HAVE_atomic_clear 0
# define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
#endif

/* Expand an atomic clear operation.
	void _atomic_clear (BOOL *obj, enum memmodel)
   EXP is the call expression.  */

static rtx
expand_builtin_atomic_clear (tree exp)
{
  enum machine_mode mode;
  rtx mem, ret;
  enum memmodel model;

  mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  model = get_memmodel (CALL_EXPR_ARG (exp, 1));

  if ((model & MEMMODEL_MASK) == MEMMODEL_ACQUIRE
      || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
    {
      error ("invalid memory model for %<__atomic_store%>");
      return const0_rtx;
    }

  if (HAVE_atomic_clear)
    {
      emit_insn (gen_atomic_clear (mem, model));
      return const0_rtx;
    }

  /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
     Failing that, a store is issued by __atomic_store.  The only way this can
     fail is if the bool type is larger than a word size.  Unlikely, but
     handle it anyway for completeness.  Assume a single threaded model since
     there is no atomic support in this case, and no barriers are required.  */
  ret = expand_atomic_store (mem, const0_rtx, model, true);
  if (!ret)
    emit_move_insn (mem, const0_rtx);
  return const0_rtx;
}
/* Expand an atomic test_and_set operation.
	bool _atomic_test_and_set (BOOL *obj, enum memmodel)
   EXP is the call expression.  */

static rtx
expand_builtin_atomic_test_and_set (tree exp, rtx target)
{
  rtx mem;
  enum memmodel model;
  enum machine_mode mode;

  mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  model = get_memmodel (CALL_EXPR_ARG (exp, 1));

  return expand_atomic_test_and_set (target, mem, model);
}
/* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
   this architecture.  If ARG1 is NULL, use typical alignment for size ARG0.  */

static tree
fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
{
  int size;
  enum machine_mode mode;
  unsigned int mode_align, type_align;

  if (TREE_CODE (arg0) != INTEGER_CST)
    return NULL_TREE;

  size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
  mode = mode_for_size (size, MODE_INT, 0);
  mode_align = GET_MODE_ALIGNMENT (mode);

  if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
    type_align = mode_align;
  else
    {
      tree ttype = TREE_TYPE (arg1);

      /* This function is usually invoked and folded immediately by the front
	 end before anything else has a chance to look at it.  The pointer
	 parameter at this point is usually cast to a void *, so check for that
	 and look past the cast.  */
      if (TREE_CODE (arg1) == NOP_EXPR && POINTER_TYPE_P (ttype)
	  && VOID_TYPE_P (TREE_TYPE (ttype)))
	arg1 = TREE_OPERAND (arg1, 0);

      ttype = TREE_TYPE (arg1);
      gcc_assert (POINTER_TYPE_P (ttype));

      /* Get the underlying type of the object.  */
      ttype = TREE_TYPE (ttype);
      type_align = TYPE_ALIGN (ttype);
    }

  /* If the object has smaller alignment, the lock free routines cannot
     be used there.  */
  if (type_align < mode_align)
    return boolean_false_node;

  /* Check if a compare_and_swap pattern exists for the mode which represents
     the required size.  The pattern is not allowed to fail, so the existence
     of the pattern indicates support is present.  */
  if (can_compare_and_swap_p (mode, true))
    return boolean_true_node;

  return boolean_false_node;
}
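/* Illustrative example (not from the original source): on a target with
   a 4-byte compare-and-swap pattern,

       __atomic_always_lock_free (4, 0)

   folds to true, while passing a pointer to an underaligned (e.g.
   packed) 4-byte field makes type_align < mode_align above and folds
   to false.  */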
/* Return true if the parameters to call EXP represent an object which will
   always generate lock free instructions.  The first argument represents the
   size of the object, and the second parameter is a pointer to the object
   itself.  If NULL is passed for the object, then the result is based on
   typical alignment for an object of the specified size.  Otherwise return
   false.  */

static rtx
expand_builtin_atomic_always_lock_free (tree exp)
{
  tree size;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);

  if (TREE_CODE (arg0) != INTEGER_CST)
    {
      error ("non-constant argument 1 to __atomic_always_lock_free");
      return const0_rtx;
    }

  size = fold_builtin_atomic_always_lock_free (arg0, arg1);
  if (size == boolean_true_node)
    return const1_rtx;
  return const0_rtx;
}
/* Return a one or zero if it can be determined that object ARG1 of size ARG0
   is lock free on this architecture.  */

static tree
fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
{
  if (!flag_inline_atomics)
    return NULL_TREE;

  /* If it isn't always lock free, don't generate a result.  */
  if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
    return boolean_true_node;

  return NULL_TREE;
}
/* Return true if the parameters to call EXP represent an object which will
   always generate lock free instructions.  The first argument represents the
   size of the object, and the second parameter is a pointer to the object
   itself.  If NULL is passed for the object, then the result is based on
   typical alignment for an object of the specified size.  Otherwise return
   NULL.  */

static rtx
expand_builtin_atomic_is_lock_free (tree exp)
{
  tree size;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);

  if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
    {
      error ("non-integer argument 1 to __atomic_is_lock_free");
      return NULL_RTX;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* If the value is known at compile time, return the RTX for it.  */
  size = fold_builtin_atomic_is_lock_free (arg0, arg1);
  if (size == boolean_true_node)
    return const1_rtx;

  return NULL_RTX;
}
/* Expand the __atomic_thread_fence intrinsic:
   	void __atomic_thread_fence (enum memmodel)
   EXP is the CALL_EXPR.  */

static void
expand_builtin_atomic_thread_fence (tree exp)
{
  enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
  expand_mem_thread_fence (model);
}

/* Expand the __atomic_signal_fence intrinsic:
   	void __atomic_signal_fence (enum memmodel)
   EXP is the CALL_EXPR.  */

static void
expand_builtin_atomic_signal_fence (tree exp)
{
  enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
  expand_mem_signal_fence (model);
}

/* Expand the __sync_synchronize intrinsic.  */

static void
expand_builtin_sync_synchronize (void)
{
  expand_mem_thread_fence (MEMMODEL_SEQ_CST);
}
static rtx
expand_builtin_thread_pointer (tree exp, rtx target)
{
  enum insn_code icode;
  if (!validate_arglist (exp, VOID_TYPE))
    return const0_rtx;
  icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand op;
      /* If the target is not suitable then create a new target.  */
      if (target == NULL_RTX
	  || !REG_P (target)
	  || GET_MODE (target) != Pmode)
	target = gen_reg_rtx (Pmode);
      create_output_operand (&op, target, Pmode);
      expand_insn (icode, 1, &op);
      return target;
    }
  error ("__builtin_thread_pointer is not supported on this target");
  return const0_rtx;
}

static void
expand_builtin_set_thread_pointer (tree exp)
{
  enum insn_code icode;
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return;
  icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand op;
      rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
			     Pmode, EXPAND_NORMAL);
      create_input_operand (&op, val, Pmode);
      expand_insn (icode, 1, &op);
      return;
    }
  error ("__builtin_set_thread_pointer is not supported on this target");
}
/* Emit code to restore the current value of stack.  */

static void
expand_stack_restore (tree var)
{
  rtx_insn *prev;
  rtx sa = expand_normal (var);

  sa = convert_memory_address (Pmode, sa);

  prev = get_last_insn ();
  emit_stack_restore (SAVE_BLOCK, sa);
  fixup_args_size_notes (prev, get_last_insn (), 0);
}

/* Emit code to save the current value of stack.  */

static rtx
expand_stack_save (void)
{
  rtx ret = NULL_RTX;

  do_pending_stack_adjust ();
  emit_stack_save (SAVE_BLOCK, &ret);
  return ret;
}
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

rtx
expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
		int ignore)
{
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
  int flags;

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return targetm.expand_builtin (exp, target, subtarget, mode, ignore);

  /* When not optimizing, generate calls to library functions for a certain
     set of builtins.  */
  if (!optimize
      && !called_as_built_in (fndecl)
      && fcode != BUILT_IN_FORK
      && fcode != BUILT_IN_EXECL
      && fcode != BUILT_IN_EXECV
      && fcode != BUILT_IN_EXECLP
      && fcode != BUILT_IN_EXECLE
      && fcode != BUILT_IN_EXECVP
      && fcode != BUILT_IN_EXECVE
      && fcode != BUILT_IN_ALLOCA
      && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
      && fcode != BUILT_IN_FREE)
    return expand_call (exp, target, ignore);

  /* The built-in function expanders test for target == const0_rtx
     to determine whether the function's result will be ignored.  */
  if (ignore)
    target = const0_rtx;

  /* If the result of a pure or const built-in function is ignored, and
     none of its arguments are volatile, we can avoid expanding the
     built-in call and just evaluate the arguments for side-effects.  */
  if (target == const0_rtx
      && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
      && !(flags & ECF_LOOPING_CONST_OR_PURE))
    {
      bool volatilep = false;
      tree arg;
      call_expr_arg_iterator iter;

      FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
	if (TREE_THIS_VOLATILE (arg))
	  {
	    volatilep = true;
	    break;
	  }

      if (!volatilep)
	{
	  FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
	    expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return const0_rtx;
	}
    }

  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_FABS):
    case BUILT_IN_FABSD32:
    case BUILT_IN_FABSD64:
    case BUILT_IN_FABSD128:
      target = expand_builtin_fabs (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_COPYSIGN):
      target = expand_builtin_copysign (exp, target, subtarget);
      if (target)
	return target;
      break;

      /* Just do a normal library call if we were unable to fold
	 the values.  */
    CASE_FLT_FN (BUILT_IN_CABS):
      break;

    CASE_FLT_FN (BUILT_IN_EXP):
    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
    CASE_FLT_FN (BUILT_IN_EXP2):
    CASE_FLT_FN (BUILT_IN_EXPM1):
    CASE_FLT_FN (BUILT_IN_LOGB):
    CASE_FLT_FN (BUILT_IN_LOG):
    CASE_FLT_FN (BUILT_IN_LOG10):
    CASE_FLT_FN (BUILT_IN_LOG2):
    CASE_FLT_FN (BUILT_IN_LOG1P):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ACOS):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
      /* Treat these like sqrt only if unsafe math optimizations are allowed,
	 because of possible accuracy problems.  */
      if (! flag_unsafe_math_optimizations)
	break;

    CASE_FLT_FN (BUILT_IN_SQRT):
    CASE_FLT_FN (BUILT_IN_FLOOR):
    CASE_FLT_FN (BUILT_IN_CEIL):
    CASE_FLT_FN (BUILT_IN_TRUNC):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      target = expand_builtin_mathfn (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_FMA):
      target = expand_builtin_mathfn_ternary (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_ILOGB):
      if (! flag_unsafe_math_optimizations)
	break;
    CASE_FLT_FN (BUILT_IN_ISINF):
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_ISFINITE:
    case BUILT_IN_ISNORMAL:
      target = expand_builtin_interclass_mathfn (exp, target);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      target = expand_builtin_int_roundingfn (exp, target);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_IRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_IROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      target = expand_builtin_int_roundingfn_2 (exp, target);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_POWI):
      target = expand_builtin_powi (exp, target);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_ATAN2):
    CASE_FLT_FN (BUILT_IN_LDEXP):
    CASE_FLT_FN (BUILT_IN_SCALB):
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      if (! flag_unsafe_math_optimizations)
	break;

    CASE_FLT_FN (BUILT_IN_FMOD):
    CASE_FLT_FN (BUILT_IN_REMAINDER):
    CASE_FLT_FN (BUILT_IN_DREM):
    CASE_FLT_FN (BUILT_IN_POW):
      target = expand_builtin_mathfn_2 (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_CEXPI):
      target = expand_builtin_cexpi (exp, target);
      gcc_assert (target);
      return target;

    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      if (! flag_unsafe_math_optimizations)
	break;
      target = expand_builtin_mathfn_3 (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_SINCOS):
      if (! flag_unsafe_math_optimizations)
	break;
      target = expand_builtin_sincos (exp);
      if (target)
	return target;
      break;
    case BUILT_IN_APPLY_ARGS:
      return expand_builtin_apply_args ();

      /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
	 FUNCTION with a copy of the parameters described by
	 ARGUMENTS, and ARGSIZE.  It returns a block of memory
	 allocated on the stack into which is stored all the registers
	 that might possibly be used for returning the result of a
	 function.  ARGUMENTS is the value returned by
	 __builtin_apply_args.  ARGSIZE is the number of bytes of
	 arguments that must be copied.  ??? How should this value be
	 computed?  We'll also need a safe worst case value for varargs
	 functions.  */
    case BUILT_IN_APPLY:
      if (!validate_arglist (exp, POINTER_TYPE,
			     POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
	  && !validate_arglist (exp, REFERENCE_TYPE,
				POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
	return const0_rtx;
      else
	{
	  rtx ops[3];

	  ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
	  ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
	  ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));

	  return expand_builtin_apply (ops[0], ops[1], ops[2]);
	}

      /* __builtin_return (RESULT) causes the function to return the
	 value described by RESULT.  RESULT is address of the block of
	 memory returned by __builtin_apply.  */
    case BUILT_IN_RETURN:
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
	expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
      return const0_rtx;

    case BUILT_IN_SAVEREGS:
      return expand_builtin_saveregs ();

    case BUILT_IN_VA_ARG_PACK:
      /* All valid uses of __builtin_va_arg_pack () are removed during
	 inlining.  */
      error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
      return const0_rtx;

    case BUILT_IN_VA_ARG_PACK_LEN:
      /* All valid uses of __builtin_va_arg_pack_len () are removed during
	 inlining.  */
      error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
      return const0_rtx;

      /* Return the address of the first anonymous stack arg.  */
    case BUILT_IN_NEXT_ARG:
      if (fold_builtin_next_arg (exp, false))
	return const0_rtx;
      return expand_builtin_next_arg ();
    case BUILT_IN_CLEAR_CACHE:
      target = expand_builtin___clear_cache (exp);
      if (target)
	return target;
      break;

    case BUILT_IN_CLASSIFY_TYPE:
      return expand_builtin_classify_type (exp);

    case BUILT_IN_CONSTANT_P:
      return const0_rtx;

    case BUILT_IN_FRAME_ADDRESS:
    case BUILT_IN_RETURN_ADDRESS:
      return expand_builtin_frame_address (fndecl, exp);

      /* Returns the address of the area where the structure is returned.
	 0 otherwise.  */
    case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      if (call_expr_nargs (exp) != 0
	  || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
	  || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
	return const0_rtx;
      else
	return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);

    case BUILT_IN_ALLOCA:
    case BUILT_IN_ALLOCA_WITH_ALIGN:
      /* If the allocation stems from the declaration of a variable-sized
	 object, it cannot accumulate.  */
      target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
      if (target)
	return target;
      break;

    case BUILT_IN_STACK_SAVE:
      return expand_stack_save ();

    case BUILT_IN_STACK_RESTORE:
      expand_stack_restore (CALL_EXPR_ARG (exp, 0));
      return const0_rtx;

    case BUILT_IN_BSWAP16:
    case BUILT_IN_BSWAP32:
    case BUILT_IN_BSWAP64:
      target = expand_builtin_bswap (target_mode, exp, target, subtarget);
      if (target)
	return target;
      break;
    CASE_INT_FN (BUILT_IN_FFS):
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, ffs_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_CLZ):
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, clz_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_CTZ):
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, ctz_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_CLRSB):
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, clrsb_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_POPCOUNT):
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, popcount_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_PARITY):
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, parity_optab);
      if (target)
	return target;
      break;

    case BUILT_IN_STRLEN:
      target = expand_builtin_strlen (exp, target, target_mode);
      if (target)
	return target;
      break;

    case BUILT_IN_STRCPY:
      target = expand_builtin_strcpy (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_STRNCPY:
      target = expand_builtin_strncpy (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_STPCPY:
      target = expand_builtin_stpcpy (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMCPY:
      target = expand_builtin_memcpy (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMPCPY:
      target = expand_builtin_mempcpy (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMSET:
      target = expand_builtin_memset (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_BZERO:
      target = expand_builtin_bzero (exp);
      if (target)
	return target;
      break;

    case BUILT_IN_STRCMP:
      target = expand_builtin_strcmp (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_STRNCMP:
      target = expand_builtin_strncmp (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMCMP:
      target = expand_builtin_memcmp (exp, target, mode);
      if (target)
	return target;
      break;
    case BUILT_IN_SETJMP:
      /* This should have been lowered to the builtins below.  */
      gcc_unreachable ();

    case BUILT_IN_SETJMP_SETUP:
      /* __builtin_setjmp_setup is passed a pointer to an array of five words
	 and the receiver label.  */
      if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
	{
	  rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
				      VOIDmode, EXPAND_NORMAL);
	  tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
	  rtx label_r = label_rtx (label);

	  /* This is copied from the handling of non-local gotos.  */
	  expand_builtin_setjmp_setup (buf_addr, label_r);
	  nonlocal_goto_handler_labels
	    = gen_rtx_EXPR_LIST (VOIDmode, label_r,
				 nonlocal_goto_handler_labels);
	  /* ??? Do not let expand_label treat us as such since we would
	     not want to be both on the list of non-local labels and on
	     the list of forced labels.  */
	  FORCED_LABEL (label) = 0;
	  return const0_rtx;
	}
      break;

    case BUILT_IN_SETJMP_RECEIVER:
      /* __builtin_setjmp_receiver is passed the receiver label.  */
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
	{
	  tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
	  rtx label_r = label_rtx (label);

	  expand_builtin_setjmp_receiver (label_r);
	  return const0_rtx;
	}
      break;

      /* __builtin_longjmp is passed a pointer to an array of five words.
	 It's similar to the C library longjmp function but works with
	 __builtin_setjmp above.  */
    case BUILT_IN_LONGJMP:
      if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
	{
	  rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
				      VOIDmode, EXPAND_NORMAL);
	  rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));

	  if (value != const1_rtx)
	    {
	      error ("%<__builtin_longjmp%> second argument must be 1");
	      return const0_rtx;
	    }

	  expand_builtin_longjmp (buf_addr, value);
	  return const0_rtx;
	}
      break;

    case BUILT_IN_NONLOCAL_GOTO:
      target = expand_builtin_nonlocal_goto (exp);
      if (target)
	return target;
      break;

      /* This updates the setjmp buffer that is its argument with the value
	 of the current stack pointer.  */
    case BUILT_IN_UPDATE_SETJMP_BUF:
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
	{
	  rtx buf_addr
	    = expand_normal (CALL_EXPR_ARG (exp, 0));

	  expand_builtin_update_setjmp_buf (buf_addr);
	  return const0_rtx;
	}
      break;

    case BUILT_IN_TRAP:
      expand_builtin_trap ();
      return const0_rtx;

    case BUILT_IN_UNREACHABLE:
      expand_builtin_unreachable ();
      return const0_rtx;
    CASE_FLT_FN (BUILT_IN_SIGNBIT):
    case BUILT_IN_SIGNBITD32:
    case BUILT_IN_SIGNBITD64:
    case BUILT_IN_SIGNBITD128:
      target = expand_builtin_signbit (exp, target);
      if (target)
	return target;
      break;

      /* Various hooks for the DWARF 2 __throw routine.  */
    case BUILT_IN_UNWIND_INIT:
      expand_builtin_unwind_init ();
      return const0_rtx;
    case BUILT_IN_DWARF_CFA:
      return virtual_cfa_rtx;
#ifdef DWARF2_UNWIND_INFO
    case BUILT_IN_DWARF_SP_COLUMN:
      return expand_builtin_dwarf_sp_column ();
    case BUILT_IN_INIT_DWARF_REG_SIZES:
      expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
      return const0_rtx;
#endif
    case BUILT_IN_FROB_RETURN_ADDR:
      return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
    case BUILT_IN_EXTRACT_RETURN_ADDR:
      return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
    case BUILT_IN_EH_RETURN:
      expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
				CALL_EXPR_ARG (exp, 1));
      return const0_rtx;
#ifdef EH_RETURN_DATA_REGNO
    case BUILT_IN_EH_RETURN_DATA_REGNO:
      return expand_builtin_eh_return_data_regno (exp);
#endif
    case BUILT_IN_EXTEND_POINTER:
      return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
    case BUILT_IN_EH_POINTER:
      return expand_builtin_eh_pointer (exp);
    case BUILT_IN_EH_FILTER:
      return expand_builtin_eh_filter (exp);
    case BUILT_IN_EH_COPY_VALUES:
      return expand_builtin_eh_copy_values (exp);

    case BUILT_IN_VA_START:
      return expand_builtin_va_start (exp);
    case BUILT_IN_VA_END:
      return expand_builtin_va_end (exp);
    case BUILT_IN_VA_COPY:
      return expand_builtin_va_copy (exp);
    case BUILT_IN_EXPECT:
      return expand_builtin_expect (exp, target);
    case BUILT_IN_ASSUME_ALIGNED:
      return expand_builtin_assume_aligned (exp, target);
    case BUILT_IN_PREFETCH:
      expand_builtin_prefetch (exp);
      return const0_rtx;

    case BUILT_IN_INIT_TRAMPOLINE:
      return expand_builtin_init_trampoline (exp, true);
    case BUILT_IN_INIT_HEAP_TRAMPOLINE:
      return expand_builtin_init_trampoline (exp, false);
    case BUILT_IN_ADJUST_TRAMPOLINE:
      return expand_builtin_adjust_trampoline (exp);

    case BUILT_IN_FORK:
    case BUILT_IN_EXECL:
    case BUILT_IN_EXECV:
    case BUILT_IN_EXECLP:
    case BUILT_IN_EXECLE:
    case BUILT_IN_EXECVP:
    case BUILT_IN_EXECVE:
      target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
      if (target)
	return target;
      break;
    case BUILT_IN_SYNC_FETCH_AND_ADD_1:
    case BUILT_IN_SYNC_FETCH_AND_ADD_2:
    case BUILT_IN_SYNC_FETCH_AND_ADD_4:
    case BUILT_IN_SYNC_FETCH_AND_ADD_8:
    case BUILT_IN_SYNC_FETCH_AND_ADD_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
      target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_SUB_1:
    case BUILT_IN_SYNC_FETCH_AND_SUB_2:
    case BUILT_IN_SYNC_FETCH_AND_SUB_4:
    case BUILT_IN_SYNC_FETCH_AND_SUB_8:
    case BUILT_IN_SYNC_FETCH_AND_SUB_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
      target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_OR_1:
    case BUILT_IN_SYNC_FETCH_AND_OR_2:
    case BUILT_IN_SYNC_FETCH_AND_OR_4:
    case BUILT_IN_SYNC_FETCH_AND_OR_8:
    case BUILT_IN_SYNC_FETCH_AND_OR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
      target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_AND_1:
    case BUILT_IN_SYNC_FETCH_AND_AND_2:
    case BUILT_IN_SYNC_FETCH_AND_AND_4:
    case BUILT_IN_SYNC_FETCH_AND_AND_8:
    case BUILT_IN_SYNC_FETCH_AND_AND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
      target = expand_builtin_sync_operation (mode, exp, AND, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_XOR_1:
    case BUILT_IN_SYNC_FETCH_AND_XOR_2:
    case BUILT_IN_SYNC_FETCH_AND_XOR_4:
    case BUILT_IN_SYNC_FETCH_AND_XOR_8:
    case BUILT_IN_SYNC_FETCH_AND_XOR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
      target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_NAND_1:
    case BUILT_IN_SYNC_FETCH_AND_NAND_2:
    case BUILT_IN_SYNC_FETCH_AND_NAND_4:
    case BUILT_IN_SYNC_FETCH_AND_NAND_8:
    case BUILT_IN_SYNC_FETCH_AND_NAND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
      target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_ADD_AND_FETCH_1:
    case BUILT_IN_SYNC_ADD_AND_FETCH_2:
    case BUILT_IN_SYNC_ADD_AND_FETCH_4:
    case BUILT_IN_SYNC_ADD_AND_FETCH_8:
    case BUILT_IN_SYNC_ADD_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_SUB_AND_FETCH_1:
    case BUILT_IN_SYNC_SUB_AND_FETCH_2:
    case BUILT_IN_SYNC_SUB_AND_FETCH_4:
    case BUILT_IN_SYNC_SUB_AND_FETCH_8:
    case BUILT_IN_SYNC_SUB_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_OR_AND_FETCH_1:
    case BUILT_IN_SYNC_OR_AND_FETCH_2:
    case BUILT_IN_SYNC_OR_AND_FETCH_4:
    case BUILT_IN_SYNC_OR_AND_FETCH_8:
    case BUILT_IN_SYNC_OR_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_AND_AND_FETCH_1:
    case BUILT_IN_SYNC_AND_AND_FETCH_2:
    case BUILT_IN_SYNC_AND_AND_FETCH_4:
    case BUILT_IN_SYNC_AND_AND_FETCH_8:
    case BUILT_IN_SYNC_AND_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, AND, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_XOR_AND_FETCH_1:
    case BUILT_IN_SYNC_XOR_AND_FETCH_2:
    case BUILT_IN_SYNC_XOR_AND_FETCH_4:
    case BUILT_IN_SYNC_XOR_AND_FETCH_8:
    case BUILT_IN_SYNC_XOR_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_NAND_AND_FETCH_1:
    case BUILT_IN_SYNC_NAND_AND_FETCH_2:
    case BUILT_IN_SYNC_NAND_AND_FETCH_4:
    case BUILT_IN_SYNC_NAND_AND_FETCH_8:
    case BUILT_IN_SYNC_NAND_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
      if (mode == VOIDmode)
	mode = TYPE_MODE (boolean_type_node);
      if (!target || !register_operand (target, mode))
	target = gen_reg_rtx (mode);

      mode = get_builtin_sync_mode
		(fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
      target = expand_builtin_compare_and_swap (mode, exp, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
      mode = get_builtin_sync_mode
		(fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
      target = expand_builtin_compare_and_swap (mode, exp, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
      target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_LOCK_RELEASE_1:
    case BUILT_IN_SYNC_LOCK_RELEASE_2:
    case BUILT_IN_SYNC_LOCK_RELEASE_4:
    case BUILT_IN_SYNC_LOCK_RELEASE_8:
    case BUILT_IN_SYNC_LOCK_RELEASE_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
      expand_builtin_sync_lock_release (mode, exp);
      return const0_rtx;

    case BUILT_IN_SYNC_SYNCHRONIZE:
      expand_builtin_sync_synchronize ();
      return const0_rtx;
    case BUILT_IN_ATOMIC_EXCHANGE_1:
    case BUILT_IN_ATOMIC_EXCHANGE_2:
    case BUILT_IN_ATOMIC_EXCHANGE_4:
    case BUILT_IN_ATOMIC_EXCHANGE_8:
    case BUILT_IN_ATOMIC_EXCHANGE_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
      target = expand_builtin_atomic_exchange (mode, exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
      {
	unsigned int nargs, z;
	vec<tree, va_gc> *vec;

	mode
	  = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
	target = expand_builtin_atomic_compare_exchange (mode, exp, target);
	if (target)
	  return target;

	/* If this is turned into an external library call, the weak parameter
	   must be dropped to match the expected parameter list.  */
	nargs = call_expr_nargs (exp);
	vec_alloc (vec, nargs - 1);
	for (z = 0; z < 3; z++)
	  vec->quick_push (CALL_EXPR_ARG (exp, z));
	/* Skip the boolean weak parameter.  */
	for (z = 4; z < 6; z++)
	  vec->quick_push (CALL_EXPR_ARG (exp, z));
	exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
	break;
      }

    case BUILT_IN_ATOMIC_LOAD_1:
    case BUILT_IN_ATOMIC_LOAD_2:
    case BUILT_IN_ATOMIC_LOAD_4:
    case BUILT_IN_ATOMIC_LOAD_8:
    case BUILT_IN_ATOMIC_LOAD_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
      target = expand_builtin_atomic_load (mode, exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_STORE_1:
    case BUILT_IN_ATOMIC_STORE_2:
    case BUILT_IN_ATOMIC_STORE_4:
    case BUILT_IN_ATOMIC_STORE_8:
    case BUILT_IN_ATOMIC_STORE_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
      target = expand_builtin_atomic_store (mode, exp);
      if (target)
	return const0_rtx;
      break;
    case BUILT_IN_ATOMIC_ADD_FETCH_1:
    case BUILT_IN_ATOMIC_ADD_FETCH_2:
    case BUILT_IN_ATOMIC_ADD_FETCH_4:
    case BUILT_IN_ATOMIC_ADD_FETCH_8:
    case BUILT_IN_ATOMIC_ADD_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
	lib = (enum built_in_function) ((int) BUILT_IN_ATOMIC_FETCH_ADD_1
					+ (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }

    case BUILT_IN_ATOMIC_SUB_FETCH_1:
    case BUILT_IN_ATOMIC_SUB_FETCH_2:
    case BUILT_IN_ATOMIC_SUB_FETCH_4:
    case BUILT_IN_ATOMIC_SUB_FETCH_8:
    case BUILT_IN_ATOMIC_SUB_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
	lib = (enum built_in_function) ((int) BUILT_IN_ATOMIC_FETCH_SUB_1
					+ (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }

    case BUILT_IN_ATOMIC_AND_FETCH_1:
    case BUILT_IN_ATOMIC_AND_FETCH_2:
    case BUILT_IN_ATOMIC_AND_FETCH_4:
    case BUILT_IN_ATOMIC_AND_FETCH_8:
    case BUILT_IN_ATOMIC_AND_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
	lib = (enum built_in_function) ((int) BUILT_IN_ATOMIC_FETCH_AND_1
					+ (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }

    case BUILT_IN_ATOMIC_NAND_FETCH_1:
    case BUILT_IN_ATOMIC_NAND_FETCH_2:
    case BUILT_IN_ATOMIC_NAND_FETCH_4:
    case BUILT_IN_ATOMIC_NAND_FETCH_8:
    case BUILT_IN_ATOMIC_NAND_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
	lib = (enum built_in_function) ((int) BUILT_IN_ATOMIC_FETCH_NAND_1
					+ (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }

    case BUILT_IN_ATOMIC_XOR_FETCH_1:
    case BUILT_IN_ATOMIC_XOR_FETCH_2:
    case BUILT_IN_ATOMIC_XOR_FETCH_4:
    case BUILT_IN_ATOMIC_XOR_FETCH_8:
    case BUILT_IN_ATOMIC_XOR_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
	lib = (enum built_in_function) ((int) BUILT_IN_ATOMIC_FETCH_XOR_1
					+ (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }

    case BUILT_IN_ATOMIC_OR_FETCH_1:
    case BUILT_IN_ATOMIC_OR_FETCH_2:
    case BUILT_IN_ATOMIC_OR_FETCH_4:
    case BUILT_IN_ATOMIC_OR_FETCH_8:
    case BUILT_IN_ATOMIC_OR_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
	lib = (enum built_in_function) ((int) BUILT_IN_ATOMIC_FETCH_OR_1
					+ (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }

    case BUILT_IN_ATOMIC_FETCH_ADD_1:
    case BUILT_IN_ATOMIC_FETCH_ADD_2:
    case BUILT_IN_ATOMIC_FETCH_ADD_4:
    case BUILT_IN_ATOMIC_FETCH_ADD_8:
    case BUILT_IN_ATOMIC_FETCH_ADD_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_SUB_1:
    case BUILT_IN_ATOMIC_FETCH_SUB_2:
    case BUILT_IN_ATOMIC_FETCH_SUB_4:
    case BUILT_IN_ATOMIC_FETCH_SUB_8:
    case BUILT_IN_ATOMIC_FETCH_SUB_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_AND_1:
    case BUILT_IN_ATOMIC_FETCH_AND_2:
    case BUILT_IN_ATOMIC_FETCH_AND_4:
    case BUILT_IN_ATOMIC_FETCH_AND_8:
    case BUILT_IN_ATOMIC_FETCH_AND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_NAND_1:
    case BUILT_IN_ATOMIC_FETCH_NAND_2:
    case BUILT_IN_ATOMIC_FETCH_NAND_4:
    case BUILT_IN_ATOMIC_FETCH_NAND_8:
    case BUILT_IN_ATOMIC_FETCH_NAND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_XOR_1:
    case BUILT_IN_ATOMIC_FETCH_XOR_2:
    case BUILT_IN_ATOMIC_FETCH_XOR_4:
    case BUILT_IN_ATOMIC_FETCH_XOR_8:
    case BUILT_IN_ATOMIC_FETCH_XOR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_OR_1:
    case BUILT_IN_ATOMIC_FETCH_OR_2:
    case BUILT_IN_ATOMIC_FETCH_OR_4:
    case BUILT_IN_ATOMIC_FETCH_OR_8:
    case BUILT_IN_ATOMIC_FETCH_OR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;
    case BUILT_IN_ATOMIC_TEST_AND_SET:
      return expand_builtin_atomic_test_and_set (exp, target);

    case BUILT_IN_ATOMIC_CLEAR:
      return expand_builtin_atomic_clear (exp);

    case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
      return expand_builtin_atomic_always_lock_free (exp);

    case BUILT_IN_ATOMIC_IS_LOCK_FREE:
      target = expand_builtin_atomic_is_lock_free (exp);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_THREAD_FENCE:
      expand_builtin_atomic_thread_fence (exp);
      return const0_rtx;

    case BUILT_IN_ATOMIC_SIGNAL_FENCE:
      expand_builtin_atomic_signal_fence (exp);
      return const0_rtx;

    case BUILT_IN_OBJECT_SIZE:
      return expand_builtin_object_size (exp);

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      target = expand_builtin_memory_chk (exp, target, mode, fcode);
      if (target)
	return target;
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
    case BUILT_IN_STRCAT_CHK:
    case BUILT_IN_STRNCAT_CHK:
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      maybe_emit_chk_warning (exp, fcode);
      break;

    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      maybe_emit_sprintf_chk_warning (exp, fcode);
      break;

    case BUILT_IN_FREE:
      if (warn_free_nonheap_object)
	maybe_emit_free_warning (exp);
      break;

    case BUILT_IN_THREAD_POINTER:
      return expand_builtin_thread_pointer (exp, target);

    case BUILT_IN_SET_THREAD_POINTER:
      expand_builtin_set_thread_pointer (exp);
      return const0_rtx;

    case BUILT_IN_CILK_DETACH:
      expand_builtin_cilk_detach (exp);
      return const0_rtx;

    case BUILT_IN_CILK_POP_FRAME:
      expand_builtin_cilk_pop_frame (exp);
      return const0_rtx;

    default:	/* just do library call, if unknown builtin */
      break;
    }

  /* The switch statement above can drop through to cause the function
     to be called normally.  */
  return expand_call (exp, target, ignore);
}
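/* Illustrative note (editor's sketch, not GCC source): any case above that
   "break"s out of the switch reaches this final expand_call, so a builtin
   that cannot be expanded inline degrades gracefully into an ordinary
   call.  For instance, on a target with no ffs instruction pattern,

       int f (int x) { return __builtin_ffs (x); }

   simply becomes a call to the library ffs.  */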
/* Determine whether a tree node represents a call to a built-in
   function.  If the tree T is a call to a built-in function with
   the right number of arguments of the appropriate types, return
   the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
   Otherwise the return value is END_BUILTINS.  */

enum built_in_function
builtin_mathfn_code (const_tree t)
{
  const_tree fndecl, arg, parmlist;
  const_tree argtype, parmtype;
  const_call_expr_arg_iterator iter;

  if (TREE_CODE (t) != CALL_EXPR
      || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
    return END_BUILTINS;

  fndecl = get_callee_fndecl (t);
  if (fndecl == NULL_TREE
      || TREE_CODE (fndecl) != FUNCTION_DECL
      || ! DECL_BUILT_IN (fndecl)
      || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return END_BUILTINS;

  parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  init_const_call_expr_arg_iterator (t, &iter);
  for (; parmlist; parmlist = TREE_CHAIN (parmlist))
    {
      /* If a function doesn't take a variable number of arguments,
	 the last element in the list will have type `void'.  */
      parmtype = TREE_VALUE (parmlist);
      if (VOID_TYPE_P (parmtype))
	{
	  if (more_const_call_expr_args_p (&iter))
	    return END_BUILTINS;
	  return DECL_FUNCTION_CODE (fndecl);
	}

      if (! more_const_call_expr_args_p (&iter))
	return END_BUILTINS;

      arg = next_const_call_expr_arg (&iter);
      argtype = TREE_TYPE (arg);

      if (SCALAR_FLOAT_TYPE_P (parmtype))
	{
	  if (! SCALAR_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (COMPLEX_FLOAT_TYPE_P (parmtype))
	{
	  if (! COMPLEX_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (POINTER_TYPE_P (parmtype))
	{
	  if (! POINTER_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (INTEGRAL_TYPE_P (parmtype))
	{
	  if (! INTEGRAL_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else
	return END_BUILTINS;
    }

  /* Variable-length argument list.  */
  return DECL_FUNCTION_CODE (fndecl);
}
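/* Illustrative note (editor's sketch, not GCC source): callers use
   builtin_mathfn_code to pattern-match math calls in trees, e.g.

       if (builtin_mathfn_code (expr) == BUILT_IN_SQRT)
	 ... expr is a well-formed call to sqrt ...

   and the argument/parameter scan above guarantees that a successful
   match implies the call's arguments have the types the builtin's
   prototype expects.  */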
/* Fold a call to __builtin_constant_p, if we know its argument ARG will
   evaluate to a constant.  */

static tree
fold_builtin_constant_p (tree arg)
{
  /* We return 1 for a numeric type that's known to be a constant
     value at compile-time or for an aggregate type that's a
     literal constant.  */
  STRIP_NOPS (arg);

  /* If we know this is a constant, emit the constant of one.  */
  if (CONSTANT_CLASS_P (arg)
      || (TREE_CODE (arg) == CONSTRUCTOR
	  && TREE_CONSTANT (arg)))
    return integer_one_node;
  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (arg, 0);
      if (TREE_CODE (op) == STRING_CST
	  || (TREE_CODE (op) == ARRAY_REF
	      && integer_zerop (TREE_OPERAND (op, 1))
	      && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
	return integer_one_node;
    }

  /* If this expression has side effects, show we don't know it to be a
     constant.  Likewise if it's a pointer or aggregate type since in
     those case we only want literals, since those are only optimized
     when generating RTL, not later.
     And finally, if we are compiling an initializer, not code, we
     need to return a definite result now; there's not going to be any
     more optimization done.  */
  if (TREE_SIDE_EFFECTS (arg)
      || AGGREGATE_TYPE_P (TREE_TYPE (arg))
      || POINTER_TYPE_P (TREE_TYPE (arg))
      || cfun == 0
      || folding_initializer
      || force_folding_builtin_constant_p)
    return integer_zero_node;

  return NULL_TREE;
}
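/* Illustrative note (editor's sketch, not GCC source): the folding above
   gives, for instance,

       __builtin_constant_p (3 * 14)       -> 1
       __builtin_constant_p ("abc")        -> 1   (address of a string literal)
       __builtin_constant_p (some_global)  -> 0   once no further folding
						   can happen

   with the definite zero answer only produced late, or immediately when
   folding_initializer or force_folding_builtin_constant_p demands one.  */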
/* Create builtin_expect with PRED and EXPECTED as its arguments and
   return it as a truthvalue.  */

static tree
build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
				tree predictor)
{
  tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;

  fn = builtin_decl_explicit (BUILT_IN_EXPECT);
  arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
  ret_type = TREE_TYPE (TREE_TYPE (fn));
  pred_type = TREE_VALUE (arg_types);
  expected_type = TREE_VALUE (TREE_CHAIN (arg_types));

  pred = fold_convert_loc (loc, pred_type, pred);
  expected = fold_convert_loc (loc, expected_type, expected);
  call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
				   predictor);

  return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
		 build_int_cst (ret_type, 0));
}
/* Fold a call to builtin_expect with arguments ARG0 and ARG1 (ARG2 is the
   optional predictor).  Return NULL_TREE if no simplification is
   possible.  */

tree
fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
{
  tree inner, fndecl, inner_arg0;
  enum tree_code code;

  /* Distribute the expected value over short-circuiting operators.
     See through the cast from truthvalue_type_node to long.  */
  inner_arg0 = arg0;
  while (TREE_CODE (inner_arg0) == NOP_EXPR
	 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
    inner_arg0 = TREE_OPERAND (inner_arg0, 0);

  /* If this is a builtin_expect within a builtin_expect keep the
     inner one.  See through a comparison against a constant.  It
     might have been added to create a truthvalue.  */
  inner = inner_arg0;

  if (COMPARISON_CLASS_P (inner)
      && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
    inner = TREE_OPERAND (inner, 0);

  if (TREE_CODE (inner) == CALL_EXPR
      && (fndecl = get_callee_fndecl (inner))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
    return arg0;

  inner = inner_arg0;
  code = TREE_CODE (inner);
  if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
    {
      tree op0 = TREE_OPERAND (inner, 0);
      tree op1 = TREE_OPERAND (inner, 1);

      op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
      op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
      inner = build2 (code, TREE_TYPE (inner), op0, op1);

      return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
    }

  /* If the argument isn't invariant then there's nothing else we can do.  */
  if (!TREE_CONSTANT (inner_arg0))
    return NULL_TREE;

  /* If we expect that a comparison against the argument will fold to
     a constant return the constant.  In practice, this means a true
     constant or the address of a non-weak symbol.  */
  inner = inner_arg0;
  STRIP_NOPS (inner);
  if (TREE_CODE (inner) == ADDR_EXPR)
    {
      do
	inner = TREE_OPERAND (inner, 0);
      while (TREE_CODE (inner) == COMPONENT_REF
	     || TREE_CODE (inner) == ARRAY_REF);
      if ((TREE_CODE (inner) == VAR_DECL
	   || TREE_CODE (inner) == FUNCTION_DECL)
	  && DECL_WEAK (inner))
	return NULL_TREE;
    }

  /* Otherwise, ARG0 already has the proper type for the return value.  */
  return arg0;
}
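/* Illustrative note (editor's sketch, not GCC source): the distribution
   above rewrites

       __builtin_expect (a && b, 1)

   into roughly

       (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   so that each branch of the short-circuit gets its own prediction hint.  */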
/* Fold a call to __builtin_classify_type with argument ARG.  */

static tree
fold_builtin_classify_type (tree arg)
{
  if (arg == 0)
    return build_int_cst (integer_type_node, no_type_class);

  return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
}
/* Fold a call to __builtin_strlen with argument ARG.  */

static tree
fold_builtin_strlen (location_t loc, tree type, tree arg)
{
  if (!validate_arg (arg, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree len = c_strlen (arg, 0);

      if (len)
	return fold_convert_loc (loc, type, len);

      return NULL_TREE;
    }
}
/* Fold a call to __builtin_inf or __builtin_huge_val.  */

static tree
fold_builtin_inf (location_t loc, tree type, int warn)
{
  REAL_VALUE_TYPE real;

  /* __builtin_inff is intended to be usable to define INFINITY on all
     targets.  If an infinity is not available, INFINITY expands "to a
     positive constant of type float that overflows at translation
     time", footnote "In this case, using INFINITY will violate the
     constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
     Thus we pedwarn to ensure this constraint violation is
     diagnosed.  */
  if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
    pedwarn (loc, 0, "target format does not support infinity");

  real_inf (&real);
  return build_real (type, real);
}
/* Fold a call to __builtin_nan or __builtin_nans with argument ARG.  */

static tree
fold_builtin_nan (tree arg, tree type, int quiet)
{
  REAL_VALUE_TYPE real;
  const char *str;

  if (!validate_arg (arg, POINTER_TYPE))
    return NULL_TREE;
  str = c_getstr (arg);
  if (!str)
    return NULL_TREE;

  if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
    return NULL_TREE;

  return build_real (type, real);
}
/* Return true if the floating point expression T has an integer value.
   We also allow +Inf, -Inf and NaN to be considered integer values.  */

static bool
integer_valued_real_p (tree t)
{
  switch (TREE_CODE (t))
    {
    case FLOAT_EXPR:
      return true;

    case ABS_EXPR:
    case SAVE_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 0));

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 1));

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 0))
	     && integer_valued_real_p (TREE_OPERAND (t, 1));

    case COND_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 1))
	     && integer_valued_real_p (TREE_OPERAND (t, 2));

    case REAL_CST:
      return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));

    case NOP_EXPR:
      {
	tree type = TREE_TYPE (TREE_OPERAND (t, 0));
	if (TREE_CODE (type) == INTEGER_TYPE)
	  return true;
	if (TREE_CODE (type) == REAL_TYPE)
	  return integer_valued_real_p (TREE_OPERAND (t, 0));
	break;
      }

    case CALL_EXPR:
      switch (builtin_mathfn_code (t))
	{
	CASE_FLT_FN (BUILT_IN_CEIL):
	CASE_FLT_FN (BUILT_IN_FLOOR):
	CASE_FLT_FN (BUILT_IN_NEARBYINT):
	CASE_FLT_FN (BUILT_IN_RINT):
	CASE_FLT_FN (BUILT_IN_ROUND):
	CASE_FLT_FN (BUILT_IN_TRUNC):
	  return true;

	CASE_FLT_FN (BUILT_IN_FMIN):
	CASE_FLT_FN (BUILT_IN_FMAX):
	  return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
		 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));

	default:
	  break;
	}
      break;

    default:
      break;
    }
  return false;
}
/* FNDECL is assumed to be a builtin where truncation can be propagated
   across (for instance floor((double)f) == (double)floorf (f)).
   Do the transformation for a call with argument ARG.  */

static tree
fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Integer rounding functions are idempotent.  */
  if (fcode == builtin_mathfn_code (arg))
    return arg;

  /* If argument is already integer valued, and we don't need to worry
     about setting errno, there's no need to perform rounding.  */
  if (! flag_errno_math && integer_valued_real_p (arg))
    return arg;

  if (optimize)
    {
      tree arg0 = strip_float_extensions (arg);
      tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
      tree newtype = TREE_TYPE (arg0);
      tree decl;

      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
	  && (decl = mathfn_built_in (newtype, fcode)))
	return fold_convert_loc (loc, ftype,
				 build_call_expr_loc (loc, decl, 1,
						      fold_convert_loc (loc,
									newtype,
									arg0)));
    }
  return NULL_TREE;
}
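/* Illustrative note (editor's sketch, not GCC source): given a float F,
   the narrowing above rewrites

       floor ((double) F)   ->   (double) floorf (F)

   which is valid because rounding a value that is exactly representable
   as a float gives the same result whether the rounding is done in float
   or in double precision.  */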
/* FNDECL is assumed to be builtin which can narrow the FP type of
   the argument, for instance lround((double)f) -> lroundf (f).
   Do the transformation for a call with argument ARG.  */

static tree
fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* If argument is already integer valued, and we don't need to worry
     about setting errno, there's no need to perform rounding.  */
  if (! flag_errno_math && integer_valued_real_p (arg))
    return fold_build1_loc (loc, FIX_TRUNC_EXPR,
			    TREE_TYPE (TREE_TYPE (fndecl)), arg);

  if (optimize)
    {
      tree ftype = TREE_TYPE (arg);
      tree arg0 = strip_float_extensions (arg);
      tree newtype = TREE_TYPE (arg0);
      tree decl;

      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
	  && (decl = mathfn_built_in (newtype, fcode)))
	return build_call_expr_loc (loc, decl, 1,
				    fold_convert_loc (loc, newtype, arg0));
    }

  /* Canonicalize iround (x) to lround (x) on ILP32 targets where
     sizeof (int) == sizeof (long).  */
  if (TYPE_PRECISION (integer_type_node)
      == TYPE_PRECISION (long_integer_type_node))
    {
      tree newfn = NULL_TREE;
      switch (fcode)
	{
	CASE_FLT_FN (BUILT_IN_ICEIL):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
	  break;

	CASE_FLT_FN (BUILT_IN_IFLOOR):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
	  break;

	CASE_FLT_FN (BUILT_IN_IROUND):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
	  break;

	CASE_FLT_FN (BUILT_IN_IRINT):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
	  break;

	default:
	  break;
	}

      if (newfn)
	{
	  tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
	  return fold_convert_loc (loc,
				   TREE_TYPE (TREE_TYPE (fndecl)), newcall);
	}
    }

  /* Canonicalize llround (x) to lround (x) on LP64 targets where
     sizeof (long long) == sizeof (long).  */
  if (TYPE_PRECISION (long_long_integer_type_node)
      == TYPE_PRECISION (long_integer_type_node))
    {
      tree newfn = NULL_TREE;
      switch (fcode)
	{
	CASE_FLT_FN (BUILT_IN_LLCEIL):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
	  break;

	CASE_FLT_FN (BUILT_IN_LLFLOOR):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
	  break;

	CASE_FLT_FN (BUILT_IN_LLROUND):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
	  break;

	CASE_FLT_FN (BUILT_IN_LLRINT):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
	  break;

	default:
	  break;
	}

      if (newfn)
	{
	  tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
	  return fold_convert_loc (loc,
				   TREE_TYPE (TREE_TYPE (fndecl)), newcall);
	}
    }

  return NULL_TREE;
}
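/* Illustrative note (editor's sketch, not GCC source): on an LP64 target,
   where long long and long have identical precision, the second switch
   above canonicalizes

       llround (x)   ->   (long long) lround (x)

   so later passes only need to recognize the "l" flavor of each rounding
   function.  */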
/* Fold call to builtin cabs, cabsf or cabsl with argument ARG.  TYPE is the
   return type.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
{
  tree res;

  if (!validate_arg (arg, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if (TREE_CODE (arg) == COMPLEX_CST
      && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
			      type, mpfr_hypot)))
    return res;

  if (TREE_CODE (arg) == COMPLEX_EXPR)
    {
      tree real = TREE_OPERAND (arg, 0);
      tree imag = TREE_OPERAND (arg, 1);

      /* If either part is zero, cabs is fabs of the other.  */
      if (real_zerop (real))
	return fold_build1_loc (loc, ABS_EXPR, type, imag);
      if (real_zerop (imag))
	return fold_build1_loc (loc, ABS_EXPR, type, real);

      /* cabs(x+xi) -> fabs(x)*sqrt(2).  */
      if (flag_unsafe_math_optimizations
	  && operand_equal_p (real, imag, OEP_PURE_SAME))
	{
	  const REAL_VALUE_TYPE sqrt2_trunc
	    = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());

	  return fold_build2_loc (loc, MULT_EXPR, type,
				  fold_build1_loc (loc, ABS_EXPR, type, real),
				  build_real (type, sqrt2_trunc));
	}
    }

  /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z).  */
  if (TREE_CODE (arg) == NEGATE_EXPR
      || TREE_CODE (arg) == CONJ_EXPR)
    return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));

  /* Don't do this when optimizing for size.  */
  if (flag_unsafe_math_optimizations
      && optimize && optimize_function_for_speed_p (cfun))
    {
      tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

      if (sqrtfn != NULL_TREE)
	{
	  tree rpart, ipart, result;

	  arg = builtin_save_expr (arg);

	  rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
	  ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);

	  rpart = builtin_save_expr (rpart);
	  ipart = builtin_save_expr (ipart);

	  result = fold_build2_loc (loc, PLUS_EXPR, type,
				    fold_build2_loc (loc, MULT_EXPR, type,
						     rpart, rpart),
				    fold_build2_loc (loc, MULT_EXPR, type,
						     ipart, ipart));

	  return build_call_expr_loc (loc, sqrtfn, 1, result);
	}
    }

  return NULL_TREE;
}
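/* Illustrative note (editor's sketch, not GCC source): for a double
   complex z the cases above perform, in order,

       cabs (x + 0i)              -> fabs (x)
       cabs (x + xi)              -> fabs (x) * sqrt (2)   (-ffast-math)
       cabs (-z), cabs (conj (z)) -> cabs (z)
       cabs (z)                   -> sqrt (r*r + i*i)      (-ffast-math,
							    optimizing for
							    speed)
   where r and i are the saved real and imaginary parts.  */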
/* Build a complex (inf +- 0i) for the result of cproj.  TYPE is the
   complex tree type of the result.  If NEG is true, the imaginary
   zero is negative.  */

static tree
build_complex_cproj (tree type, bool neg)
{
  REAL_VALUE_TYPE rinf, rzero = dconst0;

  real_inf (&rinf);
  rzero.sign = neg;
  return build_complex (type, build_real (TREE_TYPE (type), rinf),
			build_real (TREE_TYPE (type), rzero));
}
/* Fold call to builtin cproj, cprojf or cprojl with argument ARG.  TYPE is the
   return type.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cproj (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
    return NULL_TREE;

  /* If there are no infinities, return arg.  */
  if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
    return non_lvalue_loc (loc, arg);

  /* Calculate the result when the argument is a constant.  */
  if (TREE_CODE (arg) == COMPLEX_CST)
    {
      const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      if (real_isinf (real) || real_isinf (imag))
	return build_complex_cproj (type, imag->sign);
      else
	return arg;
    }
  else if (TREE_CODE (arg) == COMPLEX_EXPR)
    {
      tree real = TREE_OPERAND (arg, 0);
      tree imag = TREE_OPERAND (arg, 1);

      STRIP_NOPS (real);
      STRIP_NOPS (imag);

      /* If the real part is inf and the imag part is known to be
	 nonnegative, return (inf + 0i).  Remember side-effects are
	 possible in the imag part.  */
      if (TREE_CODE (real) == REAL_CST
	  && real_isinf (TREE_REAL_CST_PTR (real))
	  && tree_expr_nonnegative_p (imag))
	return omit_one_operand_loc (loc, type,
				     build_complex_cproj (type, false),
				     arg);

      /* If the imag part is inf, return (inf+I*copysign(0,imag)).
	 Remember side-effects are possible in the real part.  */
      if (TREE_CODE (imag) == REAL_CST
	  && real_isinf (TREE_REAL_CST_PTR (imag)))
	return
	  omit_one_operand_loc (loc, type,
				build_complex_cproj (type, TREE_REAL_CST_PTR
						     (imag)->sign), arg);
    }

  return NULL_TREE;
}
/* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_sqrt (location_t loc, tree arg, tree type)
{
  tree res;
  enum built_in_function fcode;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
    return res;

  /* Optimize sqrt(expN(x)) = expN(x*0.5).  */
  fcode = builtin_mathfn_code (arg);
  if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
    {
      tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      arg = fold_build2_loc (loc, MULT_EXPR, type,
			     CALL_EXPR_ARG (arg, 0),
			     build_real (type, dconsthalf));
      return build_call_expr_loc (loc, expfn, 1, arg);
    }

  /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)).  */
  if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
    {
      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

      if (powfn)
	{
	  tree arg0 = CALL_EXPR_ARG (arg, 0);
	  tree tree_root;
	  /* The inner root was either sqrt or cbrt.  */
	  /* This was a conditional expression but it triggered a bug
	     in Sun C 5.5.  */
	  REAL_VALUE_TYPE dconstroot;
	  if (BUILTIN_SQRT_P (fcode))
	    dconstroot = dconsthalf;
	  else
	    dconstroot = dconst_third ();

	  /* Adjust for the outer root.  */
	  SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
	  dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
	  tree_root = build_real (type, dconstroot);
	  return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
	}
    }

  /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5).  */
  if (flag_unsafe_math_optimizations
      && (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL))
    {
      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      tree arg0 = CALL_EXPR_ARG (arg, 0);
      tree arg1 = CALL_EXPR_ARG (arg, 1);
      tree narg1;
      if (!tree_expr_nonnegative_p (arg0))
	arg0 = build1 (ABS_EXPR, type, arg0);
      narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
			       build_real (type, dconsthalf));
      return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
    }

  return NULL_TREE;
}
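/* Illustrative note (editor's sketch, not GCC source): under
   -funsafe-math-optimizations the cases above perform, e.g.,

       sqrt (exp (x))    -> exp (x * 0.5)
       sqrt (cbrt (x))   -> pow (x, 1.0/6.0)
       sqrt (pow (x, y)) -> pow (fabs (x), y * 0.5)

   where the fabs in the last rule keeps pow's domain valid when x may be
   negative.  */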
/* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cbrt (location_t loc, tree arg, tree type)
{
  const enum built_in_function fcode = builtin_mathfn_code (arg);
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
    return res;

  if (flag_unsafe_math_optimizations)
    {
      /* Optimize cbrt(expN(x)) -> expN(x/3).  */
      if (BUILTIN_EXPONENT_P (fcode))
	{
	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
	  const REAL_VALUE_TYPE third_trunc =
	    real_value_truncate (TYPE_MODE (type), dconst_third ());
	  arg = fold_build2_loc (loc, MULT_EXPR, type,
				 CALL_EXPR_ARG (arg, 0),
				 build_real (type, third_trunc));
	  return build_call_expr_loc (loc, expfn, 1, arg);
	}

      /* Optimize cbrt(sqrt(x)) -> pow(x,1/6).  */
      if (BUILTIN_SQRT_P (fcode))
	{
	  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

	  if (powfn)
	    {
	      tree arg0 = CALL_EXPR_ARG (arg, 0);
	      tree tree_root;
	      REAL_VALUE_TYPE dconstroot = dconst_third ();

	      SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
	      dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
	      tree_root = build_real (type, dconstroot);
	      return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
	    }
	}

      /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
	{
	  tree arg0 = CALL_EXPR_ARG (arg, 0);
	  if (tree_expr_nonnegative_p (arg0))
	    {
	      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

	      if (powfn)
		{
		  tree tree_root;
		  REAL_VALUE_TYPE dconstroot;

		  real_arithmetic (&dconstroot, MULT_EXPR,
				   dconst_third_ptr (), dconst_third_ptr ());
		  dconstroot = real_value_truncate (TYPE_MODE (type),
						    dconstroot);
		  tree_root = build_real (type, dconstroot);
		  return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
		}
	    }
	}

      /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL)
	{
	  tree arg00 = CALL_EXPR_ARG (arg, 0);
	  tree arg01 = CALL_EXPR_ARG (arg, 1);
	  if (tree_expr_nonnegative_p (arg00))
	    {
	      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
	      const REAL_VALUE_TYPE dconstroot
		= real_value_truncate (TYPE_MODE (type), dconst_third ());
	      tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
					     build_real (type, dconstroot));
	      return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
	    }
	}
    }
  return NULL_TREE;
}
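/* Illustrative note (editor's sketch, not GCC source): similarly, for
   cbrt the rules above give

       cbrt (exp (x))    -> exp (x / 3)
       cbrt (sqrt (x))   -> pow (x, 1.0/6.0)
       cbrt (cbrt (x))   -> pow (x, 1.0/9.0)     (x known nonnegative)
       cbrt (pow (x, y)) -> pow (x, y / 3)       (x known nonnegative)

   the nonnegativity condition avoids turning a well-defined cbrt of a
   negative value into a pow domain error.  */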
/* Fold function call to builtin cos, cosf, or cosl with argument ARG.
   TYPE is the type of the return value.  Return NULL_TREE if no
   simplification can be made.  */

static tree
fold_builtin_cos (location_t loc,
		  tree arg, tree type, tree fndecl)
{
  tree res, narg;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
    return res;

  /* Optimize cos(-x) into cos (x).  */
  if ((narg = fold_strip_sign_ops (arg)))
    return build_call_expr_loc (loc, fndecl, 1, narg);

  return NULL_TREE;
}
/* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree res, narg;

      /* Calculate the result when the argument is a constant.  */
      if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
	return res;

      /* Optimize cosh(-x) into cosh (x).  */
      if ((narg = fold_strip_sign_ops (arg)))
	return build_call_expr_loc (loc, fndecl, 1, narg);
    }

  return NULL_TREE;
}
/* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
   argument ARG.  TYPE is the type of the return value.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
		   bool hyper)
{
  if (validate_arg (arg, COMPLEX_TYPE)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
    {
      tree tmp;

      /* Calculate the result when the argument is a constant.  */
      if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
	return tmp;

      /* Optimize fn(-x) into fn(x).  */
      if ((tmp = fold_strip_sign_ops (arg)))
	return build_call_expr_loc (loc, fndecl, 1, tmp);
    }

  return NULL_TREE;
}
/* Fold function call to builtin tan, tanf, or tanl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_tan (tree arg, tree type)
{
  enum built_in_function fcode;
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
    return res;

  /* Optimize tan(atan(x)) = x.  */
  fcode = builtin_mathfn_code (arg);
  if (flag_unsafe_math_optimizations
      && (fcode == BUILT_IN_ATAN
	  || fcode == BUILT_IN_ATANF
	  || fcode == BUILT_IN_ATANL))
    return CALL_EXPR_ARG (arg, 0);

  return NULL_TREE;
}
/* Fold function call to builtin sincos, sincosf, or sincosl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_sincos (location_t loc,
		     tree arg0, tree arg1, tree arg2)
{
  tree type;
  tree res, fn, call;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  type = TREE_TYPE (arg0);

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
    return res;

  /* Canonicalize sincos to cexpi.  */
  if (!targetm.libc_has_function (function_c99_math_complex))
    return NULL_TREE;
  fn = mathfn_built_in (type, BUILT_IN_CEXPI);
  if (!fn)
    return NULL_TREE;

  call = build_call_expr_loc (loc, fn, 1, arg0);
  call = builtin_save_expr (call);

  return build2 (COMPOUND_EXPR, void_type_node,
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg1),
			 build1 (IMAGPART_EXPR, type, call)),
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg2),
			 build1 (REALPART_EXPR, type, call)));
}
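/* Illustrative note (editor's sketch, not GCC source): on targets whose
   C library provides the C99 complex functions, the folding above turns

       sincos (x, &s, &c);

   into the equivalent of

       __complex__ double t = cexpi (x);
       s = __imag__ t;
       c = __real__ t;

   letting a single cexpi call feed both results.  */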
/* Fold function call to builtin cexp, cexpf, or cexpl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cexp (location_t loc, tree arg0, tree type)
{
  tree rtype;
  tree realp, imagp, ifn;
  tree res;

  if (!validate_arg (arg0, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
    return res;

  rtype = TREE_TYPE (TREE_TYPE (arg0));

  /* In case we can figure out the real part of arg0 and it is constant zero
     fold to cexpi.  */
  if (!targetm.libc_has_function (function_c99_math_complex))
    return NULL_TREE;
  ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
  if (!ifn)
    return NULL_TREE;

  if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
      && real_zerop (realp))
    {
      tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
      return build_call_expr_loc (loc, ifn, 1, narg);
    }

  /* In case we can easily decompose real and imaginary parts split cexp
     to exp (r) * cexpi (i).  */
  if (flag_unsafe_math_optimizations
      && realp)
    {
      tree rfn, rcall, icall;

      rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
      if (!rfn)
	return NULL_TREE;

      imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
      if (!imagp)
	return NULL_TREE;

      icall = build_call_expr_loc (loc, ifn, 1, imagp);
      icall = builtin_save_expr (icall);
      rcall = build_call_expr_loc (loc, rfn, 1, realp);
      rcall = builtin_save_expr (rcall);
      return fold_build2_loc (loc, COMPLEX_EXPR, type,
			      fold_build2_loc (loc, MULT_EXPR, rtype,
					       rcall,
					       fold_build1_loc (loc,
								REALPART_EXPR,
								rtype, icall)),
			      fold_build2_loc (loc, MULT_EXPR, rtype,
					       rcall,
					       fold_build1_loc (loc,
								IMAGPART_EXPR,
/* Fold function call to builtin trunc, truncf or truncl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize trunc of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE r, x;
      tree type = TREE_TYPE (TREE_TYPE (fndecl));

      x = TREE_REAL_CST (arg);
      real_trunc (&r, TYPE_MODE (type), &x);
      return build_real (type, r);
    }

  return fold_trunc_transparent_mathfn (loc, fndecl, arg);
}
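/* Example of the constant fold above (illustrative only):
   __builtin_trunc (2.7) is replaced at compile time by the REAL_CST
   2.0; no call survives into the generated code.  */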
/* Fold function call to builtin floor, floorf or floorl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_floor (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize floor of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE x;

      x = TREE_REAL_CST (arg);
      if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
	{
	  tree type = TREE_TYPE (TREE_TYPE (fndecl));
	  REAL_VALUE_TYPE r;

	  real_floor (&r, TYPE_MODE (type), &x);
	  return build_real (type, r);
	}
    }

  /* Fold floor (x) where x is nonnegative to trunc (x).  */
  if (tree_expr_nonnegative_p (arg))
    {
      tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
      if (truncfn)
	return build_call_expr_loc (loc, truncfn, 1, arg);
    }

  return fold_trunc_transparent_mathfn (loc, fndecl, arg);
}
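/* Example (illustrative only): when the argument is known nonnegative,
   e.g. "floor (fabs (x))", the call is rewritten to "trunc (fabs (x))",
   since floor and trunc agree for values >= 0.  */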
/* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize ceil of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE x;

      x = TREE_REAL_CST (arg);
      if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
	{
	  tree type = TREE_TYPE (TREE_TYPE (fndecl));
	  REAL_VALUE_TYPE r;

	  real_ceil (&r, TYPE_MODE (type), &x);
	  return build_real (type, r);
	}
    }

  return fold_trunc_transparent_mathfn (loc, fndecl, arg);
}
/* Fold function call to builtin round, roundf or roundl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_round (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize round of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE x;

      x = TREE_REAL_CST (arg);
      if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
	{
	  tree type = TREE_TYPE (TREE_TYPE (fndecl));
	  REAL_VALUE_TYPE r;

	  real_round (&r, TYPE_MODE (type), &x);
	  return build_real (type, r);
	}
    }

  return fold_trunc_transparent_mathfn (loc, fndecl, arg);
}
/* Fold function call to builtin lround, lroundf or lroundl (or the
   corresponding long long versions) and other rounding functions.  ARG
   is the argument to the call.  Return NULL_TREE if no simplification
   can be made.  */

static tree
fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize lround of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);

      if (real_isfinite (&x))
	{
	  tree itype = TREE_TYPE (TREE_TYPE (fndecl));
	  tree ftype = TREE_TYPE (arg);
	  REAL_VALUE_TYPE r;
	  bool fail = false;

	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_IFLOOR):
	    CASE_FLT_FN (BUILT_IN_LFLOOR):
	    CASE_FLT_FN (BUILT_IN_LLFLOOR):
	      real_floor (&r, TYPE_MODE (ftype), &x);
	      break;

	    CASE_FLT_FN (BUILT_IN_ICEIL):
	    CASE_FLT_FN (BUILT_IN_LCEIL):
	    CASE_FLT_FN (BUILT_IN_LLCEIL):
	      real_ceil (&r, TYPE_MODE (ftype), &x);
	      break;

	    CASE_FLT_FN (BUILT_IN_IROUND):
	    CASE_FLT_FN (BUILT_IN_LROUND):
	    CASE_FLT_FN (BUILT_IN_LLROUND):
	      real_round (&r, TYPE_MODE (ftype), &x);
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  wide_int val = real_to_integer (&r, &fail, TYPE_PRECISION (itype));
	  if (!fail)
	    return wide_int_to_tree (itype, val);
	}
    }

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x).  */
      if (tree_expr_nonnegative_p (arg))
	return fold_build1_loc (loc, FIX_TRUNC_EXPR,
				TREE_TYPE (TREE_TYPE (fndecl)), arg);
      break;
    default:;
    }

  return fold_fixed_mathfn (loc, fndecl, arg);
}
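/* Example (illustrative only): "lround (2.5)" folds to the integer
   constant 3L via real_round/real_to_integer above, provided the value
   is finite and fits in the precision of the target "long".  */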
/* Fold function call to builtin ffs, clz, ctz, popcount and parity
   and their long and long long variants (i.e. ffsl and ffsll).  ARG is
   the argument to the call.  Return NULL_TREE if no simplification can
   be made.  */

static tree
fold_builtin_bitop (tree fndecl, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize for constant argument.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      tree type = TREE_TYPE (arg);
      int result;

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	CASE_INT_FN (BUILT_IN_FFS):
	  result = wi::ffs (arg);
	  break;

	CASE_INT_FN (BUILT_IN_CLZ):
	  if (wi::ne_p (arg, 0))
	    result = wi::clz (arg);
	  else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
	    result = TYPE_PRECISION (type);
	  break;

	CASE_INT_FN (BUILT_IN_CTZ):
	  if (wi::ne_p (arg, 0))
	    result = wi::ctz (arg);
	  else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
	    result = TYPE_PRECISION (type);
	  break;

	CASE_INT_FN (BUILT_IN_CLRSB):
	  result = wi::clrsb (arg);
	  break;

	CASE_INT_FN (BUILT_IN_POPCOUNT):
	  result = wi::popcount (arg);
	  break;

	CASE_INT_FN (BUILT_IN_PARITY):
	  result = wi::parity (arg);
	  break;

	default:
	  gcc_unreachable ();
	}

      return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
    }

  return NULL_TREE;
}
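/* Example (illustrative only): "__builtin_popcount (0xff)" folds to the
   constant 8 via wi::popcount above, and "__builtin_clz (1)" folds to
   TYPE_PRECISION (int) - 1, e.g. 31 on a 32-bit int target.  */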
/* Fold function call to builtin_bswap and the short, long and long long
   variants.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_bswap (tree fndecl, tree arg)
{
  if (! validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize constant value.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_BSWAP16:
	case BUILT_IN_BSWAP32:
	case BUILT_IN_BSWAP64:
	  {
	    signop sgn = TYPE_SIGN (type);
	    tree result =
	      wide_int_to_tree (type,
				wide_int::from (arg, TYPE_PRECISION (type),
						sgn).bswap ());
	    return result;
	  }
	default:
	  gcc_unreachable ();
	}
    }

  return NULL_TREE;
}
/* A subroutine of fold_builtin to fold the various logarithmic
   functions.  Return NULL_TREE if no simplification can be made.
   FUNC is the corresponding MPFR logarithm function.  */

static tree
fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
			int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));
      tree res;
      const enum built_in_function fcode = builtin_mathfn_code (arg);

      /* Calculate the result when the argument is a constant.  */
      if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
	return res;

      /* Special case, optimize logN(expN(x)) = x.  */
      if (flag_unsafe_math_optimizations
	  && ((func == mpfr_log
	       && (fcode == BUILT_IN_EXP
		   || fcode == BUILT_IN_EXPF
		   || fcode == BUILT_IN_EXPL))
	      || (func == mpfr_log2
		  && (fcode == BUILT_IN_EXP2
		      || fcode == BUILT_IN_EXP2F
		      || fcode == BUILT_IN_EXP2L))
	      || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
	return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));

      /* Optimize logN(func()) for various exponential functions.  We
	 want to determine the value "x" and the power "exponent" in
	 order to transform logN(x**exponent) into exponent*logN(x).  */
      if (flag_unsafe_math_optimizations)
	{
	  tree exponent = 0, x = 0;

	  switch (fcode)
	    {
	    CASE_FLT_FN (BUILT_IN_EXP):
	      /* Prepare to do logN(exp(exponent) -> exponent*logN(e).  */
	      x = build_real (type, real_value_truncate (TYPE_MODE (type),
							 dconst_e ()));
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_EXP2):
	      /* Prepare to do logN(exp2(exponent) -> exponent*logN(2).  */
	      x = build_real (type, dconst2);
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_EXP10):
	    CASE_FLT_FN (BUILT_IN_POW10):
	      /* Prepare to do logN(exp10(exponent) -> exponent*logN(10).  */
	      {
		REAL_VALUE_TYPE dconst10;
		real_from_integer (&dconst10, VOIDmode, 10, SIGNED);
		x = build_real (type, dconst10);
	      }
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_SQRT):
	      /* Prepare to do logN(sqrt(x) -> 0.5*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = build_real (type, dconsthalf);
	      break;
	    CASE_FLT_FN (BUILT_IN_CBRT):
	      /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
								dconst_third ()));
	      break;
	    CASE_FLT_FN (BUILT_IN_POW):
	      /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = CALL_EXPR_ARG (arg, 1);
	      break;
	    default:
	      break;
	    }

	  /* Now perform the optimization.  */
	  if (x && exponent)
	    {
	      tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
	      return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
	    }
	}
    }

  return NULL_TREE;
}
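/* Worked example of the identity used above (illustrative only): with
   -funsafe-math-optimizations,

       log (pow (x, 3.0))   becomes   3.0 * log (x)
       log2 (sqrt (x))      becomes   0.5 * log2 (x)

   i.e. logN(x**exponent) is rewritten as exponent*logN(x).  */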
/* Fold a builtin function call to hypot, hypotf, or hypotl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_hypot (location_t loc, tree fndecl,
		    tree arg0, tree arg1, tree type)
{
  tree res, narg0, narg1;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
    return res;

  /* If either argument to hypot has a negate or abs, strip that off.
     E.g. hypot(-x,fabs(y)) -> hypot(x,y).  */
  narg0 = fold_strip_sign_ops (arg0);
  narg1 = fold_strip_sign_ops (arg1);
  if (narg0 || narg1)
    {
      return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
				  narg1 ? narg1 : arg1);
    }

  /* If either argument is zero, hypot is fabs of the other.  */
  if (real_zerop (arg0))
    return fold_build1_loc (loc, ABS_EXPR, type, arg1);
  else if (real_zerop (arg1))
    return fold_build1_loc (loc, ABS_EXPR, type, arg0);

  /* hypot(x,x) -> fabs(x)*sqrt(2).  */
  if (flag_unsafe_math_optimizations
      && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
    {
      const REAL_VALUE_TYPE sqrt2_trunc
	= real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
      return fold_build2_loc (loc, MULT_EXPR, type,
			      fold_build1_loc (loc, ABS_EXPR, type, arg0),
			      build_real (type, sqrt2_trunc));
    }

  return NULL_TREE;
}
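/* Example (illustrative only): under -funsafe-math-optimizations,
   "hypot (x, x)" becomes "fabs (x) * sqrt (2.0)", using the identity
   sqrt(x*x + x*x) == |x|*sqrt(2).  */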
/* Fold a builtin function call to pow, powf, or powl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
{
  tree res;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
    return res;

  /* Optimize pow(1.0,y) = 1.0.  */
  if (real_onep (arg0))
    return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);

  if (TREE_CODE (arg1) == REAL_CST
      && !TREE_OVERFLOW (arg1))
    {
      REAL_VALUE_TYPE cint;
      REAL_VALUE_TYPE c;
      HOST_WIDE_INT n;

      c = TREE_REAL_CST (arg1);

      /* Optimize pow(x,0.0) = 1.0.  */
      if (REAL_VALUES_EQUAL (c, dconst0))
	return omit_one_operand_loc (loc, type, build_real (type, dconst1),
				     arg0);

      /* Optimize pow(x,1.0) = x.  */
      if (REAL_VALUES_EQUAL (c, dconst1))
	return arg0;

      /* Optimize pow(x,-1.0) = 1.0/x.  */
      if (REAL_VALUES_EQUAL (c, dconstm1))
	return fold_build2_loc (loc, RDIV_EXPR, type,
				build_real (type, dconst1), arg0);

      /* Optimize pow(x,0.5) = sqrt(x).  */
      if (flag_unsafe_math_optimizations
	  && REAL_VALUES_EQUAL (c, dconsthalf))
	{
	  tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

	  if (sqrtfn != NULL_TREE)
	    return build_call_expr_loc (loc, sqrtfn, 1, arg0);
	}

      /* Optimize pow(x,1.0/3.0) = cbrt(x).  */
      if (flag_unsafe_math_optimizations)
	{
	  const REAL_VALUE_TYPE dconstroot
	    = real_value_truncate (TYPE_MODE (type), dconst_third ());

	  if (REAL_VALUES_EQUAL (c, dconstroot))
	    {
	      tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
	      if (cbrtfn != NULL_TREE)
		return build_call_expr_loc (loc, cbrtfn, 1, arg0);
	    }
	}

      /* Check for an integer exponent.  */
      n = real_to_integer (&c);
      real_from_integer (&cint, VOIDmode, n, SIGNED);
      if (real_identical (&c, &cint))
	{
	  /* Attempt to evaluate pow at compile-time, unless this should
	     raise an exception.  */
	  if (TREE_CODE (arg0) == REAL_CST
	      && !TREE_OVERFLOW (arg0)
	      && (n > 0
		  || (!flag_trapping_math && !flag_errno_math)
		  || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
	    {
	      REAL_VALUE_TYPE x;
	      bool inexact;

	      x = TREE_REAL_CST (arg0);
	      inexact = real_powi (&x, TYPE_MODE (type), &x, n);
	      if (flag_unsafe_math_optimizations || !inexact)
		return build_real (type, x);
	    }

	  /* Strip sign ops from even integer powers.  */
	  if ((n & 1) == 0 && flag_unsafe_math_optimizations)
	    {
	      tree narg0 = fold_strip_sign_ops (arg0);
	      if (narg0)
		return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
	    }
	}
    }

  if (flag_unsafe_math_optimizations)
    {
      const enum built_in_function fcode = builtin_mathfn_code (arg0);

      /* Optimize pow(expN(x),y) = expN(x*y).  */
      if (BUILTIN_EXPONENT_P (fcode))
	{
	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
	  return build_call_expr_loc (loc, expfn, 1, arg);
	}

      /* Optimize pow(sqrt(x),y) = pow(x,y*0.5).  */
      if (BUILTIN_SQRT_P (fcode))
	{
	  tree narg0 = CALL_EXPR_ARG (arg0, 0);
	  tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
					build_real (type, dconsthalf));
	  return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
	}

      /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
	{
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  if (tree_expr_nonnegative_p (arg))
	    {
	      const REAL_VALUE_TYPE dconstroot
		= real_value_truncate (TYPE_MODE (type), dconst_third ());
	      tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
					    build_real (type, dconstroot));
	      return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
	    }
	}

      /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL)
	{
	  tree arg00 = CALL_EXPR_ARG (arg0, 0);
	  if (tree_expr_nonnegative_p (arg00))
	    {
	      tree arg01 = CALL_EXPR_ARG (arg0, 1);
	      tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
	      return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
	    }
	}
    }

  return NULL_TREE;
}
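/* Summary of the pow identities applied above (illustrative only):

       pow (x, 0.0)  -> 1.0            pow (x, 1.0) -> x
       pow (x, -1.0) -> 1.0 / x        pow (x, 0.5) -> sqrt (x)
       pow (exp (x), y)    -> exp (x * y)
       pow (pow (x, y), z) -> pow (x, y * z)   iff x is nonnegative

   The last three rewrites apply only under
   -funsafe-math-optimizations.  */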
/* Fold a builtin function call to powi, powif, or powil with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
		   tree arg0, tree arg1, tree type)
{
  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize pow(1.0,y) = 1.0.  */
  if (real_onep (arg0))
    return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);

  if (tree_fits_shwi_p (arg1))
    {
      HOST_WIDE_INT c = tree_to_shwi (arg1);

      /* Evaluate powi at compile-time.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && !TREE_OVERFLOW (arg0))
	{
	  REAL_VALUE_TYPE x;
	  x = TREE_REAL_CST (arg0);
	  real_powi (&x, TYPE_MODE (type), &x, c);
	  return build_real (type, x);
	}

      /* Optimize pow(x,0) = 1.0.  */
      if (c == 0)
	return omit_one_operand_loc (loc, type, build_real (type, dconst1),
				     arg0);

      /* Optimize pow(x,1) = x.  */
      if (c == 1)
	return arg0;

      /* Optimize pow(x,-1) = 1.0/x.  */
      if (c == -1)
	return fold_build2_loc (loc, RDIV_EXPR, type,
				build_real (type, dconst1), arg0);
    }

  return NULL_TREE;
}
/* A subroutine of fold_builtin to fold the various exponent
   functions.  Return NULL_TREE if no simplification can be made.
   FUNC is the corresponding MPFR exponent function.  */

static tree
fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
		       int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));
      tree res;

      /* Calculate the result when the argument is a constant.  */
      if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
	return res;

      /* Optimize expN(logN(x)) = x.  */
      if (flag_unsafe_math_optimizations)
	{
	  const enum built_in_function fcode = builtin_mathfn_code (arg);

	  if ((func == mpfr_exp
	       && (fcode == BUILT_IN_LOG
		   || fcode == BUILT_IN_LOGF
		   || fcode == BUILT_IN_LOGL))
	      || (func == mpfr_exp2
		  && (fcode == BUILT_IN_LOG2
		      || fcode == BUILT_IN_LOG2F
		      || fcode == BUILT_IN_LOG2L))
	      || (func == mpfr_exp10
		  && (fcode == BUILT_IN_LOG10
		      || fcode == BUILT_IN_LOG10F
		      || fcode == BUILT_IN_LOG10L)))
	    return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
	}
    }

  return NULL_TREE;
}
/* Fold function call to builtin stpcpy with arguments DEST and SRC.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
{
  tree fn, len, lenp1, call, type;

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE))
    return NULL_TREE;

  len = c_strlen (src, 1);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return NULL_TREE;

  if (optimize_function_for_size_p (cfun)
      /* If length is zero it's small enough.  */
      && !integer_zerop (len))
    return NULL_TREE;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return NULL_TREE;

  lenp1 = size_binop_loc (loc, PLUS_EXPR,
			  fold_convert_loc (loc, size_type_node, len),
			  build_int_cst (size_type_node, 1));
  /* We use dest twice in building our expression.  Save it from
     multiple expansions.  */
  dest = builtin_save_expr (dest);
  call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);

  type = TREE_TYPE (TREE_TYPE (fndecl));
  dest = fold_build_pointer_plus_loc (loc, dest, len);
  dest = fold_convert_loc (loc, type, dest);
  dest = omit_one_operand_loc (loc, type, dest, call);
  return dest;
}
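/* Example (illustrative only): for a constant source,

       stpcpy (d, "hi");

   is rewritten via memcpy as roughly "(memcpy (d, "hi", 3), d + 2)",
   i.e. the copy plus the statically known end-of-string pointer.  */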
/* Fold function call to builtin memchr.  ARG1, ARG2 and LEN are the
   arguments to the call, and TYPE is its return type.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
{
  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, INTEGER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1;

      if (TREE_CODE (arg2) != INTEGER_CST
	  || !tree_fits_uhwi_p (len))
	return NULL_TREE;

      p1 = c_getstr (arg1);
      if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
	{
	  char c;
	  const char *r;
	  tree tem;

	  if (target_char_cast (arg2, &c))
	    return NULL_TREE;

	  r = (const char *) memchr (p1, c, tree_to_uhwi (len));

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (arg1), 0);

	  tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
	  return fold_convert_loc (loc, type, tem);
	}
      return NULL_TREE;
    }
}
/* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
				  arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* If all arguments are constant, and the value of len is not greater
     than the lengths of arg1 and arg2, evaluate at compile-time.  */
  if (tree_fits_uhwi_p (len) && p1 && p2
      && compare_tree_int (len, strlen (p1) + 1) <= 0
      && compare_tree_int (len, strlen (p2) + 1) <= 0)
    {
      const int r = memcmp (p1, p2, tree_to_uhwi (len));

      if (r > 0)
	return integer_one_node;
      else if (r < 0)
	return integer_minus_one_node;
      else
	return integer_zero_node;
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg1)));
      tree ind2
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
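/* Example (illustrative only): "memcmp (p, q, 1)" is rewritten to the
   byte difference "*(const unsigned char *) p - *(const unsigned char *) q",
   and fully constant calls such as memcmp ("ab", "ab", 2) fold to 0.  */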
/* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return integer_zero_node;

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  if (p1 && p2)
    {
      const int i = strcmp (p1, p2);
      if (i < 0)
	return integer_minus_one_node;
      else if (i > 0)
	return integer_one_node;
      else
	return integer_zero_node;
    }

  /* If the second arg is "", return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0')
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      return fold_convert_loc (loc, integer_type_node,
			       build1 (INDIRECT_REF, cst_uchar_node,
				       fold_convert_loc (loc,
							 cst_uchar_ptr_node,
							 arg1)));
    }

  /* If the first arg is "", return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0')
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree temp
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg2)));
      return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
    }

  return NULL_TREE;
}
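/* Example (illustrative only): "strcmp (s, "")" is rewritten to
   "*(const unsigned char *) s", which is zero exactly when s is empty,
   matching strcmp's sign contract for an empty second argument.  */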
/* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
				  arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  if (tree_fits_uhwi_p (len) && p1 && p2)
    {
      const int i = strncmp (p1, p2, tree_to_uhwi (len));
      if (i > 0)
	return integer_one_node;
      else if (i < 0)
	return integer_minus_one_node;
      else
	return integer_zero_node;
    }

  /* If the second arg is "", and the length is greater than zero,
     return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      return fold_convert_loc (loc, integer_type_node,
			       build1 (INDIRECT_REF, cst_uchar_node,
				       fold_convert_loc (loc,
							 cst_uchar_ptr_node,
							 arg1)));
    }

  /* If the first arg is "", and the length is greater than zero,
     return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree temp = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg2)));
      return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1 = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg1)));
      tree ind2 = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
/* Fold function call to builtin signbit, signbitf or signbitl with argument
   ARG.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_signbit (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* If ARG is a compile-time constant, determine the result.  */
  if (TREE_CODE (arg) == REAL_CST
      && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE c;

      c = TREE_REAL_CST (arg);
      return (REAL_VALUE_NEGATIVE (c)
	      ? build_one_cst (type)
	      : build_zero_cst (type));
    }

  /* If ARG is non-negative, the result is always zero.  */
  if (tree_expr_nonnegative_p (arg))
    return omit_one_operand_loc (loc, type, integer_zero_node, arg);

  /* If ARG's format doesn't have signed zeros, return "arg < 0.0".  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
    return fold_convert (type,
			 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
					  build_real (TREE_TYPE (arg), dconst0)));

  return NULL_TREE;
}
/* Fold function call to builtin copysign, copysignf or copysignl with
   arguments ARG1 and ARG2.  Return NULL_TREE if no simplification can
   be made.  */

static tree
fold_builtin_copysign (location_t loc, tree fndecl,
		       tree arg1, tree arg2, tree type)
{
  tree tem;

  if (!validate_arg (arg1, REAL_TYPE)
      || !validate_arg (arg2, REAL_TYPE))
    return NULL_TREE;

  /* copysign(X,X) is X.  */
  if (operand_equal_p (arg1, arg2, 0))
    return fold_convert_loc (loc, type, arg1);

  /* If ARG1 and ARG2 are compile-time constants, determine the result.  */
  if (TREE_CODE (arg1) == REAL_CST
      && TREE_CODE (arg2) == REAL_CST
      && !TREE_OVERFLOW (arg1)
      && !TREE_OVERFLOW (arg2))
    {
      REAL_VALUE_TYPE c1, c2;

      c1 = TREE_REAL_CST (arg1);
      c2 = TREE_REAL_CST (arg2);
      /* c1.sign := c2.sign.  */
      real_copysign (&c1, &c2);
      return build_real (type, c1);
    }

  /* copysign(X, Y) is fabs(X) when Y is always non-negative.
     Remember to evaluate Y for side-effects.  */
  if (tree_expr_nonnegative_p (arg2))
    return omit_one_operand_loc (loc, type,
				 fold_build1_loc (loc, ABS_EXPR, type, arg1),
				 arg2);

  /* Strip sign changing operations for the first argument.  */
  tem = fold_strip_sign_ops (arg1);
  if (tem)
    return build_call_expr_loc (loc, fndecl, 2, tem, arg2);

  return NULL_TREE;
}
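/* Example (illustrative only): "copysign (x, 2.0)" folds to "fabs (x)"
   because the second argument is known nonnegative, while
   "copysign (3.0, -0.5)" folds to the constant -3.0 via real_copysign
   above.  */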
/* Fold a call to builtin isascii with argument ARG.  */

static tree
fold_builtin_isascii (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      /* Transform isascii(c) -> ((c & ~0x7f) == 0).  */
      arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
			 build_int_cst (integer_type_node,
					~ (unsigned HOST_WIDE_INT) 0x7f));
      return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
			      arg, integer_zero_node);
    }
}

/* Fold a call to builtin toascii with argument ARG.  */

static tree
fold_builtin_toascii (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Transform toascii(c) -> (c & 0x7f).  */
  return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
			  build_int_cst (integer_type_node, 0x7f));
}

/* Fold a call to builtin isdigit with argument ARG.  */

static tree
fold_builtin_isdigit (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9.  */
      /* According to the C standard, isdigit is unaffected by locale.
	 However, it definitely is affected by the target character set.  */
      unsigned HOST_WIDE_INT target_digit0
	= lang_hooks.to_target_charset ('0');

      if (target_digit0 == 0)
	return NULL_TREE;

      arg = fold_convert_loc (loc, unsigned_type_node, arg);
      arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
			 build_int_cst (unsigned_type_node, target_digit0));
      return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
			      build_int_cst (unsigned_type_node, 9));
    }
}
/* Fold a call to fabs, fabsf or fabsl with argument ARG.  */

static tree
fold_builtin_fabs (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  arg = fold_convert_loc (loc, type, arg);
  if (TREE_CODE (arg) == REAL_CST)
    return fold_abs_const (arg, type);
  return fold_build1_loc (loc, ABS_EXPR, type, arg);
}

/* Fold a call to abs, labs, llabs or imaxabs with argument ARG.  */

static tree
fold_builtin_abs (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  arg = fold_convert_loc (loc, type, arg);
  if (TREE_CODE (arg) == INTEGER_CST)
    return fold_abs_const (arg, type);
  return fold_build1_loc (loc, ABS_EXPR, type, arg);
}
/* Fold a fma operation with arguments ARG[012].  */

tree
fold_fma (location_t loc ATTRIBUTE_UNUSED,
	  tree type, tree arg0, tree arg1, tree arg2)
{
  if (TREE_CODE (arg0) == REAL_CST
      && TREE_CODE (arg1) == REAL_CST
      && TREE_CODE (arg2) == REAL_CST)
    return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);

  return NULL_TREE;
}

/* Fold a call to fma, fmaf, or fmal with arguments ARG[012].  */

static tree
fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
{
  if (validate_arg (arg0, REAL_TYPE)
      && validate_arg (arg1, REAL_TYPE)
      && validate_arg (arg2, REAL_TYPE))
    {
      tree tem = fold_fma (loc, type, arg0, arg1, arg2);
      if (tem)
	return tem;

      /* ??? Only expand to FMA_EXPR if it's directly supported.  */
      if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
	return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
    }
  return NULL_TREE;
}
/* Fold a call to builtin fmin or fmax.  */

static tree
fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
			tree type, bool max)
{
  if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
    {
      /* Calculate the result when the argument is a constant.  */
      tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));

      if (res)
	return res;

      /* If either argument is NaN, return the other one.  Avoid the
	 transformation if we get (and honor) a signalling NaN.  Using
	 omit_one_operand() ensures we create a non-lvalue.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && real_isnan (&TREE_REAL_CST (arg0))
	  && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      || ! TREE_REAL_CST (arg0).signalling))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      if (TREE_CODE (arg1) == REAL_CST
	  && real_isnan (&TREE_REAL_CST (arg1))
	  && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
	      || ! TREE_REAL_CST (arg1).signalling))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Transform fmin/fmax(x,x) -> x.  */
      if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR.  C99 requires these
	 functions to return the numeric arg if the other one is NaN.
	 These tree codes don't honor that, so only transform if
	 -ffinite-math-only is set.  C99 doesn't require -0.0 to be
	 handled, so we don't have to worry about it either.  */
      if (flag_finite_math_only)
	return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type, arg1));
    }
  return NULL_TREE;
}
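/* Example (illustrative only): "fmax (x, __builtin_nan (""))" folds to
   "x" (a quiet NaN argument is dropped), and under -ffinite-math-only
   "fmax (a, b)" becomes a plain MAX_EXPR.  */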
/* Fold a call to builtin carg(a+bi) -> atan2(b,a).  */

static tree
fold_builtin_carg (location_t loc, tree arg, tree type)
{
  if (validate_arg (arg, COMPLEX_TYPE)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
    {
      tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);

      if (atan2_fn)
	{
	  tree new_arg = builtin_save_expr (arg);
	  tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
	  tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
	  return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
	}
    }

  return NULL_TREE;
}
/* Fold a call to builtin logb/ilogb.  */

static tree
fold_builtin_logb (location_t loc, tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
	{
	case rvc_nan:
	case rvc_inf:
	  /* If arg is Inf or NaN and we're logb, return it.  */
	  if (TREE_CODE (rettype) == REAL_TYPE)
	    {
	      /* For logb(-Inf) we have to return +Inf.  */
	      if (real_isinf (value) && real_isneg (value))
		{
		  REAL_VALUE_TYPE tem;
		  real_inf (&tem);
		  return build_real (rettype, tem);
		}
	      return fold_convert_loc (loc, rettype, arg);
	    }
	  /* Fall through... */
	case rvc_zero:
	  /* Zero may set errno and/or raise an exception for logb, also
	     for ilogb we don't know FP_ILOGB0.  */
	  return NULL_TREE;
	case rvc_normal:
	  /* For normal numbers, proceed iff radix == 2.  In GCC,
	     normalized significands are in the range [0.5, 1.0).  We
	     want the exponent as if they were [1.0, 2.0) so get the
	     exponent and subtract 1.  */
	  if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
	    return fold_convert_loc (loc, rettype,
				     build_int_cst (integer_type_node,
						    REAL_EXP (value)-1));
	  break;
	}
    }

  return NULL_TREE;
}
/* Fold a call to builtin significand, if radix == 2.  */

static tree
fold_builtin_significand (location_t loc, tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
	{
	case rvc_zero:
	case rvc_nan:
	case rvc_inf:
	  /* If arg is +-0, +-Inf or +-NaN, then return it.  */
	  return fold_convert_loc (loc, rettype, arg);
	case rvc_normal:
	  /* For normal numbers, proceed iff radix == 2.  */
	  if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
	    {
	      REAL_VALUE_TYPE result = *value;
	      /* In GCC, normalized significands are in the range [0.5,
		 1.0).  We want them to be [1.0, 2.0) so set the
		 exponent to 1.  */
	      SET_REAL_EXP (&result, 1);
	      return build_real (rettype, result);
	    }
	  break;
	}
    }

  return NULL_TREE;
}
/* Fold a call to builtin frexp, we can assume the base is 2.  */

static tree
fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      tree frac, exp;

      switch (value->cl)
	{
	case rvc_zero:
	  /* For +-0, return (*exp = 0, +-0).  */
	  exp = integer_zero_node;
	  frac = arg0;
	  break;
	case rvc_nan:
	case rvc_inf:
	  /* For +-NaN or +-Inf, *exp is unspecified, return arg0.  */
	  return omit_one_operand_loc (loc, rettype, arg0, arg1);
	case rvc_normal:
	  {
	    /* Since the frexp function always expects base 2, and in
	       GCC normalized significands are already in the range
	       [0.5, 1.0), we have exactly what frexp wants.  */
	    REAL_VALUE_TYPE frac_rvt = *value;
	    SET_REAL_EXP (&frac_rvt, 0);
	    frac = build_real (rettype, frac_rvt);
	    exp = build_int_cst (integer_type_node, REAL_EXP (value));
	  }
	  break;
	default:
	  gcc_unreachable ();
	}

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
    }

  return NULL_TREE;
}
/* Fold a call to builtin ldexp or scalbn/scalbln.  If LDEXP is true
   then we can assume the base is two.  If it's false, then we have to
   check the mode of the TYPE parameter in certain cases.  */

static tree
fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
			    tree type, bool ldexp)
{
  if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0.  */
      if (real_zerop (arg0) || integer_zerop (arg1)
	  || (TREE_CODE (arg0) == REAL_CST
	      && !real_isfinite (&TREE_REAL_CST (arg0))))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* If both arguments are constant, then try to evaluate it.  */
      if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
	  && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
	  && tree_fits_shwi_p (arg1))
	{
	  /* Bound the maximum adjustment to twice the range of the
	     mode's valid exponents.  Use abs to ensure the range is
	     positive as a sanity check.  */
	  const long max_exp_adj = 2 *
	    labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
		  - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);

	  /* Get the user-requested adjustment.  */
	  const HOST_WIDE_INT req_exp_adj = tree_to_shwi (arg1);

	  /* The requested adjustment must be inside this range.  This
	     is a preliminary cap to avoid things like overflow, we
	     may still fail to compute the result for other reasons.  */
	  if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
	    {
	      REAL_VALUE_TYPE initial_result;

	      real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);

	      /* Ensure we didn't overflow.  */
	      if (! real_isinf (&initial_result))
		{
		  const REAL_VALUE_TYPE trunc_result
		    = real_value_truncate (TYPE_MODE (type), initial_result);

		  /* Only proceed if the target mode can hold the
		     resulting value.  */
		  if (REAL_VALUES_EQUAL (initial_result, trunc_result))
		    return build_real (type, trunc_result);
		}
	    }
	}
    }

  return NULL_TREE;
}
/* Fold a call to builtin modf.  */

static tree
fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      REAL_VALUE_TYPE trunc, frac;

      switch (value->cl)
	{
	case rvc_zero:
	case rvc_nan:
	  /* For +-NaN or +-0, return (*arg1 = arg0, arg0).  */
	  trunc = frac = *value;
	  break;
	case rvc_inf:
	  /* For +-Inf, return (*arg1 = arg0, +-0).  */
	  frac = dconst0;
	  frac.sign = value->sign;
	  trunc = *value;
	  break;
	case rvc_normal:
	  /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)).  */
	  real_trunc (&trunc, VOIDmode, value);
	  real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
	  /* If the original number was negative and already
	     integral, then the fractional part is -0.0.  */
	  if (value->sign && frac.cl == rvc_zero)
	    frac.sign = value->sign;
	  break;
	}

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
			      build_real (rettype, trunc));
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
			      build_real (rettype, frac));
    }

  return NULL_TREE;
}
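/* Example (illustrative only): "modf (2.5, &ip)" folds to the pair
   (*&ip = 2.0, 0.5) expressed as a COMPOUND_EXPR, so both the stored
   integral part and the returned fraction are compile-time
   constants.  */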
/* Given a location LOC, an interclass builtin function decl FNDECL
   and its single argument ARG, return a folded expression computing
   the same, or NULL_TREE if we either couldn't or didn't want to fold
   (the latter happens if there's an RTL instruction available).  */

static tree
fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum machine_mode mode;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
    return NULL_TREE;

  mode = TYPE_MODE (TREE_TYPE (arg));

  /* If there is no optab, try generic code.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
      tree result;

    CASE_FLT_FN (BUILT_IN_ISINF):
      {
	/* isinf(x) -> isgreater(fabs(x),DBL_MAX).  */
	tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&r, buf);
	result = build_call_expr (isgr_fn, 2,
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	return result;
      }
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_ISFINITE:
      {
	/* isfinite(x) -> islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&r, buf);
	result = build_call_expr (isle_fn, 2,
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	/*result = fold_build2_loc (loc, UNGT_EXPR,
				    TREE_TYPE (TREE_TYPE (fndecl)),
				    fold_build1_loc (loc, ABS_EXPR, type, arg),
				    build_real (type, r));
	  result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
				    TREE_TYPE (TREE_TYPE (fndecl)),
				    result);*/
	return result;
      }
    case BUILT_IN_ISNORMAL:
      {
	/* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
	   islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
	tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE rmax, rmin;
	char buf[128];

	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&rmax, buf);
	sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
	real_from_string (&rmin, buf);
	arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
	result = build_call_expr (isle_fn, 2, arg,
				  build_real (type, rmax));
	result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
			      build_call_expr (isge_fn, 2, arg,
					       build_real (type, rmin)));
	return result;
      }
    default:
      break;
    }

  return NULL_TREE;
}
/* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
   ARG is the argument for the call.  */

static tree
fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  REAL_VALUE_TYPE r;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  switch (builtin_index)
    {
    case BUILT_IN_ISINF:
      if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  if (real_isinf (&r))
	    return real_compare (GT_EXPR, &r, &dconst0)
		   ? integer_one_node : integer_minus_one_node;
	  else
	    return integer_zero_node;
	}

      return NULL_TREE;

    case BUILT_IN_ISINF_SIGN:
      {
	/* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
	/* In a boolean context, GCC will fold the inner COND_EXPR to
	   1.  So e.g. "if (isinf_sign(x))" would be folded to just
	   "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))".  */
	tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
	tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
	tree tmp = NULL_TREE;

	arg = builtin_save_expr (arg);

	if (signbit_fn && isinf_fn)
	  {
	    tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
	    tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);

	    signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
					    signbit_call, integer_zero_node);
	    isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
					  isinf_call, integer_zero_node);

	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
				   integer_minus_one_node, integer_one_node);
	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
				   isinf_call, tmp,
				   integer_zero_node);
	  }

	return tmp;
      }

    case BUILT_IN_ISFINITE:
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
	  && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand_loc (loc, type, integer_one_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  return real_isfinite (&r) ? integer_one_node : integer_zero_node;
	}

      return NULL_TREE;

    case BUILT_IN_ISNAN:
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  return real_isnan (&r) ? integer_one_node : integer_zero_node;
	}

      arg = builtin_save_expr (arg);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);

    default:
      gcc_unreachable ();
    }
}
/* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
   This builtin will generate code to return the appropriate floating
   point classification depending on the value of the floating point
   number passed in.  The possible return values must be supplied as
   int arguments to the call in the following order: FP_NAN, FP_INFINITE,
   FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipses is for exactly
   one floating point argument which is "type generic".  */

static tree
fold_builtin_fpclassify (location_t loc, tree exp)
{
  tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
    arg, type, res, tmp;
  enum machine_mode mode;
  REAL_VALUE_TYPE r;
  char buf[128];

  /* Verify the required arguments in the original call.  */
  if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE,
			 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_TREE;

  fp_nan = CALL_EXPR_ARG (exp, 0);
  fp_infinite = CALL_EXPR_ARG (exp, 1);
  fp_normal = CALL_EXPR_ARG (exp, 2);
  fp_subnormal = CALL_EXPR_ARG (exp, 3);
  fp_zero = CALL_EXPR_ARG (exp, 4);
  arg = CALL_EXPR_ARG (exp, 5);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (type);
  arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));

  /* fpclassify(x) ->
       isnan(x) ? FP_NAN :
	 (fabs(x) == Inf ? FP_INFINITE :
	   (fabs(x) >= DBL_MIN ? FP_NORMAL :
	     (x == 0 ? FP_ZERO : FP_SUBNORMAL))).  */

  tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
			 build_real (type, dconst0));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
			 tmp, fp_zero, fp_subnormal);

  sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
  real_from_string (&r, buf);
  tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
			 arg, build_real (type, r));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);

  if (HONOR_INFINITIES (mode))
    {
      real_inf (&r);
      tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
			     build_real (type, r));
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
			     fp_infinite, res);
    }

  if (HONOR_NANS (mode))
    {
      tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
    }

  return res;
}
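/* Illustrative expansion (not from the original source): for a double
   argument the fold above produces roughly

       isnan (x) ? FP_NAN
	 : fabs (x) == __builtin_inf () ? FP_INFINITE
	 : fabs (x) >= DBL_MIN ? FP_NORMAL
	 : x == 0.0 ? FP_ZERO : FP_SUBNORMAL

   built bottom-up as nested COND_EXPRs.  */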
/* Fold a call to an unordered comparison function such as
   __builtin_isgreater().  FNDECL is the FUNCTION_DECL for the function
   being called and ARG0 and ARG1 are the arguments for the call.
   UNORDERED_CODE and ORDERED_CODE are comparison codes that give
   the opposite of the desired result.  UNORDERED_CODE is used
   for modes that can hold NaNs and ORDERED_CODE is used for
   the rest.  */

static tree
fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
			    enum tree_code unordered_code,
			    enum tree_code ordered_code)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum tree_code code;
  tree type0, type1;
  enum tree_code code0, code1;
  tree cmp_type = NULL_TREE;

  type0 = TREE_TYPE (arg0);
  type1 = TREE_TYPE (arg1);

  code0 = TREE_CODE (type0);
  code1 = TREE_CODE (type1);

  if (code0 == REAL_TYPE && code1 == REAL_TYPE)
    /* Choose the wider of two real types.  */
    cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
      ? type0 : type1;
  else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
    cmp_type = type0;
  else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
    cmp_type = type1;

  arg0 = fold_convert_loc (loc, cmp_type, arg0);
  arg1 = fold_convert_loc (loc, cmp_type, arg1);

  if (unordered_code == UNORDERED_EXPR)
    {
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
    }

  code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
						   : ordered_code;
  return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
			  fold_build2_loc (loc, code, type, arg0, arg1));
}
/* Fold a call to built-in function FNDECL with 0 arguments.
   IGNORE is true if the result of the function call is ignored.  This
   function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_INF):
    case BUILT_IN_INFD32:
    case BUILT_IN_INFD64:
    case BUILT_IN_INFD128:
      return fold_builtin_inf (loc, type, true);

    CASE_FLT_FN (BUILT_IN_HUGE_VAL):
      return fold_builtin_inf (loc, type, false);

    case BUILT_IN_CLASSIFY_TYPE:
      return fold_builtin_classify_type (NULL_TREE);

    case BUILT_IN_UNREACHABLE:
      if (flag_sanitize & SANITIZE_UNREACHABLE
	  && (current_function_decl == NULL
	      || !lookup_attribute ("no_sanitize_undefined",
				    DECL_ATTRIBUTES (current_function_decl))))
	return ubsan_instrument_unreachable (loc);
      break;

    default:
      break;
    }
  return NULL_TREE;
}
/* Fold a call to built-in function FNDECL with 1 argument, ARG0.
   IGNORE is true if the result of the function call is ignored.  This
   function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  switch (fcode)
    {
    case BUILT_IN_CONSTANT_P:
      {
	tree val = fold_builtin_constant_p (arg0);

	/* Gimplification will pull the CALL_EXPR for the builtin out of
	   an if condition.  When not optimizing, we'll not CSE it back.
	   To avoid link error types of regressions, return false now.  */
	if (!val && !optimize)
	  val = integer_zero_node;

	return val;
      }

    case BUILT_IN_CLASSIFY_TYPE:
      return fold_builtin_classify_type (arg0);

    case BUILT_IN_STRLEN:
      return fold_builtin_strlen (loc, type, arg0);

    CASE_FLT_FN (BUILT_IN_FABS):
    case BUILT_IN_FABSD32:
    case BUILT_IN_FABSD64:
    case BUILT_IN_FABSD128:
      return fold_builtin_fabs (loc, arg0, type);

    case BUILT_IN_ABS:
    case BUILT_IN_LABS:
    case BUILT_IN_LLABS:
    case BUILT_IN_IMAXABS:
      return fold_builtin_abs (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_CONJ):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
      break;

    CASE_FLT_FN (BUILT_IN_CREAL):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return non_lvalue_loc (loc,
			       fold_build1_loc (loc, REALPART_EXPR, type, arg0));
      break;

    CASE_FLT_FN (BUILT_IN_CIMAG):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return non_lvalue_loc (loc,
			       fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
      break;

    CASE_FLT_FN (BUILT_IN_CCOS):
      return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);

    CASE_FLT_FN (BUILT_IN_CCOSH):
      return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);

    CASE_FLT_FN (BUILT_IN_CPROJ):
      return fold_builtin_cproj (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_CSIN):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_sin);
      break;

    CASE_FLT_FN (BUILT_IN_CSINH):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_sinh);
      break;

    CASE_FLT_FN (BUILT_IN_CTAN):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_tan);
      break;

    CASE_FLT_FN (BUILT_IN_CTANH):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_tanh);
      break;

    CASE_FLT_FN (BUILT_IN_CLOG):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_log);
      break;

    CASE_FLT_FN (BUILT_IN_CSQRT):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_sqrt);
      break;

    CASE_FLT_FN (BUILT_IN_CASIN):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_asin);
      break;

    CASE_FLT_FN (BUILT_IN_CACOS):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_acos);
      break;

    CASE_FLT_FN (BUILT_IN_CATAN):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_atan);
      break;

    CASE_FLT_FN (BUILT_IN_CASINH):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_asinh);
      break;

    CASE_FLT_FN (BUILT_IN_CACOSH):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_acosh);
      break;

    CASE_FLT_FN (BUILT_IN_CATANH):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_atanh);
      break;

    CASE_FLT_FN (BUILT_IN_CABS):
      return fold_builtin_cabs (loc, arg0, type, fndecl);

    CASE_FLT_FN (BUILT_IN_CARG):
      return fold_builtin_carg (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_SQRT):
      return fold_builtin_sqrt (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_CBRT):
      return fold_builtin_cbrt (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_ASIN):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_asin,
			     &dconstm1, &dconst1, true);
      break;

    CASE_FLT_FN (BUILT_IN_ACOS):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_acos,
			     &dconstm1, &dconst1, true);
      break;

    CASE_FLT_FN (BUILT_IN_ATAN):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_ASINH):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_ACOSH):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_acosh,
			     &dconst1, NULL, true);
      break;

    CASE_FLT_FN (BUILT_IN_ATANH):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_atanh,
			     &dconstm1, &dconst1, false);
      break;

    CASE_FLT_FN (BUILT_IN_SIN):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_COS):
      return fold_builtin_cos (loc, arg0, type, fndecl);

    CASE_FLT_FN (BUILT_IN_TAN):
      return fold_builtin_tan (arg0, type);

    CASE_FLT_FN (BUILT_IN_CEXP):
      return fold_builtin_cexp (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_CEXPI):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
      break;

    CASE_FLT_FN (BUILT_IN_SINH):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_COSH):
      return fold_builtin_cosh (loc, arg0, type, fndecl);

    CASE_FLT_FN (BUILT_IN_TANH):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_ERF):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_ERFC):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_TGAMMA):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_EXP):
      return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);

    CASE_FLT_FN (BUILT_IN_EXP2):
      return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);

    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
      return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);

    CASE_FLT_FN (BUILT_IN_EXPM1):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_LOG):
      return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);

    CASE_FLT_FN (BUILT_IN_LOG2):
      return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);

    CASE_FLT_FN (BUILT_IN_LOG10):
      return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);

    CASE_FLT_FN (BUILT_IN_LOG1P):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_log1p,
			     &dconstm1, NULL, false);
      break;

    CASE_FLT_FN (BUILT_IN_J0):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_j0,
			     NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_J1):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_j1,
			     NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_Y0):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_y0,
			     &dconst0, NULL, false);
      break;

    CASE_FLT_FN (BUILT_IN_Y1):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_y1,
			     &dconst0, NULL, false);
      break;

    CASE_FLT_FN (BUILT_IN_NAN):
    case BUILT_IN_NAND32:
    case BUILT_IN_NAND64:
    case BUILT_IN_NAND128:
      return fold_builtin_nan (arg0, type, true);

    CASE_FLT_FN (BUILT_IN_NANS):
      return fold_builtin_nan (arg0, type, false);

    CASE_FLT_FN (BUILT_IN_FLOOR):
      return fold_builtin_floor (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_CEIL):
      return fold_builtin_ceil (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_TRUNC):
      return fold_builtin_trunc (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_ROUND):
      return fold_builtin_round (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return fold_trunc_transparent_mathfn (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL
):
9995 CASE_FLT_FN (BUILT_IN_LLCEIL
):
9996 CASE_FLT_FN (BUILT_IN_LFLOOR
):
9997 CASE_FLT_FN (BUILT_IN_IFLOOR
):
9998 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
9999 CASE_FLT_FN (BUILT_IN_IROUND
):
10000 CASE_FLT_FN (BUILT_IN_LROUND
):
10001 CASE_FLT_FN (BUILT_IN_LLROUND
):
10002 return fold_builtin_int_roundingfn (loc
, fndecl
, arg0
);
10004 CASE_FLT_FN (BUILT_IN_IRINT
):
10005 CASE_FLT_FN (BUILT_IN_LRINT
):
10006 CASE_FLT_FN (BUILT_IN_LLRINT
):
10007 return fold_fixed_mathfn (loc
, fndecl
, arg0
);
10009 case BUILT_IN_BSWAP16
:
10010 case BUILT_IN_BSWAP32
:
10011 case BUILT_IN_BSWAP64
:
10012 return fold_builtin_bswap (fndecl
, arg0
);
10014 CASE_INT_FN (BUILT_IN_FFS
):
10015 CASE_INT_FN (BUILT_IN_CLZ
):
10016 CASE_INT_FN (BUILT_IN_CTZ
):
10017 CASE_INT_FN (BUILT_IN_CLRSB
):
10018 CASE_INT_FN (BUILT_IN_POPCOUNT
):
10019 CASE_INT_FN (BUILT_IN_PARITY
):
10020 return fold_builtin_bitop (fndecl
, arg0
);
10022 CASE_FLT_FN (BUILT_IN_SIGNBIT
):
10023 return fold_builtin_signbit (loc
, arg0
, type
);
10025 CASE_FLT_FN (BUILT_IN_SIGNIFICAND
):
10026 return fold_builtin_significand (loc
, arg0
, type
);
10028 CASE_FLT_FN (BUILT_IN_ILOGB
):
10029 CASE_FLT_FN (BUILT_IN_LOGB
):
10030 return fold_builtin_logb (loc
, arg0
, type
);
10032 case BUILT_IN_ISASCII
:
10033 return fold_builtin_isascii (loc
, arg0
);
10035 case BUILT_IN_TOASCII
:
10036 return fold_builtin_toascii (loc
, arg0
);
10038 case BUILT_IN_ISDIGIT
:
10039 return fold_builtin_isdigit (loc
, arg0
);
10041 CASE_FLT_FN (BUILT_IN_FINITE
):
10042 case BUILT_IN_FINITED32
:
10043 case BUILT_IN_FINITED64
:
10044 case BUILT_IN_FINITED128
:
10045 case BUILT_IN_ISFINITE
:
10047 tree ret
= fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISFINITE
);
10050 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
10053 CASE_FLT_FN (BUILT_IN_ISINF
):
10054 case BUILT_IN_ISINFD32
:
10055 case BUILT_IN_ISINFD64
:
10056 case BUILT_IN_ISINFD128
:
10058 tree ret
= fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISINF
);
10061 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
10064 case BUILT_IN_ISNORMAL
:
10065 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
10067 case BUILT_IN_ISINF_SIGN
:
10068 return fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISINF_SIGN
);
10070 CASE_FLT_FN (BUILT_IN_ISNAN
):
10071 case BUILT_IN_ISNAND32
:
10072 case BUILT_IN_ISNAND64
:
10073 case BUILT_IN_ISNAND128
:
10074 return fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISNAN
);
10076 case BUILT_IN_PRINTF
:
10077 case BUILT_IN_PRINTF_UNLOCKED
:
10078 case BUILT_IN_VPRINTF
:
10079 return fold_builtin_printf (loc
, fndecl
, arg0
, NULL_TREE
, ignore
, fcode
);
10081 case BUILT_IN_FREE
:
10082 if (integer_zerop (arg0
))
10083 return build_empty_stmt (loc
);
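/* Illustrative sketch (not from the original source): with a constant
   argument, the one-argument cases above fold the call to a constant,
   using MPFR/MPC as the reference implementation, e.g.

       double d = __builtin_sin (0.0);    =>  double d = 0.0;
       double e = __builtin_asin (2.0);   =>  not folded: 2.0 lies outside
                                              [-1, 1], the &dconstm1 and
                                              &dconst1 bounds passed to
                                              do_mpfr_arg1

   A NULL bound means the function's domain is unbounded in that
   direction, and the trailing flag selects inclusive endpoints.  */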
/* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
   IGNORE is true if the result of the function call is ignored.  This
   function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_JN):
      if (validate_arg (arg0, INTEGER_TYPE)
          && validate_arg (arg1, REAL_TYPE))
        return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_YN):
      if (validate_arg (arg0, INTEGER_TYPE)
          && validate_arg (arg1, REAL_TYPE))
        return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
                                 &dconst0, false);
      break;

    CASE_FLT_FN (BUILT_IN_DREM):
    CASE_FLT_FN (BUILT_IN_REMAINDER):
      if (validate_arg (arg0, REAL_TYPE)
          && validate_arg (arg1, REAL_TYPE))
        return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
      break;

    CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
    CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
      if (validate_arg (arg0, REAL_TYPE)
          && validate_arg (arg1, POINTER_TYPE))
        return do_mpfr_lgamma_r (arg0, arg1, type);
      break;

    CASE_FLT_FN (BUILT_IN_ATAN2):
      if (validate_arg (arg0, REAL_TYPE)
          && validate_arg (arg1, REAL_TYPE))
        return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
      break;

    CASE_FLT_FN (BUILT_IN_FDIM):
      if (validate_arg (arg0, REAL_TYPE)
          && validate_arg (arg1, REAL_TYPE))
        return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
      break;

    CASE_FLT_FN (BUILT_IN_HYPOT):
      return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_CPOW):
      if (validate_arg (arg0, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
          && validate_arg (arg1, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
        return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
      break;

    CASE_FLT_FN (BUILT_IN_LDEXP):
      return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      return fold_builtin_load_exponent (loc, arg0, arg1,
                                         type, /*ldexp=*/false);

    CASE_FLT_FN (BUILT_IN_FREXP):
      return fold_builtin_frexp (loc, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_MODF):
      return fold_builtin_modf (loc, arg0, arg1, type);

    case BUILT_IN_STRSTR:
      return fold_builtin_strstr (loc, arg0, arg1, type);

    case BUILT_IN_STRSPN:
      return fold_builtin_strspn (loc, arg0, arg1);

    case BUILT_IN_STRCSPN:
      return fold_builtin_strcspn (loc, arg0, arg1);

    case BUILT_IN_STRCHR:
    case BUILT_IN_INDEX:
      return fold_builtin_strchr (loc, arg0, arg1, type);

    case BUILT_IN_STRRCHR:
    case BUILT_IN_RINDEX:
      return fold_builtin_strrchr (loc, arg0, arg1, type);

    case BUILT_IN_STPCPY:
      if (ignore)
        {
          tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
          if (!fn)
            break;

          return build_call_expr_loc (loc, fn, 2, arg0, arg1);
        }
      else
        return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
      break;

    case BUILT_IN_STRCMP:
      return fold_builtin_strcmp (loc, arg0, arg1);

    case BUILT_IN_STRPBRK:
      return fold_builtin_strpbrk (loc, arg0, arg1, type);

    case BUILT_IN_EXPECT:
      return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);

    CASE_FLT_FN (BUILT_IN_POW):
      return fold_builtin_pow (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_POWI):
      return fold_builtin_powi (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_COPYSIGN):
      return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_FMIN):
      return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);

    CASE_FLT_FN (BUILT_IN_FMAX):
      return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);

    case BUILT_IN_ISGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNLE_EXPR, LE_EXPR);
    case BUILT_IN_ISGREATEREQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNLT_EXPR, LT_EXPR);
    case BUILT_IN_ISLESS:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNGE_EXPR, GE_EXPR);
    case BUILT_IN_ISLESSEQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNGT_EXPR, GT_EXPR);
    case BUILT_IN_ISLESSGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNEQ_EXPR, EQ_EXPR);
    case BUILT_IN_ISUNORDERED:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNORDERED_EXPR,
                                         NOP_EXPR);

    /* We do the folding for va_start in the expander.  */
    case BUILT_IN_VA_START:
      break;

    case BUILT_IN_OBJECT_SIZE:
      return fold_builtin_object_size (arg0, arg1);

    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);

    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      if (!validate_arg (arg0, INTEGER_TYPE)
          || TREE_SIDE_EFFECTS (arg0))
        return NULL_TREE;
      else
        return fold_builtin_printf (loc, fndecl,
                                    arg1, NULL_TREE, ignore, fcode);
      break;

    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
                                   ignore, fcode);

    case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
      return fold_builtin_atomic_always_lock_free (arg0, arg1);

    case BUILT_IN_ATOMIC_IS_LOCK_FREE:
      return fold_builtin_atomic_is_lock_free (arg0, arg1);

    default:
      break;
    }
  return NULL_TREE;
}
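/* Illustrative sketch (not from the original source): the unordered
   comparison cases above rewrite the C99 comparison macros in terms of
   unordered comparison codes, e.g.

       __builtin_isgreater (x, y)    =>  !(x unle y)
       __builtin_isunordered (x, y)  =>  x unord y

   which, unlike a raw  x > y,  raises no "invalid" exception for NaN
   operands; when NaNs cannot occur, the plain LE_EXPR/LT_EXPR/...
   variant is used instead.  */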
/* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
   and ARG2.  IGNORE is true if the result of the function call is ignored.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_3 (location_t loc, tree fndecl,
                tree arg0, tree arg1, tree arg2, bool ignore)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_SINCOS):
      return fold_builtin_sincos (loc, arg0, arg1, arg2);

    CASE_FLT_FN (BUILT_IN_FMA):
      return fold_builtin_fma (loc, arg0, arg1, arg2, type);

    CASE_FLT_FN (BUILT_IN_REMQUO):
      if (validate_arg (arg0, REAL_TYPE)
          && validate_arg (arg1, REAL_TYPE)
          && validate_arg (arg2, POINTER_TYPE))
        return do_mpfr_remquo (arg0, arg1, arg2);
      break;

    case BUILT_IN_STRNCAT:
      return fold_builtin_strncat (loc, arg0, arg1, arg2);

    case BUILT_IN_STRNCMP:
      return fold_builtin_strncmp (loc, arg0, arg1, arg2);

    case BUILT_IN_MEMCHR:
      return fold_builtin_memchr (loc, arg0, arg1, arg2, type);

    case BUILT_IN_BCMP:
    case BUILT_IN_MEMCMP:
      return fold_builtin_memcmp (loc, arg0, arg1, arg2);

    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      if (!validate_arg (arg0, INTEGER_TYPE)
          || TREE_SIDE_EFFECTS (arg0))
        return NULL_TREE;
      else
        return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
      break;

    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
                                   ignore, fcode);

    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      if (!validate_arg (arg1, INTEGER_TYPE)
          || TREE_SIDE_EFFECTS (arg1))
        return NULL_TREE;
      else
        return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
                                     ignore, fcode);

    case BUILT_IN_EXPECT:
      return fold_builtin_expect (loc, arg0, arg1, arg2);

    default:
      break;
    }
  return NULL_TREE;
}
/* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
   ARG2, and ARG3.  IGNORE is true if the result of the function call is
   ignored.  This function returns NULL_TREE if no simplification was
   possible.  */

static tree
fold_builtin_4 (location_t loc, tree fndecl,
                tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    case BUILT_IN_STRNCAT_CHK:
      return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);

    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      if (!validate_arg (arg1, INTEGER_TYPE)
          || TREE_SIDE_EFFECTS (arg1))
        return NULL_TREE;
      else
        return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
                                     ignore, fcode);
      break;

    default:
      break;
    }
  return NULL_TREE;
}
/* Fold a call to built-in function FNDECL.  ARGS is an array of NARGS
   arguments, where NARGS <= 4.  IGNORE is true if the result of the
   function call is ignored.  This function returns NULL_TREE if no
   simplification was possible.  Note that this only folds builtins with
   fixed argument patterns.  Foldings that do varargs-to-varargs
   transformations, or that match calls with more than 4 arguments,
   need to be handled with fold_builtin_varargs instead.  */

#define MAX_ARGS_TO_FOLD_BUILTIN 4

static tree
fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs,
                bool ignore)
{
  tree ret = NULL_TREE;

  switch (nargs)
    {
    case 0:
      ret = fold_builtin_0 (loc, fndecl, ignore);
      break;
    case 1:
      ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
      break;
    case 2:
      ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
      break;
    case 3:
      ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
      break;
    case 4:
      ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
                            ignore);
      break;
    default:
      break;
    }
  if (ret)
    {
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}
/* Construct a new CALL_EXPR to FNDECL using the tail of the argument
   list ARGS along with N new arguments in NEWARGS.  SKIP is the number
   of arguments in ARGS to be omitted.  OLDNARGS is the number of
   elements in ARGS.  */

static tree
rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
                          int skip, tree fndecl, int n, va_list newargs)
{
  int nargs = oldnargs - skip + n;
  tree *buffer;

  if (n > 0)
    {
      int i, j;

      buffer = XALLOCAVEC (tree, nargs);
      for (i = 0; i < n; i++)
        buffer[i] = va_arg (newargs, tree);
      for (j = skip; j < oldnargs; j++, i++)
        buffer[i] = args[j];
    }
  else
    buffer = args + skip;

  return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
}
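/* Illustrative sketch (not from the original source): the rewritten
   argument vector is NEWARGS followed by the unskipped tail of ARGS.
   With OLDNARGS = 4, SKIP = 2, N = 1 and NEWARGS = {x}, the new call
   receives  (x, args[2], args[3]).  */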
/* Return true if FNDECL shouldn't be folded right now.
   If a built-in function has an inline attribute always_inline
   wrapper, defer folding it after always_inline functions have
   been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
   might not be performed.  */

bool
avoid_folding_inline_builtin (tree fndecl)
{
  return (DECL_DECLARED_INLINE_P (fndecl)
          && DECL_DISREGARD_INLINE_LIMITS (fndecl)
          && cfun
          && !cfun->always_inline_functions_inlined
          && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
}
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_expr (location_t loc, tree exp, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = get_callee_fndecl (exp);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
         yet.  Defer folding until we see all the arguments
         (after inlining).  */
      && !CALL_EXPR_VA_ARG_PACK (exp))
    {
      int nargs = call_expr_nargs (exp);

      /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
         instead last argument is __builtin_va_arg_pack ().  Defer folding
         even in that case, until arguments are finalized.  */
      if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
        {
          tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
          if (fndecl2
              && TREE_CODE (fndecl2) == FUNCTION_DECL
              && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
              && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
            return NULL_TREE;
        }

      if (avoid_folding_inline_builtin (fndecl))
        return NULL_TREE;

      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
        return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
                                     CALL_EXPR_ARGP (exp), ignore);
      else
        {
          if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
            {
              tree *args = CALL_EXPR_ARGP (exp);
              ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
            }
          if (!ret)
            ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
          if (ret)
            return ret;
        }
    }
  return NULL_TREE;
}
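/* Illustrative note (not from the original source): the deferrals above
   matter for always_inline fortify-style wrappers; a hypothetical one:

       extern inline __attribute__ ((always_inline, gnu_inline)) int
       my_printf (const char *fmt, ...)
       {
         return my_printf_checked (fmt, __builtin_va_arg_pack ());
       }

   The inner call's argument list is only complete once the wrapper has
   been inlined, so folding any earlier would operate on an unfinished
   argument vector.  */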
/* Construct a CALL_EXPR with type TYPE with FN as the function expression.
   N arguments are passed in the array ARGARRAY.  */

tree
fold_builtin_call_array (location_t loc, tree type,
                         tree fn,
                         int n,
                         tree *argarray)
{
  tree ret = NULL_TREE;
  tree exp;

  if (TREE_CODE (fn) == ADDR_EXPR)
    {
      tree fndecl = TREE_OPERAND (fn, 0);
      if (TREE_CODE (fndecl) == FUNCTION_DECL
          && DECL_BUILT_IN (fndecl))
        {
          /* If last argument is __builtin_va_arg_pack (), arguments to this
             function are not finalized yet.  Defer folding until they are.  */
          if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
            {
              tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
              if (fndecl2
                  && TREE_CODE (fndecl2) == FUNCTION_DECL
                  && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
                  && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
                return build_call_array_loc (loc, type, fn, n, argarray);
            }
          if (avoid_folding_inline_builtin (fndecl))
            return build_call_array_loc (loc, type, fn, n, argarray);
          if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
            {
              ret = targetm.fold_builtin (fndecl, n, argarray, false);
              if (ret)
                return ret;
              return build_call_array_loc (loc, type, fn, n, argarray);
            }
          else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
            {
              /* First try the transformations that don't require consing up
                 an exp.  */
              ret = fold_builtin_n (loc, fndecl, argarray, n, false);
              if (ret)
                return ret;
            }

          /* If we got this far, we need to build an exp.  */
          exp = build_call_array_loc (loc, type, fn, n, argarray);
          ret = fold_builtin_varargs (loc, fndecl, exp, false);
          return ret ? ret : exp;
        }
    }

  return build_call_array_loc (loc, type, fn, n, argarray);
}
/* Construct a new CALL_EXPR using the tail of the argument list of EXP
   along with N new arguments specified as the "..." parameters.  SKIP
   is the number of arguments in EXP to be omitted.  This function is used
   to do varargs-to-varargs transformations.  */

static tree
rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
{
  va_list ap;
  tree t;

  va_start (ap, n);
  t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
                                CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
  va_end (ap);

  return t;
}
/* Validate a single argument ARG against a tree code CODE representing
   a type.  */

static bool
validate_arg (const_tree arg, enum tree_code code)
{
  if (!arg)
    return false;
  else if (code == POINTER_TYPE)
    return POINTER_TYPE_P (TREE_TYPE (arg));
  else if (code == INTEGER_TYPE)
    return INTEGRAL_TYPE_P (TREE_TYPE (arg));
  return code == TREE_CODE (TREE_TYPE (arg));
}
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipses, otherwise the last specifier must be a
   VOID_TYPE.

   This is the GIMPLE version of validate_arglist.  Eventually we want to
   completely convert builtins.c to work from GIMPLEs and the tree based
   validate_arglist will then be removed.  */

bool
validate_gimple_arglist (const_gimple call, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_tree arg;
  size_t i;

  va_start (ap, call);
  i = 0;

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
        {
        case 0:
          /* This signifies an ellipses, any further arguments are all ok.  */
          res = true;
          goto end;
        case VOID_TYPE:
          /* This signifies an endlink, if no arguments remain, return
             true, otherwise return false.  */
          res = (i == gimple_call_num_args (call));
          goto end;
        default:
          /* If no parameters remain or the parameter's code does not
             match the specified code, return false.  Otherwise continue
             checking any remaining arguments.  */
          arg = gimple_call_arg (call, i++);
          if (!validate_arg (arg, code))
            goto end;
          break;
        }
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}
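/* Illustrative sketch (not from the original source): a fixed-arity
   signature ends the specifier list with VOID_TYPE, a varargs one
   with 0:

       validate_gimple_arglist (call, POINTER_TYPE, POINTER_TYPE,
                                INTEGER_TYPE, VOID_TYPE);
           accepts exactly (ptr, ptr, int)

       validate_gimple_arglist (call, POINTER_TYPE, 0);
           accepts (ptr, ...) -- any further arguments are OK  */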
/* Default target-specific builtin expander that does nothing.  */

rtx
default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
                        rtx target ATTRIBUTE_UNUSED,
                        rtx subtarget ATTRIBUTE_UNUSED,
                        enum machine_mode mode ATTRIBUTE_UNUSED,
                        int ignore ATTRIBUTE_UNUSED)
{
  return NULL_RTX;
}
/* Returns true if EXP represents data that would potentially reside
   in a readonly section.  */

bool
readonly_data_expr (tree exp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) != ADDR_EXPR)
    return false;

  exp = get_base_address (TREE_OPERAND (exp, 0));
  if (!exp)
    return false;

  /* Make sure we call decl_readonly_section only for trees it
     can handle (since it returns true for everything it doesn't
     understand).  */
  if (TREE_CODE (exp) == STRING_CST
      || TREE_CODE (exp) == CONSTRUCTOR
      || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
    return decl_readonly_section (exp, 0);
  else
    return false;
}
/* Simplify a call to the strstr builtin.  S1 and S2 are the arguments
   to the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1, *p2;

      p2 = c_getstr (s2);
      if (p2 == NULL)
        return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
        {
          const char *r = strstr (p1, p2);
          tree tem;

          if (r == NULL)
            return build_int_cst (TREE_TYPE (s1), 0);

          /* Return an offset into the constant string argument.  */
          tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
          return fold_convert_loc (loc, type, tem);
        }

      /* The argument is const char *, and the result is char *, so we need
         a type conversion here to avoid a warning.  */
      if (p2[0] == '\0')
        return fold_convert_loc (loc, type, s1);

      if (p2[1] != '\0')
        return NULL_TREE;

      fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (!fn)
        return NULL_TREE;

      /* New argument list transforming strstr(s1, s2) to
         strchr(s1, s2[0]).  */
      return build_call_expr_loc (loc, fn, 2, s1,
                                  build_int_cst (integer_type_node, p2[0]));
    }
}
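/* Illustrative sketch (not from the original source): the three
   possible strstr simplifications above, on concrete calls:

       strstr ("hello", "lo")  =>  "hello" + 3       constant fold
       strstr (s, "")          =>  (char *) s        empty needle
       strstr (s, "c")         =>  strchr (s, 'c')   one-char needle

   Anything else is left as a real strstr call.  */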
/* Simplify a call to the strchr builtin.  S1 and S2 are the arguments to
   the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1;

      if (TREE_CODE (s2) != INTEGER_CST)
        return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
        {
          char c;
          const char *r;
          tree tem;

          if (target_char_cast (s2, &c))
            return NULL_TREE;

          r = strchr (p1, c);

          if (r == NULL)
            return build_int_cst (TREE_TYPE (s1), 0);

          /* Return an offset into the constant string argument.  */
          tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
          return fold_convert_loc (loc, type, tem);
        }
      return NULL_TREE;
    }
}
/* Simplify a call to the strrchr builtin.  S1 and S2 are the arguments to
   the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1;

      if (TREE_CODE (s2) != INTEGER_CST)
        return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
        {
          char c;
          const char *r;
          tree tem;

          if (target_char_cast (s2, &c))
            return NULL_TREE;

          r = strrchr (p1, c);

          if (r == NULL)
            return build_int_cst (TREE_TYPE (s1), 0);

          /* Return an offset into the constant string argument.  */
          tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
          return fold_convert_loc (loc, type, tem);
        }

      if (! integer_zerop (s2))
        return NULL_TREE;

      fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (!fn)
        return NULL_TREE;

      /* Transform strrchr(s1, '\0') to strchr(s1, '\0').  */
      return build_call_expr_loc (loc, fn, 2, s1, s2);
    }
}
/* Simplify a call to the strpbrk builtin.  S1 and S2 are the arguments
   to the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1, *p2;

      p2 = c_getstr (s2);
      if (p2 == NULL)
        return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
        {
          const char *r = strpbrk (p1, p2);
          tree tem;

          if (r == NULL)
            return build_int_cst (TREE_TYPE (s1), 0);

          /* Return an offset into the constant string argument.  */
          tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
          return fold_convert_loc (loc, type, tem);
        }

      if (p2[0] == '\0')
        /* strpbrk(x, "") == NULL.
           Evaluate and ignore s1 in case it had side-effects.  */
        return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);

      if (p2[1] != '\0')
        return NULL_TREE;  /* Really call strpbrk.  */

      fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (!fn)
        return NULL_TREE;

      /* New argument list transforming strpbrk(s1, s2) to
         strchr(s1, s2[0]).  */
      return build_call_expr_loc (loc, fn, 2, s1,
                                  build_int_cst (integer_type_node, p2[0]));
    }
}
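/* Illustrative sketch (not from the original source):

       strpbrk ("ab", "ba")  =>  "ab" + 0          constant fold
       strpbrk (s, "")       =>  (char *) 0,  s still evaluated for
                                              its side effects
       strpbrk (s, "c")      =>  strchr (s, 'c')  */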
/* Simplify a call to the strncat builtin.  DST, SRC, and LEN are the
   arguments to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
{
  if (!validate_arg (dst, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p = c_getstr (src);

      /* If the requested length is zero, or the src parameter string
         length is zero, return the dst parameter.  */
      if (integer_zerop (len) || (p && *p == '\0'))
        return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);

      /* If the requested len is greater than or equal to the string
         length, call strcat.  */
      if (TREE_CODE (len) == INTEGER_CST && p
          && compare_tree_int (len, strlen (p)) >= 0)
        {
          tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);

          /* If the replacement _DECL isn't initialized, don't do the
             transformation.  */
          if (!fn)
            return NULL_TREE;

          return build_call_expr_loc (loc, fn, 2, dst, src);
        }
      return NULL_TREE;
    }
}
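/* Illustrative sketch (not from the original source):

       strncat (dst, src, 0)   =>  dst                  nothing to copy
       strncat (dst, "", n)    =>  dst
       strncat (dst, "ab", 5)  =>  strcat (dst, "ab")   5 >= strlen ("ab")

   The skipped operands remain in the tree via omit_two_operands_loc so
   their side effects are still evaluated.  */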
/* Simplify a call to the strspn builtin.  S1 and S2 are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strspn (location_t loc, tree s1, tree s2)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);

      /* If both arguments are constants, evaluate at compile-time.  */
      if (p1 && p2)
        {
          const size_t r = strspn (p1, p2);
          return build_int_cst (size_type_node, r);
        }

      /* If either argument is "", the result is zero.  */
      if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
        /* Evaluate and ignore both arguments in case either one has
           side effects.  */
        return omit_two_operands_loc (loc, size_type_node, size_zero_node,
                                      s1, s2);

      return NULL_TREE;
    }
}
/* Simplify a call to the strcspn builtin.  S1 and S2 are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strcspn (location_t loc, tree s1, tree s2)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);

      /* If both arguments are constants, evaluate at compile-time.  */
      if (p1 && p2)
        {
          const size_t r = strcspn (p1, p2);
          return build_int_cst (size_type_node, r);
        }

      /* If the first argument is "", the result is zero.  */
      if (p1 && *p1 == '\0')
        {
          /* Evaluate and ignore argument s2 in case it has
             side effects.  */
          return omit_one_operand_loc (loc, size_type_node,
                                       size_zero_node, s2);
        }

      /* If the second argument is "", return __builtin_strlen(s1).  */
      if (p2 && *p2 == '\0')
        {
          tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);

          /* If the replacement _DECL isn't initialized, don't do the
             transformation.  */
          if (!fn)
            return NULL_TREE;

          return build_call_expr_loc (loc, fn, 1, s1);
        }
      return NULL_TREE;
    }
}
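/* Illustrative sketch (not from the original source):

       strspn ("aab", "a")  =>  2            both strings constant
       strspn (s, "")       =>  0            operands still evaluated
       strcspn ("", s)      =>  0
       strcspn (s, "")      =>  strlen (s)  */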
/* Fold the next_arg or va_start call EXP.  Returns true if an error
   was produced, false otherwise.  This is done so that we don't output
   the error or warning twice or three times.  */

static bool
fold_builtin_next_arg (tree exp, bool va_start_p)
{
  tree fntype = TREE_TYPE (current_function_decl);
  int nargs = call_expr_nargs (exp);
  tree arg;
  /* There is a good chance the current input_location points inside the
     definition of the va_start macro (perhaps on the token for
     builtin) in a system header, so warnings will not be emitted.
     Use the location in real source code.  */
  source_location current_location =
    linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
                                              NULL);

  if (!stdarg_p (fntype))
    {
      error ("%<va_start%> used in function with fixed args");
      return true;
    }

  if (va_start_p)
    {
      if (nargs != 2)
        {
          error ("wrong number of arguments to function %<va_start%>");
          return true;
        }
      arg = CALL_EXPR_ARG (exp, 1);
    }
  /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
     when we checked the arguments and if needed issued a warning.  */
  else if (nargs == 0)
    {
      /* Evidently an out of date version of <stdarg.h>; can't validate
         va_start's second argument, but can still work as intended.  */
      warning_at (current_location,
                  OPT_Wvarargs,
                  "%<__builtin_next_arg%> called without an argument");
      return true;
    }
  else if (nargs > 1)
    {
      error ("wrong number of arguments to function %<__builtin_next_arg%>");
      return true;
    }
  else
    arg = CALL_EXPR_ARG (exp, 0);

  if (TREE_CODE (arg) == SSA_NAME)
    arg = SSA_NAME_VAR (arg);

  /* We destructively modify the call to be __builtin_va_start (ap, 0)
     or __builtin_next_arg (0) the first time we see it, after checking
     the arguments and if needed issuing a warning.  */
  if (!integer_zerop (arg))
    {
      tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));

      /* Strip off all nops for the sake of the comparison.  This
         is not quite the same as STRIP_NOPS.  It does more.
         We must also strip off INDIRECT_EXPR for C++ reference
         parameters.  */
      while (CONVERT_EXPR_P (arg)
             || TREE_CODE (arg) == INDIRECT_REF)
        arg = TREE_OPERAND (arg, 0);
      if (arg != last_parm)
        {
          /* FIXME: Sometimes with the tree optimizers we can get not
             the last argument even though the user used the last
             argument.  We just warn and set the arg to be the last
             argument so that we will not get wrong code because of
             it.  */
          warning_at (current_location,
                      OPT_Wvarargs,
                      "second parameter of %<va_start%> not last named argument");
        }

      /* Undefined by C99 7.15.1.4p4 (va_start):
         "If the parameter parmN is declared with the register storage
         class, with a function or array type, or with a type that is
         not compatible with the type that results after application of
         the default argument promotions, the behavior is undefined."
      */
      else if (DECL_REGISTER (arg))
        {
          warning_at (current_location,
                      OPT_Wvarargs,
                      "undefined behaviour when second parameter of "
                      "%<va_start%> is declared with %<register%> storage");
        }

      /* We want to verify the second parameter just once before the tree
         optimizers are run and then avoid keeping it in the tree,
         as otherwise we could warn even for correct code like:
         void foo (int i, ...)
         { va_list ap; i++; va_start (ap, i); va_end (ap); }  */
      if (va_start_p)
        CALL_EXPR_ARG (exp, 1) = integer_zero_node;
      else
        CALL_EXPR_ARG (exp, 0) = integer_zero_node;
    }
  return false;
}
/* Expand a call EXP to __builtin_object_size.  */

static rtx
expand_builtin_object_size (tree exp)
{
  tree ost;
  int object_size_type;
  tree fndecl = get_callee_fndecl (exp);

  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      error ("%Kfirst argument of %D must be a pointer, second integer constant",
             exp, fndecl);
      expand_builtin_trap ();
      return const0_rtx;
    }

  ost = CALL_EXPR_ARG (exp, 1);
  STRIP_NOPS (ost);

  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    {
      error ("%Klast argument of %D is not integer constant between 0 and 3",
             exp, fndecl);
      expand_builtin_trap ();
      return const0_rtx;
    }

  object_size_type = tree_to_shwi (ost);

  return object_size_type < 2 ? constm1_rtx : const0_rtx;
}
/* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   FCODE is the BUILT_IN_* to use.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
                           enum built_in_function fcode)
{
  tree dest, src, len, size;

  if (!validate_arglist (exp,
                         POINTER_TYPE,
                         fcode == BUILT_IN_MEMSET_CHK
                         ? INTEGER_TYPE : POINTER_TYPE,
                         INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dest = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);
  len = CALL_EXPR_ARG (exp, 2);
  size = CALL_EXPR_ARG (exp, 3);

  if (! tree_fits_uhwi_p (size))
    return NULL_RTX;

  if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
    {
      tree fn;

      if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
        {
          warning_at (tree_nonartificial_location (exp),
                      0, "%Kcall to %D will always overflow destination buffer",
                      exp, get_callee_fndecl (exp));
          return NULL_RTX;
        }

      fn = NULL_TREE;
      /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
         mem{cpy,pcpy,move,set} is available.  */
      switch (fcode)
        {
        case BUILT_IN_MEMCPY_CHK:
          fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
          break;
        case BUILT_IN_MEMPCPY_CHK:
          fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
          break;
        case BUILT_IN_MEMMOVE_CHK:
          fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
          break;
        case BUILT_IN_MEMSET_CHK:
          fn = builtin_decl_explicit (BUILT_IN_MEMSET);
          break;
        default:
          break;
        }

      if (! fn)
        return NULL_RTX;

      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_expr (fn, target, mode, EXPAND_NORMAL);
    }
  else if (fcode == BUILT_IN_MEMSET_CHK)
    return NULL_RTX;
  else
    {
      unsigned int dest_align = get_pointer_alignment (dest);

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
        return NULL_RTX;

      /* If SRC and DEST are the same (and not volatile), do nothing.  */
      if (operand_equal_p (src, dest, 0))
        {
          tree expr;

          if (fcode != BUILT_IN_MEMPCPY_CHK)
            {
              /* Evaluate and ignore LEN in case it has side-effects.  */
              expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
              return expand_expr (dest, target, mode, EXPAND_NORMAL);
            }

          expr = fold_build_pointer_plus (dest, len);
          return expand_expr (expr, target, mode, EXPAND_NORMAL);
        }

      /* __memmove_chk special case.  */
      if (fcode == BUILT_IN_MEMMOVE_CHK)
        {
          unsigned int src_align = get_pointer_alignment (src);

          if (src_align == 0)
            return NULL_RTX;

          /* If src is categorized for a readonly section we can use
             normal __memcpy_chk.  */
          if (readonly_data_expr (src))
            {
              tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
              if (!fn)
                return NULL_RTX;
              fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
                                          dest, src, len, size);
              gcc_assert (TREE_CODE (fn) == CALL_EXPR);
              CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
              return expand_expr (fn, target, mode, EXPAND_NORMAL);
            }
        }
      return NULL_RTX;
    }
}
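/* Illustrative sketch (not from the original source): for

       char buf[8];
       __builtin___memcpy_chk (buf, src, 4, __builtin_object_size (buf, 0));

   SIZE folds to 8 and LEN is the known constant 4, so the call expands
   as a plain memcpy (buf, src, 4); with LEN = 16 the "will always
   overflow" warning above fires and the checking call is kept.  */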
/* Emit warning if a buffer overflow is detected at compile time.  */

static void
maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
{
  int is_strlen = 0;
  tree len, size;
  location_t loc = tree_nonartificial_location (exp);

  switch (fcode)
    {
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
    /* For __strcat_chk the warning will be emitted only if overflowing
       by at least strlen (dest) + 1 bytes.  */
    case BUILT_IN_STRCAT_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 2);
      is_strlen = 1;
      break;
    case BUILT_IN_STRNCAT_CHK:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      len = CALL_EXPR_ARG (exp, 2);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    default:
      gcc_unreachable ();
    }

  if (!len || !size)
    return;

  if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
    return;

  if (is_strlen)
    {
      len = c_strlen (len, 1);
      if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
        return;
    }
  else if (fcode == BUILT_IN_STRNCAT_CHK)
    {
      tree src = CALL_EXPR_ARG (exp, 1);
      if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
        return;
      src = c_strlen (src, 1);
      if (! src || ! tree_fits_uhwi_p (src))
        {
          warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
                      exp, get_callee_fndecl (exp));
          return;
        }
      else if (tree_int_cst_lt (src, size))
        return;
    }
  else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
    return;

  warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
              exp, get_callee_fndecl (exp));
}
/* Emit warning if a buffer overflow is detected at compile time
   in __sprintf_chk/__vsprintf_chk calls.  */

static void
maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
{
  tree size, len, fmt;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return;
  size = CALL_EXPR_ARG (exp, 2);
  fmt = CALL_EXPR_ARG (exp, 3);

  if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
    return;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return;

  if (!init_target_chars ())
    return;

  /* If the format doesn't contain % args or %%, we know its size.  */
  if (strchr (fmt_str, target_percent) == 0)
    len = build_int_cstu (size_type_node, strlen (fmt_str));
  /* If the format is "%s" and first ... argument is a string literal,
     we know it too.  */
  else if (fcode == BUILT_IN_SPRINTF_CHK
           && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree arg;

      if (nargs < 5)
        return;
      arg = CALL_EXPR_ARG (exp, 4);
      if (! POINTER_TYPE_P (TREE_TYPE (arg)))
        return;

      len = c_strlen (arg, 1);
      if (!len || ! tree_fits_uhwi_p (len))
        return;
    }
  else
    return;

  if (! tree_int_cst_lt (len, size))
    warning_at (tree_nonartificial_location (exp),
                0, "%Kcall to %D will always overflow destination buffer",
                exp, get_callee_fndecl (exp));
}
/* Emit warning if a free is called with address of a variable.  */

static void
maybe_emit_free_warning (tree exp)
{
  tree arg = CALL_EXPR_ARG (exp, 0);

  STRIP_NOPS (arg);
  if (TREE_CODE (arg) != ADDR_EXPR)
    return;

  arg = get_base_address (TREE_OPERAND (arg, 0));
  if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
    return;

  if (SSA_VAR_P (arg))
    warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
                "%Kattempt to free a non-heap object %qD", exp, arg);
  else
    warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
                "%Kattempt to free a non-heap object", exp);
}
/* Fold a call to __builtin_object_size with arguments PTR and OST,
   if possible.  */

static tree
fold_builtin_object_size (tree ptr, tree ost)
{
  unsigned HOST_WIDE_INT bytes;
  int object_size_type;

  if (!validate_arg (ptr, POINTER_TYPE)
      || !validate_arg (ost, INTEGER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (ost);

  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    return NULL_TREE;

  object_size_type = tree_to_shwi (ost);

  /* __builtin_object_size doesn't evaluate side-effects in its arguments;
     if there are any side-effects, it returns (size_t) -1 for types 0 and 1
     and (size_t) 0 for types 2 and 3.  */
  if (TREE_SIDE_EFFECTS (ptr))
    return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      bytes = compute_builtin_object_size (ptr, object_size_type);
      if (wi::fits_to_tree_p (bytes, size_type_node))
        return build_int_cstu (size_type_node, bytes);
    }
  else if (TREE_CODE (ptr) == SSA_NAME)
    {
      /* If object size is not known yet, delay folding until
         later.  Maybe subsequent passes will help determining
         it.  */
      bytes = compute_builtin_object_size (ptr, object_size_type);
      if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
          && wi::fits_to_tree_p (bytes, size_type_node))
        return build_int_cstu (size_type_node, bytes);
    }

  return NULL_TREE;
}
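/* Illustrative sketch (not from the original source):

       char a[10];
       __builtin_object_size (&a[4], 0)  =>  6    bytes to end of object

   When the size is unknown, types 0 and 1 (maximum queries) fall back
   to (size_t) -1 and types 2 and 3 (minimum queries) to (size_t) 0,
   which is why the two fallbacks in the expander above differ.  */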
/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
   LEN, and SIZE.  */

static tree
fold_builtin_strncat_chk (location_t loc, tree fndecl,
                          tree dest, tree src, tree len, tree size)
{
  tree fn;
  const char *p;

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE)
      || !validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;

  p = c_getstr (src);
  /* If the SRC parameter is "" or if LEN is 0, return DEST.  */
  if (p && *p == '\0')
    return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
  else if (integer_zerop (len))
    return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);

  if (! tree_fits_uhwi_p (size))
    return NULL_TREE;

  if (! integer_all_onesp (size))
    {
      tree src_len = c_strlen (src, 1);
      if (src_len
          && tree_fits_uhwi_p (src_len)
          && tree_fits_uhwi_p (len)
          && ! tree_int_cst_lt (len, src_len))
        {
          /* If LEN >= strlen (SRC), optimize into __strcat_chk.  */
          fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
          if (!fn)
            return NULL_TREE;

          return build_call_expr_loc (loc, fn, 3, dest, src, size);
        }
      return NULL_TREE;
    }

  /* If __builtin_strncat_chk is used, assume strncat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
  if (!fn)
    return NULL_TREE;

  return build_call_expr_loc (loc, fn, 3, dest, src, len);
}
/* Builtins with folding operations that operate on "..." arguments
   need special handling; we need to store the arguments in a convenient
   data structure before attempting any folding.  Fortunately there are
   only a few builtins that fall into this category.  FNDECL is the
   function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
   result of the function call is ignored.  */

static tree
fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
                      bool ignore ATTRIBUTE_UNUSED)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  tree ret = NULL_TREE;

  switch (fcode)
    {
    case BUILT_IN_FPCLASSIFY:
      ret = fold_builtin_fpclassify (loc, exp);
      break;

    default:
      break;
    }
  if (ret)
    {
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}
/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
   FMT and ARG are the arguments to the call; we don't fold cases with
   more than 2 arguments, and ARG may be null if this is a 1-argument case.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.  FCODE is the BUILT_IN_*
   code of the function to be simplified.  */

static tree
fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
                     tree arg, bool ignore,
                     enum built_in_function fcode)
{
  tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (! ignore)
    return NULL_TREE;

  /* Verify the required arguments in the original call.  */
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
         unlocked functions exist explicitly.  */
      fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
      fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
    }
  else
    {
      fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
      fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
    }

  if (!init_target_chars ())
    return NULL_TREE;

  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
        {
          if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
            return NULL_TREE;

          if (!arg || !validate_arg (arg, POINTER_TYPE))
            return NULL_TREE;

          str = c_getstr (arg);
          if (str == NULL)
            return NULL_TREE;
        }
      else
        {
          /* The format specifier doesn't contain any '%' characters.  */
          if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
              && arg)
            return NULL_TREE;
          str = fmt_str;
        }

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
        return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
        {
          /* Given printf("c"), (where c is any one character,)
             convert "c"[0] to an int and pass that to the replacement
             function.  */
          newarg = build_int_cst (integer_type_node, str[0]);
          if (fn_putchar)
            call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
        }
      else
        {
          /* If the string was "string\n", call puts("string").  */
          size_t len = strlen (str);
          if ((unsigned char)str[len - 1] == target_newline
              && (size_t) (int) len == len)
            {
              char *newstr;
              tree offset_node, string_cst;

              /* Create a NUL-terminated string that's one char shorter
                 than the original, stripping off the trailing '\n'.  */
              newarg = build_string_literal (len, str);
              string_cst = string_constant (newarg, &offset_node);
              gcc_checking_assert (string_cst
                                   && (TREE_STRING_LENGTH (string_cst)
                                       == (int) len)
                                   && integer_zerop (offset_node)
                                   && (unsigned char)
                                      TREE_STRING_POINTER (string_cst)[len - 1]
                                      == target_newline);
              /* build_string_literal creates a new STRING_CST,
                 modify it in place to avoid double copying.  */
              newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
              newstr[len - 1] = '\0';
              if (fn_puts)
                call = build_call_expr_loc (loc, fn_puts, 1, newarg);
            }
          else
            /* We'd like to arrange to call fputs(string,stdout) here,
               but we need stdout and don't have a way to get it yet.  */
            return NULL_TREE;
        }
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return NULL_TREE;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || !validate_arg (arg, POINTER_TYPE))
        return NULL_TREE;
      if (fn_puts)
        call = build_call_expr_loc (loc, fn_puts, 1, arg);
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || !validate_arg (arg, INTEGER_TYPE))
        return NULL_TREE;
      if (fn_putchar)
        call = build_call_expr_loc (loc, fn_putchar, 1, arg);
    }

  if (!call)
    return NULL_TREE;

  return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
}
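/* Illustrative sketch (not from the original source), assuming the
   call's value is unused:

       printf ("")         =>  folded away (result 0)
       printf ("x")        =>  putchar ('x')
       printf ("hi\n")     =>  puts ("hi")
       printf ("%s\n", s)  =>  puts (s)
       printf ("%c", c)    =>  putchar (c)
       printf ("abc")      =>  kept: no trailing newline, and there is
                               no portable way to name stdout for an
                               fputs replacement here  */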
/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
   FP, FMT, and ARG are the arguments to the call.  We don't fold calls with
   more than 3 arguments, and ARG may be null in the 2-argument case.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.  FCODE is the BUILT_IN_*
   code of the function to be simplified.  */

static tree
fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
                      tree fmt, tree arg, bool ignore,
                      enum built_in_function fcode)
{
  tree fn_fputc, fn_fputs, call = NULL_TREE;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (! ignore)
    return NULL_TREE;

  /* Verify the required arguments in the original call.  */
  if (!validate_arg (fp, POINTER_TYPE))
    return NULL_TREE;
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
         unlocked functions exist explicitly.  */
      fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
      fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
    }
  else
    {
      fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
      fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
    }

  if (!init_target_chars ())
    return NULL_TREE;

  /* If the format doesn't contain % args or %%, use fputs.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
          && arg)
        return NULL_TREE;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
        {
          /* If FP has side-effects, just wait until gimplification is
             real.  */
          if (TREE_SIDE_EFFECTS (fp))
            return NULL_TREE;

          return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
        }

      /* When "string" doesn't contain %, replace all cases of
         fprintf (fp, string) with fputs (string, fp).  The fputs
         builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
        call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return NULL_TREE;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || !validate_arg (arg, POINTER_TYPE))
        return NULL_TREE;
      if (fn_fputs)
        call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || !validate_arg (arg, INTEGER_TYPE))
        return NULL_TREE;
      if (fn_fputc)
        call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
    }

  if (!call)
    return NULL_TREE;

  return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
}
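/* Editor's note: the corresponding fprintf rewrites (illustrative, not
   part of GCC itself):

     fprintf (fp, "hello");   =>  fputs ("hello", fp);
     fprintf (fp, "%s", s);   =>  fputs (s, fp);
     fprintf (fp, "%c", c);   =>  fputc (c, fp);
     fprintf (fp, "");        =>  0   (when FP has no side effects)

   All of these apply only when the call's return value is ignored, since
   fputs/fputc do not return the character count that fprintf would.  */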
/* Initialize format string characters in the target charset.  */

static bool
init_target_chars (void)
{
  static bool init;
  if (!init)
    {
      target_newline = lang_hooks.to_target_charset ('\n');
      target_percent = lang_hooks.to_target_charset ('%');
      target_c = lang_hooks.to_target_charset ('c');
      target_s = lang_hooks.to_target_charset ('s');
      if (target_newline == 0 || target_percent == 0 || target_c == 0
          || target_s == 0)
        return false;

      target_percent_c[0] = target_percent;
      target_percent_c[1] = target_c;
      target_percent_c[2] = '\0';

      target_percent_s[0] = target_percent;
      target_percent_s[1] = target_s;
      target_percent_s[2] = '\0';

      target_percent_s_newline[0] = target_percent;
      target_percent_s_newline[1] = target_s;
      target_percent_s_newline[2] = target_newline;
      target_percent_s_newline[3] = '\0';

      init = true;
    }
  return true;
}
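/* Editor's note (illustrative, not part of GCC): on an ASCII target the
   strings built above are simply "%c", "%s" and "%s\n".  The charset hook
   matters when the target execution character set differs from the host's,
   e.g. EBCDIC, where '%' is 0x6C rather than ASCII 0x25.  Comparisons in
   the printf/fprintf folders such as

     strcmp (fmt_str, target_percent_s) == 0

   therefore match bytes in the *target* charset, not the host's.  */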
/* Helper function for do_mpfr_arg*().  Ensure M is a normal number
   and no overflow/underflow occurred.  INEXACT is true if M was not
   exactly calculated.  TYPE is the tree type for the result.  This
   function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail.  */

static tree
do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
      && (!flag_rounding_math || !inexact))
    {
      REAL_VALUE_TYPE rr;

      real_from_mpfr (&rr, m, type, GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
         check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
         but the mpfr_t is not, then we underflowed in the
         conversion.  */
      if (real_isfinite (&rr)
          && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
        {
          REAL_VALUE_TYPE rmode;

          real_convert (&rmode, TYPE_MODE (type), &rr);
          /* Proceed iff the specified mode can hold the value.  */
          if (real_identical (&rmode, &rr))
            return build_real (type, rmode);
        }
    }
  return NULL_TREE;
}
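/* Editor's note: a minimal standalone sketch of the clear-flags /
   compute / check protocol that do_mpfr_ckconv() assumes, using only the
   public MPFR API (illustrative; not compiled into GCC, helper name is
   hypothetical):

     #include <mpfr.h>

     int exact_sin (mpfr_t rop, const mpfr_t op)
     {
       mpfr_clear_flags ();                       // reset global flags
       int inexact = mpfr_sin (rop, op, MPFR_RNDN);
       return mpfr_number_p (rop)                 // not NaN or Inf
              && !mpfr_overflow_p ()
              && !mpfr_underflow_p ()
              && !inexact;                        // result was exact
     }

   do_mpfr_ckconv() performs the same flag checks (relaxing the exactness
   test unless -frounding-math is in effect) and additionally round-trips
   the value through REAL_VALUE_TYPE and the target mode to make sure
   nothing is lost in conversion.  */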
/* Helper function for do_mpc_arg*().  Ensure M is a normal complex
   number and no overflow/underflow occurred.  INEXACT is true if M
   was not exactly calculated.  TYPE is the tree type for the result.
   This function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail; if
   FORCE_CONVERT is true, bypass the checks.  */

static tree
do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (force_convert
      || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
          && !mpfr_overflow_p () && !mpfr_underflow_p ()
          && (!flag_rounding_math || !inexact)))
    {
      REAL_VALUE_TYPE re, im;

      real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
      real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
         check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
         but the mpfr_t is not, then we underflowed in the
         conversion.  */
      if (force_convert
          || (real_isfinite (&re) && real_isfinite (&im)
              && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
              && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
        {
          REAL_VALUE_TYPE re_mode, im_mode;

          real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
          real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
          /* Proceed iff the specified mode can hold the value.  */
          if (force_convert
              || (real_identical (&re_mode, &re)
                  && real_identical (&im_mode, &im)))
            return build_complex (type, build_real (TREE_TYPE (type), re_mode),
                                  build_real (TREE_TYPE (type), im_mode));
        }
    }
  return NULL_TREE;
}
/* If argument ARG is a REAL_CST, call the one-argument mpfr function
   FUNC on it and return the resulting value as a tree with type TYPE.
   If MIN and/or MAX are not NULL, then the supplied ARG must be
   within those bounds.  If INCLUSIVE is true, then MIN/MAX are
   acceptable values, otherwise they are not.  The mpfr precision is
   set to the precision of TYPE.  We assume that function FUNC returns
   zero if the result could be calculated exactly within the requested
   precision.  */

static tree
do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
              const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
              bool inclusive)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      if (real_isfinite (ra)
          && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min))
          && (!max || real_compare (inclusive ? LE_EXPR : LT_EXPR, ra, max)))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          int inexact;
          mpfr_t m;

          mpfr_init2 (m, prec);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = func (m, m, rnd);
          result = do_mpfr_ckconv (m, type, inexact);
          mpfr_clear (m);
        }
    }

  return result;
}
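/* Editor's note (illustrative, not part of GCC): do_mpfr_arg1() is what
   lets a call with a constant argument, e.g.

     double d = sin (1.0);

   be folded to the constant 0.8414709848078965... at compile time.  Under
   -frounding-math only exactly representable results are folded, and the
   MIN/MAX bounds reject out-of-domain arguments: acos (2.0) is left alone
   so the runtime call can raise the domain error (errno/NaN).  */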
/* If arguments ARG1 and ARG2 are REAL_CSTs, call the two-argument mpfr
   function FUNC on them and return the resulting value as a tree with
   type TYPE.  The mpfr precision is set to the precision of TYPE.  We
   assume that function FUNC returns zero if the result could be
   calculated exactly within the requested precision.  */

static tree
do_mpfr_arg2 (tree arg1, tree arg2, tree type,
              int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
    {
      const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
      const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);

      if (real_isfinite (ra1) && real_isfinite (ra2))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          int inexact;
          mpfr_t m1, m2;

          mpfr_inits2 (prec, m1, m2, NULL);
          mpfr_from_real (m1, ra1, GMP_RNDN);
          mpfr_from_real (m2, ra2, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = func (m1, m1, m2, rnd);
          result = do_mpfr_ckconv (m1, type, inexact);
          mpfr_clears (m1, m2, NULL);
        }
    }

  return result;
}
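/* Editor's note (illustrative, not part of GCC): the two-argument case
   covers folds such as

     pow (2.0, 10.0)   =>  1024.0
     atan2 (1.0, 1.0)  =>  0.7853981633974483...

   computed with MPFR at exactly the precision of the result type.  */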
/* If arguments ARG1, ARG2 and ARG3 are REAL_CSTs, call the three-argument
   mpfr function FUNC on them and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of TYPE.
   We assume that function FUNC returns zero if the result could be
   calculated exactly within the requested precision.  */

static tree
do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
              int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr,
                          mp_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);
  STRIP_NOPS (arg3);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
      && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
    {
      const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
      const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
      const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);

      if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          int inexact;
          mpfr_t m1, m2, m3;

          mpfr_inits2 (prec, m1, m2, m3, NULL);
          mpfr_from_real (m1, ra1, GMP_RNDN);
          mpfr_from_real (m2, ra2, GMP_RNDN);
          mpfr_from_real (m3, ra3, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = func (m1, m1, m2, m3, rnd);
          result = do_mpfr_ckconv (m1, type, inexact);
          mpfr_clears (m1, m2, m3, NULL);
        }
    }

  return result;
}
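/* Editor's note (illustrative, not part of GCC): the three-argument case
   serves fma folding, e.g.

     fma (2.0, 3.0, 1.0)  =>  7.0

   computed as a single correctly rounded MPFR operation (mpfr_fma), which
   matches fma's no-intermediate-rounding semantics.  */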
/* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
   the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
   If ARG_SINP and ARG_COSP are NULL then the result is returned
   as a complex value.
   The type is taken from the type of ARG and is used for setting the
   precision of the calculation and results.  */

static tree
do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
{
  tree const type = TREE_TYPE (arg);
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST
      && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      if (real_isfinite (ra))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          tree result_s, result_c;
          int inexact;
          mpfr_t m, ms, mc;

          mpfr_inits2 (prec, m, ms, mc, NULL);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = mpfr_sin_cos (ms, mc, m, rnd);
          result_s = do_mpfr_ckconv (ms, type, inexact);
          result_c = do_mpfr_ckconv (mc, type, inexact);
          mpfr_clears (m, ms, mc, NULL);
          if (result_s && result_c)
            {
              /* If we are to return in a complex value do so.  */
              if (!arg_sinp && !arg_cosp)
                return build_complex (build_complex_type (type),
                                      result_c, result_s);

              /* Dereference the sin/cos pointer arguments.  */
              arg_sinp = build_fold_indirect_ref (arg_sinp);
              arg_cosp = build_fold_indirect_ref (arg_cosp);
              /* Proceed if valid pointer types were passed in.  */
              if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp))
                    == TYPE_MAIN_VARIANT (type)
                  && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp))
                       == TYPE_MAIN_VARIANT (type))
                {
                  /* Set the values.  */
                  result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
                                          result_s);
                  TREE_SIDE_EFFECTS (result_s) = 1;
                  result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
                                          result_c);
                  TREE_SIDE_EFFECTS (result_c) = 1;
                  /* Combine the assignments into a compound expr.  */
                  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                    result_s, result_c));
                }
            }
        }
    }
  return result;
}
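/* Editor's note (illustrative, not part of GCC): for a constant argument,

     sincos (0.5, &s, &c);

   is folded into the pair of assignments s = 0.4794255..., c = 0.8775825...,
   joined into one COMPOUND_EXPR; with null pointer arguments the same
   helper folds cexpi (0.5) into the complex constant cos(0.5) + sin(0.5)*I
   (note that build_complex above takes the cosine as the real part).  */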
/* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
   two-argument mpfr order N Bessel function FUNC on them and return
   the resulting value as a tree with type TYPE.  The mpfr precision
   is set to the precision of TYPE.  We assume that function FUNC
   returns zero if the result could be calculated exactly within the
   requested precision.  */

static tree
do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
                  int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
                  const REAL_VALUE_TYPE *min, bool inclusive)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && tree_fits_shwi_p (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
    {
      const HOST_WIDE_INT n = tree_to_shwi (arg1);
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);

      if (n == (long) n
          && real_isfinite (ra)
          && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min)))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          int inexact;
          mpfr_t m;

          mpfr_init2 (m, prec);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = func (m, n, m, rnd);
          result = do_mpfr_ckconv (m, type, inexact);
          mpfr_clear (m);
        }
    }

  return result;
}
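/* Editor's note (illustrative, not part of GCC): this handles the jn/yn
   family, e.g.

     jn (2, 1.5)   =>  0.2320876...

   FUNC receives the order N as a host long (the mpfr_jn/mpfr_yn
   signature), which is why ARG1 must fit in a signed HOST_WIDE_INT and
   survive the n == (long) n check above.  */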
/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          tree result_rem;
          long integer_quo;
          mpfr_t m0, m1;

          mpfr_inits2 (prec, m0, m1, NULL);
          mpfr_from_real (m0, ra0, GMP_RNDN);
          mpfr_from_real (m1, ra1, GMP_RNDN);
          mpfr_clear_flags ();
          mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
          /* Remquo is independent of the rounding mode, so pass
             inexact=0 to do_mpfr_ckconv().  */
          result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
          mpfr_clears (m0, m1, NULL);
          if (result_rem)
            {
              /* MPFR calculates quo in the host's long so it may
                 return more bits in quo than the target int can hold
                 if sizeof(host long) > sizeof(target int).  This can
                 happen even for native compilers in LP64 mode.  In
                 these cases, modulo the quo value with the largest
                 number that the target int can hold while leaving one
                 bit for the sign.  */
              if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
                integer_quo %= (long) (1UL << (INT_TYPE_SIZE - 1));

              /* Dereference the quo pointer argument.  */
              arg_quo = build_fold_indirect_ref (arg_quo);
              /* Proceed iff a valid pointer type was passed in.  */
              if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
                {
                  /* Set the value.  */
                  tree result_quo
                    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
                                   build_int_cst (TREE_TYPE (arg_quo),
                                                  integer_quo));
                  TREE_SIDE_EFFECTS (result_quo) = 1;
                  /* Combine the quo assignment with the rem.  */
                  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                    result_quo, result_rem));
                }
            }
        }
    }
  return result;
}
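/* Editor's note (illustrative, not part of GCC): on an LP64 host
   targeting a 32-bit int, INT_TYPE_SIZE is 32 while the host long has
   64 bits, so the reduction above computes

     integer_quo %= (long) (1UL << 31);

   keeping only the low 31 magnitude bits (the sign is preserved by C's
   remainder).  That is sufficient for remquo(), which only guarantees
   that *quo has the sign of x/y and at least the low 3 bits of the
   integral quotient.  */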
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
         negative integer.  */
      if (real_isfinite (ra)
          && ra->cl != rvc_zero
          && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          int inexact, sg;
          mpfr_t m;
          tree result_lg;

          mpfr_init2 (m, prec);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = mpfr_lgamma (m, &sg, m, rnd);
          result_lg = do_mpfr_ckconv (m, type, inexact);
          mpfr_clear (m);
          if (result_lg)
            {
              tree result_sg;

              /* Dereference the arg_sg pointer argument.  */
              arg_sg = build_fold_indirect_ref (arg_sg);
              /* Assign the signgam value into *arg_sg.  */
              result_sg = fold_build2 (MODIFY_EXPR,
                                       TREE_TYPE (arg_sg), arg_sg,
                                       build_int_cst (TREE_TYPE (arg_sg), sg));
              TREE_SIDE_EFFECTS (result_sg) = 1;
              /* Combine the signgam assignment with the lgamma result.  */
              result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                result_sg, result_lg));
            }
        }
    }

  return result;
}
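/* Editor's note (illustrative, not part of GCC): for a constant argument,

     r = lgamma_r (3.5, &sg);

   folds to the pair sg = 1 (Gamma(3.5) is positive) and r = 1.2009736...,
   joined in a COMPOUND_EXPR.  Zero and negative integers are excluded
   above because lgamma has poles there.  */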
/* If argument ARG is a COMPLEX_CST, call the one-argument mpc
   function FUNC on it and return the resulting value as a tree with
   type TYPE.  The mpfr precision is set to the precision of TYPE.  We
   assume that function FUNC returns zero if the result could be
   calculated exactly within the requested precision.  */

static tree
do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      if (real_isfinite (re) && real_isfinite (im))
        {
          const struct real_format *const fmt =
            REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
          int inexact;
          mpc_t m;

          mpc_init2 (m, prec);
          mpfr_from_real (mpc_realref (m), re, rnd);
          mpfr_from_real (mpc_imagref (m), im, rnd);
          mpfr_clear_flags ();
          inexact = func (m, m, crnd);
          result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
          mpc_clear (m);
        }
    }

  return result;
}
/* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
   mpc function FUNC on them and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
             int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
          || (real_isfinite (re0) && real_isfinite (im0)
              && real_isfinite (re1) && real_isfinite (im1)))
        {
          const struct real_format *const fmt =
            REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
          int inexact;
          mpc_t m0, m1;

          mpc_init2 (m0, prec);
          mpc_init2 (m1, prec);
          mpfr_from_real (mpc_realref (m0), re0, rnd);
          mpfr_from_real (mpc_imagref (m0), im0, rnd);
          mpfr_from_real (mpc_realref (m1), re1, rnd);
          mpfr_from_real (mpc_imagref (m1), im1, rnd);
          mpfr_clear_flags ();
          inexact = func (m0, m0, m1, crnd);
          result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
          mpc_clear (m0);
          mpc_clear (m1);
        }
    }

  return result;
}
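/* Editor's note: a minimal standalone sketch of the MPC pattern above,
   using only the public MPC API (illustrative; not compiled into GCC,
   helper name is hypothetical):

     #include <mpc.h>

     int fold_cpow (mpc_t rop, const mpc_t a, const mpc_t b,
                    mpfr_prec_t prec)
     {
       mpc_t m0, m1;
       mpc_init2 (m0, prec);          // working values at the result
       mpc_init2 (m1, prec);          // type's precision
       mpc_set (m0, a, MPC_RNDNN);
       mpc_set (m1, b, MPC_RNDNN);
       mpfr_clear_flags ();
       mpc_pow (rop, m0, m1, MPC_RNDNN);
       int ok = !mpfr_overflow_p () && !mpfr_underflow_p ();
       mpc_clear (m0);
       mpc_clear (m1);
       return ok;
     }

   This mirrors do_mpc_arg2() with FUNC == mpc_pow; the DO_NONFINITE
   flag lets callers fold complex multiply/divide even with Inf/NaN
   parts, where C annex-G semantics still define the result.  */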
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_stmt (gimple stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
                    ? gimple_call_arg_ptr (stmt, 0)
                    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
        return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
        {
          return targetm.fold_builtin (fndecl, nargs, args, ignore);
        }
      else
        {
          if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
            ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
          if (ret)
            {
              /* Propagate location information from original call to
                 expansion of builtin.  Otherwise things like
                 maybe_emit_chk_warning, that operate on the expansion
                 of a builtin, will use the wrong location information.  */
              if (gimple_has_location (stmt))
                {
                  tree realret = ret;
                  if (TREE_CODE (ret) == NOP_EXPR)
                    realret = TREE_OPERAND (ret, 0);
                  if (CAN_HAVE_LOCATION_P (realret)
                      && !EXPR_HAS_LOCATION (realret))
                    SET_EXPR_LOCATION (realret, loc);
                  return realret;
                }
              return ret;
            }
        }
    }
  return NULL_TREE;
}
/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  tree builtin;
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
              && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
              && asmspec != 0);

  builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);
  switch (DECL_FUNCTION_CODE (decl))
    {
    case BUILT_IN_MEMCPY:
      init_block_move_fn (asmspec);
      memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
      break;
    case BUILT_IN_MEMSET:
      init_block_clear_fn (asmspec);
      memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
      break;
    case BUILT_IN_MEMMOVE:
      memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
      break;
    case BUILT_IN_MEMCMP:
      memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
      break;
    case BUILT_IN_ABORT:
      abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
      break;
    case BUILT_IN_FFS:
      if (INT_TYPE_SIZE < BITS_PER_WORD)
        {
          set_user_assembler_libfunc ("ffs", asmspec);
          set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
                                                       MODE_INT, 0), "ffs");
        }
      break;
    default:
      break;
    }
}
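/* Editor's note (illustrative, not part of GCC): this hook is triggered
   by a redeclaration carrying an asm label, e.g.

     void *memcpy (void *, const void *, size_t) asm ("__my_memcpy");

   Renaming only the FUNCTION_DECL would not be enough: block moves that
   GCC emits on its own (struct copies, memcpy expansion fallbacks) go
   through the memcpy libfunc, so that libcall must be renamed too.  */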
/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
        /* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
        /* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
        /* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
        return true;

      default:
        return false;
      }

  return false;
}
/* Return true if DECL is a builtin that is not expensive, i.e., it will
   most probably be expanded inline into reasonably simple code.  This is a
   superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
        return true;

      default:
        return is_simple_builtin (decl);
      }

  return false;
}