/* Expand builtin functions.
   Copyright (C) 1988-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "double-int.h"
#include "fold-const.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "tree-object-size.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "typeclass.h"
#include "langhooks.h"
#include "tree-ssanames.h"
#include "value-prof.h"
#include "diagnostic-core.h"
#include "lto-streamer.h"
#include "tree-chkp.h"
static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
struct target_builtins default_target_builtins;
struct target_builtins *this_target_builtins = &default_target_builtins;
/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN
/* Setup an array of _DECL trees, make sure each element is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info;

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;
static rtx c_readstr (const char *, machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
static rtx result_vector (int, rtx);
#endif
static void expand_builtin_update_setjmp_buf (rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static void expand_errno_check (tree, rtx);
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_memcmp (tree, rtx, machine_mode);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
					machine_mode, int, tree);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree, bool);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree fold_builtin_nan (tree, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static bool integer_valued_real_p (tree);
static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_sqrt (location_t, tree, tree);
static tree fold_builtin_cbrt (location_t, tree, tree);
static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
static tree fold_builtin_cos (location_t, tree, tree, tree);
static tree fold_builtin_cosh (location_t, tree, tree, tree);
static tree fold_builtin_tan (tree, tree);
static tree fold_builtin_trunc (location_t, tree, tree);
static tree fold_builtin_floor (location_t, tree, tree);
static tree fold_builtin_ceil (location_t, tree, tree);
static tree fold_builtin_round (location_t, tree, tree);
static tree fold_builtin_int_roundingfn (location_t, tree, tree);
static tree fold_builtin_bitop (tree, tree);
static tree fold_builtin_strchr (location_t, tree, tree, tree);
static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_strcmp (location_t, tree, tree);
static tree fold_builtin_strncmp (location_t, tree, tree, tree);
static tree fold_builtin_signbit (location_t, tree, tree);
static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strstr (location_t, tree, tree, tree);
static tree fold_builtin_strrchr (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);
static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);
unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
			  const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_arg2 (tree, tree, tree,
			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_arg3 (tree, tree, tree, tree,
			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_sincos (tree, tree, tree);
static tree do_mpfr_bessel_n (tree, tree, tree,
			      int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
			      const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);
/* Return true if NAME starts with __builtin_ or __sync_.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  if (flag_cilkplus
      && (!strcmp (name, "__cilkrts_detach")
	  || !strcmp (name, "__cilkrts_pop_frame")))
    return true;
  return false;
}
/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}
/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

static bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}
/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in alignp and N in
   *BITPOSP and return true.  Otherwise return false and store BITS_PER_UNIT to
   *alignp and any bit-offset to *bitposp.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */
static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
			     &mode, &unsignedp, &volatilep, true);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    {
      align = TYPE_ALIGN (TREE_TYPE (exp));
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;

      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
		   & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
	  align *= BITS_PER_UNIT;
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, (step & -step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      if (!addr_p && !known_alignment
	  && TYPE_ALIGN (TREE_TYPE (exp)) > align)
	align = TYPE_ALIGN (TREE_TYPE (exp));
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}
/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be determined,
   store M in alignp and N in *BITPOSP and return true.  Otherwise return false
   and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}
/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);
  return align;
}
/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in alignp and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* We cannot really tell whether this result is an approximation.  */
	  return true;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}
/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);

  return align;
}
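/* Worked example (added for illustration, not a comment from the original
   source): if the query above reports align == 256 and bitpos == 96, the
   pointer is known to lie 96 bits (12 bytes) past a 256-bit (32-byte)
   boundary.  The guaranteed alignment is then the lowest set bit of
   bitpos: 96 & -96 == 32 bits, i.e. 4 bytes.  */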
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */
tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_LOC (src, input_location);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! tree_fits_shwi_p (offset_node))
    offset = -1;
  else
    offset = tree_to_shwi (offset_node);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset outside bounds of constant string");
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
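/* Behavior sketch (added for illustration, not a comment from the original
   source), following the cases handled above:

     c_strlen on the constant "hello" with offset 0  => ssize_int (5)
     c_strlen on "hello" with a constant offset 2    => ssize_int (3)
     c_strlen on "foo\0bar" with an unknown offset   => NULL_TREE

   The last case fails because the internal zero byte makes the distance
   to the terminating null depend on where the search starts.  */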
/* Return a char pointer for a C string if it is a string constant
   or sum of string constant and integer constant.  */

static const char *
c_getstr (tree src)
{
  tree offset_node;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  if (offset_node == 0)
    return TREE_STRING_POINTER (src);
  else if (!tree_fits_uhwi_p (offset_node)
	   || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
    return 0;

  return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
}
/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, machine_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
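/* Example (added for illustration, not a comment from the original source):
   on a little-endian target, c_readstr ("abcd", SImode) produces the
   constant 0x64636261 -- the bytes 'a' 'b' 'c' 'd' laid out as the target
   would read a 32-bit integer from memory -- while a big-endian target
   produces 0x61626364.  */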
/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode))),
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
#ifdef HAVE_builtin_setjmp_setup
  if (HAVE_builtin_setjmp_setup)
    emit_insn (gen_builtin_setjmp_setup (buf_addr));
#endif

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
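/* Resulting buffer layout (a sketch of the code above, added for
   illustration, not a comment from the original source):

     word 0        frame pointer value (targetm.builtin_setjmp_frame_value)
     word 1        address of RECEIVER_LABEL
     words 2..     stack save area in mode STACK_SAVEAREA_MODE (SAVE_NONLOCAL)

   where the first two slots are each GET_MODE_SIZE (Pmode) bytes wide.  */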
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead contruct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

#if !HARD_FRAME_POINTER_IS_ARG_POINTER
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }
#endif

#ifdef HAVE_builtin_setjmp_receiver
  if (receiver_label != NULL && HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
#ifdef HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
*iter
)
1069 return (iter
->i
< iter
->n
);
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipses, otherwise the last specifier must be a
   VOID_TYPE.  */

bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
	{
	case 0:
	  /* This signifies an ellipses, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */

 end: ;
  va_end (ap);

  return res;
}
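/* Usage sketch (added for illustration, not a comment from the original
   source): a memcpy-like builtin would be checked with

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE,
		       VOID_TYPE);

   where the trailing VOID_TYPE marks the end of the expected argument
   list, while a trailing 0 instead would accept any further arguments.  */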
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

#ifdef HAVE_nonlocal_goto
  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (HAVE_nonlocal_goto)
    emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
#endif
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}
1207 (not all will be used on all machines) that was passed to __builtin_setjmp.
1208 It updates the stack pointer in that block to correspond to the current
1212 expand_builtin_update_setjmp_buf (rtx buf_addr
)
1214 machine_mode sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
1216 = gen_rtx_MEM (sa_mode
,
1219 plus_constant (Pmode
, buf_addr
,
1220 2 * GET_MODE_SIZE (Pmode
))));
1222 emit_stack_save (SAVE_NONLOCAL
, &stack_save
);
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

#ifdef HAVE_prefetch
  if (HAVE_prefetch)
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
	return;
    }
#endif

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
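/* Source-level example (added for illustration, not a comment from the
   original source):

     __builtin_prefetch (p, 1, 3);

   reaches the code above with op1 == 1 (prefetch for write) and op2 == 3
   (high temporal locality).  Both flags must be compile-time constants,
   or the error/warning paths above are taken.  */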
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}
/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = VOIDmode;
	  }
    }
  return size;
}
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (VOIDmode, mem, reg)
			    : gen_rtx_SET (VOIDmode, reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
#ifdef STACK_GROWS_DOWNWARD
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  tem
    = force_operand (plus_constant (Pmode, tem, crtl->args.pretend_args_size),
		     NULL_RTX);
#endif
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    rtx_insn *seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  machine_mode mode;
  rtx incoming_args, result, reg, dest, src;
  rtx_call_insn *call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
				       incoming_args, 0, OPTAB_LIB_WIDEN);
#endif

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
  else
#endif
    emit_stack_save (SAVE_BLOCK, &old_stack_level);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
#ifndef STACK_GROWS_DOWNWARD
  if (CONST_INT_P (argsize))
    dest = plus_constant (Pmode, dest, -INTVAL (argsize));
  else
    dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
#endif
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
				      result, result_vector (1, result)));
  else
#endif
#ifdef HAVE_call_value
  if (HAVE_call_value)
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    gcc_assert (!valreg); /* HAVE_untyped_call required.  */

	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_call_insn (GEN_CALL_VALUE (valreg,
				      gen_rtx_MEM (FUNCTION_MODE, function),
				      const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
#endif
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
  else
#endif
    emit_stack_restore (SAVE_BLOCK, old_stack_level);
  fixup_args_size_notes (call_insn, get_last_insn (), 0);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
/* Perform an untyped return.  */

static void
expand_builtin_return (rtx result)
{
  int size, align, regno;
  machine_mode mode;
  rtx reg;
  rtx_insn *call_fusage = 0;

  result = convert_memory_address (Pmode, result);

  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

#ifdef HAVE_untyped_return
  if (HAVE_untyped_return)
    {
      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
      emit_barrier ();
      return;
    }
#endif

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
	emit_move_insn (reg, adjust_address (result, mode, size));

	push_to_sequence (call_fusage);
	emit_use (reg);
	call_fusage = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insn (call_fusage);

  /* Return whatever values was restored by jumping directly to the end
     of the function.  */
  expand_naked_return ();
}
/* Used by expand_builtin_classify_type and fold_builtin_classify_type.  */

static enum type_class
type_to_class (tree type)
{
  switch (TREE_CODE (type))
    {
    case VOID_TYPE:	   return void_type_class;
    case INTEGER_TYPE:	   return integer_type_class;
    case ENUMERAL_TYPE:	   return enumeral_type_class;
    case BOOLEAN_TYPE:	   return boolean_type_class;
    case POINTER_TYPE:	   return pointer_type_class;
    case REFERENCE_TYPE:   return reference_type_class;
    case OFFSET_TYPE:	   return offset_type_class;
    case REAL_TYPE:	   return real_type_class;
    case COMPLEX_TYPE:	   return complex_type_class;
    case FUNCTION_TYPE:	   return function_type_class;
    case METHOD_TYPE:	   return method_type_class;
    case RECORD_TYPE:	   return record_type_class;
    case UNION_TYPE:
    case QUAL_UNION_TYPE:  return union_type_class;
    case ARRAY_TYPE:	   return (TYPE_STRING_FLAG (type)
				   ? string_type_class : array_type_class);
    case LANG_TYPE:	   return lang_type_class;
    default:		   return no_type_class;
    }
}
1836 expand_builtin_classify_type (tree exp
)
1838 if (call_expr_nargs (exp
))
1839 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp
, 0))));
1840 return GEN_INT (no_type_class
);
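
/* Illustrative example: because the classification depends only on the
   static type of the argument, a call such as

     double d;
     int k = __builtin_classify_type (d);

   folds to a load of the constant real_type_class (from typeclass.h);
   no classification happens at run time.  */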
/* This helper macro, meant to be used in mathfn_built_in below,
   determines which among a set of three builtin math functions is
   appropriate for a given type mode.  The `F' and `L' cases are
   automatically generated from the `double' case.  */
#define CASE_MATHFN(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
  fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
  fcodel = BUILT_IN_MATHFN##L ; break;
/* Similar to above, but appends _R after any F/L suffix.  */
#define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
  fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
  fcodel = BUILT_IN_MATHFN##L_R ; break;

/* Return the mathematical function equivalent to FN but operating directly on
   TYPE, if available.  If IMPLICIT is true use the implicit builtin
   declaration, otherwise use the explicit declaration.  If we can't do the
   conversion, return zero.  */

static tree
mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
{
  enum built_in_function fcode, fcodef, fcodel, fcode2;

  switch (fn)
    {
      CASE_MATHFN (BUILT_IN_ACOS)
      CASE_MATHFN (BUILT_IN_ACOSH)
      CASE_MATHFN (BUILT_IN_ASIN)
      CASE_MATHFN (BUILT_IN_ASINH)
      CASE_MATHFN (BUILT_IN_ATAN)
      CASE_MATHFN (BUILT_IN_ATAN2)
      CASE_MATHFN (BUILT_IN_ATANH)
      CASE_MATHFN (BUILT_IN_CBRT)
      CASE_MATHFN (BUILT_IN_CEIL)
      CASE_MATHFN (BUILT_IN_CEXPI)
      CASE_MATHFN (BUILT_IN_COPYSIGN)
      CASE_MATHFN (BUILT_IN_COS)
      CASE_MATHFN (BUILT_IN_COSH)
      CASE_MATHFN (BUILT_IN_DREM)
      CASE_MATHFN (BUILT_IN_ERF)
      CASE_MATHFN (BUILT_IN_ERFC)
      CASE_MATHFN (BUILT_IN_EXP)
      CASE_MATHFN (BUILT_IN_EXP10)
      CASE_MATHFN (BUILT_IN_EXP2)
      CASE_MATHFN (BUILT_IN_EXPM1)
      CASE_MATHFN (BUILT_IN_FABS)
      CASE_MATHFN (BUILT_IN_FDIM)
      CASE_MATHFN (BUILT_IN_FLOOR)
      CASE_MATHFN (BUILT_IN_FMA)
      CASE_MATHFN (BUILT_IN_FMAX)
      CASE_MATHFN (BUILT_IN_FMIN)
      CASE_MATHFN (BUILT_IN_FMOD)
      CASE_MATHFN (BUILT_IN_FREXP)
      CASE_MATHFN (BUILT_IN_GAMMA)
      CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
      CASE_MATHFN (BUILT_IN_HUGE_VAL)
      CASE_MATHFN (BUILT_IN_HYPOT)
      CASE_MATHFN (BUILT_IN_ILOGB)
      CASE_MATHFN (BUILT_IN_ICEIL)
      CASE_MATHFN (BUILT_IN_IFLOOR)
      CASE_MATHFN (BUILT_IN_INF)
      CASE_MATHFN (BUILT_IN_IRINT)
      CASE_MATHFN (BUILT_IN_IROUND)
      CASE_MATHFN (BUILT_IN_ISINF)
      CASE_MATHFN (BUILT_IN_J0)
      CASE_MATHFN (BUILT_IN_J1)
      CASE_MATHFN (BUILT_IN_JN)
      CASE_MATHFN (BUILT_IN_LCEIL)
      CASE_MATHFN (BUILT_IN_LDEXP)
      CASE_MATHFN (BUILT_IN_LFLOOR)
      CASE_MATHFN (BUILT_IN_LGAMMA)
      CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
      CASE_MATHFN (BUILT_IN_LLCEIL)
      CASE_MATHFN (BUILT_IN_LLFLOOR)
      CASE_MATHFN (BUILT_IN_LLRINT)
      CASE_MATHFN (BUILT_IN_LLROUND)
      CASE_MATHFN (BUILT_IN_LOG)
      CASE_MATHFN (BUILT_IN_LOG10)
      CASE_MATHFN (BUILT_IN_LOG1P)
      CASE_MATHFN (BUILT_IN_LOG2)
      CASE_MATHFN (BUILT_IN_LOGB)
      CASE_MATHFN (BUILT_IN_LRINT)
      CASE_MATHFN (BUILT_IN_LROUND)
      CASE_MATHFN (BUILT_IN_MODF)
      CASE_MATHFN (BUILT_IN_NAN)
      CASE_MATHFN (BUILT_IN_NANS)
      CASE_MATHFN (BUILT_IN_NEARBYINT)
      CASE_MATHFN (BUILT_IN_NEXTAFTER)
      CASE_MATHFN (BUILT_IN_NEXTTOWARD)
      CASE_MATHFN (BUILT_IN_POW)
      CASE_MATHFN (BUILT_IN_POWI)
      CASE_MATHFN (BUILT_IN_POW10)
      CASE_MATHFN (BUILT_IN_REMAINDER)
      CASE_MATHFN (BUILT_IN_REMQUO)
      CASE_MATHFN (BUILT_IN_RINT)
      CASE_MATHFN (BUILT_IN_ROUND)
      CASE_MATHFN (BUILT_IN_SCALB)
      CASE_MATHFN (BUILT_IN_SCALBLN)
      CASE_MATHFN (BUILT_IN_SCALBN)
      CASE_MATHFN (BUILT_IN_SIGNBIT)
      CASE_MATHFN (BUILT_IN_SIGNIFICAND)
      CASE_MATHFN (BUILT_IN_SIN)
      CASE_MATHFN (BUILT_IN_SINCOS)
      CASE_MATHFN (BUILT_IN_SINH)
      CASE_MATHFN (BUILT_IN_SQRT)
      CASE_MATHFN (BUILT_IN_TAN)
      CASE_MATHFN (BUILT_IN_TANH)
      CASE_MATHFN (BUILT_IN_TGAMMA)
      CASE_MATHFN (BUILT_IN_TRUNC)
      CASE_MATHFN (BUILT_IN_Y0)
      CASE_MATHFN (BUILT_IN_Y1)
      CASE_MATHFN (BUILT_IN_YN)

      default:
	return NULL_TREE;
    }

  if (TYPE_MAIN_VARIANT (type) == double_type_node)
    fcode2 = fcode;
  else if (TYPE_MAIN_VARIANT (type) == float_type_node)
    fcode2 = fcodef;
  else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
    fcode2 = fcodel;
  else
    return NULL_TREE;

  if (implicit_p && !builtin_decl_implicit_p (fcode2))
    return NULL_TREE;

  return builtin_decl_explicit (fcode2);
}

/* Like mathfn_built_in_1(), but always use the implicit array.  */

tree
mathfn_built_in (tree type, enum built_in_function fn)
{
  return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
}
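
/* Illustrative example: mathfn_built_in selects the variant of a
   double-precision function code matching TYPE's main variant, so

     mathfn_built_in (float_type_node, BUILT_IN_SIN)

   yields the declaration of sinf, or NULL_TREE when sinf is not
   implicitly available on the target.  */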
/* If errno must be maintained, expand the RTL to check if the result,
   TARGET, of a built-in function call, EXP, is NaN, and if so set
   errno to EDOM.  */

static void
expand_errno_check (tree exp, rtx target)
{
  rtx_code_label *lab = gen_label_rtx ();

  /* Test the result; if it is NaN, set errno=EDOM because
     the argument was not in the domain.  */
  do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
			   NULL_RTX, NULL_RTX, lab,
			   /* The jump is very likely.  */
			   REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));

#ifdef TARGET_EDOM
  /* If this built-in doesn't throw an exception, set errno directly.  */
  if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
    {
#ifdef GEN_ERRNO_RTX
      rtx errno_rtx = GEN_ERRNO_RTX;
#else
      rtx errno_rtx
	  = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
#endif
      emit_move_insn (errno_rtx,
		      gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
      emit_label (lab);
      return;
    }
#endif

  /* Make sure the library call isn't expanded as a tail call.  */
  CALL_EXPR_TAILCALL (exp) = 0;

  /* We can't set errno=EDOM directly; let the library call do it.
     Pop the arguments right away in case the call gets deleted.  */
  NO_DEFER_POP;
  expand_call (exp, target, 0);
  OK_DEFER_POP;
  emit_label (lab);
}
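
/* Illustrative sketch of the emitted check: it relies on the IEEE rule
   that only a NaN compares unequal to itself.  For y = sqrt (x) with
   -fmath-errno in effect, the expansion behaves roughly like

     y = <sqrt insn> (x);
     if (y == y) goto done;    branch is very likely taken
     errno = EDOM;             or a library call if the built-in can throw
   done:

   so the in-domain fast path costs only one extra comparison.  */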
/* Expand a call to one of the builtin math functions (sqrt, exp, or log).
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  machine_mode mode;
  bool errno_set = false;
  bool try_widening = false;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SQRT):
      errno_set = ! tree_expr_nonnegative_p (arg);
      try_widening = true;
      builtin_optab = sqrt_optab;
      break;
    CASE_FLT_FN (BUILT_IN_EXP):
      errno_set = true; builtin_optab = exp_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
      errno_set = true; builtin_optab = exp10_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP2):
      errno_set = true; builtin_optab = exp2_optab; break;
    CASE_FLT_FN (BUILT_IN_EXPM1):
      errno_set = true; builtin_optab = expm1_optab; break;
    CASE_FLT_FN (BUILT_IN_LOGB):
      errno_set = true; builtin_optab = logb_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG):
      errno_set = true; builtin_optab = log_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG10):
      errno_set = true; builtin_optab = log10_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG2):
      errno_set = true; builtin_optab = log2_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG1P):
      errno_set = true; builtin_optab = log1p_optab; break;
    CASE_FLT_FN (BUILT_IN_ASIN):
      builtin_optab = asin_optab; break;
    CASE_FLT_FN (BUILT_IN_ACOS):
      builtin_optab = acos_optab; break;
    CASE_FLT_FN (BUILT_IN_TAN):
      builtin_optab = tan_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN):
      builtin_optab = atan_optab; break;
    CASE_FLT_FN (BUILT_IN_FLOOR):
      builtin_optab = floor_optab; break;
    CASE_FLT_FN (BUILT_IN_CEIL):
      builtin_optab = ceil_optab; break;
    CASE_FLT_FN (BUILT_IN_TRUNC):
      builtin_optab = btrunc_optab; break;
    CASE_FLT_FN (BUILT_IN_ROUND):
      builtin_optab = round_optab; break;
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
      builtin_optab = nearbyint_optab;
      if (flag_trapping_math)
	break;
      /* Else fallthrough and expand as rint.  */
    CASE_FLT_FN (BUILT_IN_RINT):
      builtin_optab = rint_optab; break;
    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
      builtin_optab = significand_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  /* Before working hard, check whether the instruction is available, but try
     to widen the mode for specific operations.  */
  if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
       || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
      && (!errno_set || !optimize_insn_for_size_p ()))
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into RESULT.
	 Set RESULT to wherever the result comes back.  */
      result = expand_unop (mode, builtin_optab, op0, result, 0);

      if (result != 0)
	{
	  if (errno_set)
	    expand_errno_check (exp, result);

	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return result;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  return expand_call (exp, target, target == const0_rtx);
}
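
/* Illustrative example (target-specific, for orientation only): on a
   machine whose sqrt_optab has a DFmode handler, e.g. x86-64 with SSE2,

     double r = __builtin_sqrt (x);

   expands to the single hardware sqrt pattern, combined with the errno
   check above only when -fmath-errno is active and X may be negative;
   otherwise the code falls through to an ordinary sqrt libcall.  */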
/* Expand a call to the builtin binary math functions (pow and atan2).
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, result;
  rtx_insn *insns;
  int op1_type = REAL_TYPE;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1;
  machine_mode mode;
  bool errno_set = true;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
    CASE_FLT_FN (BUILT_IN_LDEXP):
      op1_type = INTEGER_TYPE;
    default:
      break;
    }

  if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_POW):
      builtin_optab = pow_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN2):
      builtin_optab = atan2_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALB):
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
	return 0;
      builtin_optab = scalb_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
	return 0;
      /* Fall through... */
    CASE_FLT_FN (BUILT_IN_LDEXP):
      builtin_optab = ldexp_optab; break;
    CASE_FLT_FN (BUILT_IN_FMOD):
      builtin_optab = fmod_optab; break;
    CASE_FLT_FN (BUILT_IN_REMAINDER):
    CASE_FLT_FN (BUILT_IN_DREM):
      builtin_optab = remainder_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  result = gen_reg_rtx (mode);

  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  if (errno_set && optimize_insn_for_size_p ())
    return 0;

  /* Always stabilize the argument list.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);

  start_sequence ();

  /* Compute into RESULT.
     Set RESULT to wherever the result comes back.  */
  result = expand_binop (mode, builtin_optab, op0, op1,
			 result, 0, OPTAB_DIRECT);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (result == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  if (errno_set)
    expand_errno_check (exp, result);

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return result;
}
/* Expand a call to the builtin ternary math functions (fma).
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, op2, result;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1, arg2;
  machine_mode mode;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  arg2 = CALL_EXPR_ARG (exp, 2);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_FMA):
      builtin_optab = fma_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  result = gen_reg_rtx (mode);

  /* Always stabilize the argument list.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
  CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);
  op2 = expand_normal (arg2);

  start_sequence ();

  /* Compute into RESULT.
     Set RESULT to wherever the result comes back.  */
  result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
			      result, 0);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (result == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return result;
}
/* Expand a call to the builtin sin and cos math functions.
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      builtin_optab = sincos_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Check if sincos insn is available, otherwise fallback
     to sin or cos insn.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_FLT_FN (BUILT_IN_SIN):
	builtin_optab = sin_optab; break;
      CASE_FLT_FN (BUILT_IN_COS):
	builtin_optab = cos_optab; break;
      default:
	gcc_unreachable ();
      }

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into RESULT.
	 Set RESULT to wherever the result comes back.  */
      if (builtin_optab == sincos_optab)
	{
	  int ok;

	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_SIN):
	      ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_COS):
	      ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  gcc_assert (ok);
	}
      else
	result = expand_unop (mode, builtin_optab, op0, result, 0);

      if (result != 0)
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return result;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  return expand_call (exp, target, target == const0_rtx);
}
/* Given an interclass math builtin decl FNDECL and its argument ARG,
   return an RTL instruction code that implements the functionality.
   If that isn't possible or available return CODE_FOR_nothing.  */

static enum insn_code
interclass_mathfn_icode (tree arg, tree fndecl)
{
  bool errno_set = false;
  optab builtin_optab = unknown_optab;
  machine_mode mode;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ILOGB):
      errno_set = true; builtin_optab = ilogb_optab; break;
    CASE_FLT_FN (BUILT_IN_ISINF):
      builtin_optab = isinf_optab; break;
    case BUILT_IN_ISNORMAL:
    case BUILT_IN_ISFINITE:
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      /* These builtins have no optabs (yet).  */
      break;
    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && errno_set)
    return CODE_FOR_nothing;

  /* Optab mode depends on the mode of the input argument.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (builtin_optab)
    return optab_handler (builtin_optab, mode);
  return CODE_FOR_nothing;
}
/* Expand a call to one of the builtin math functions that operate on a
   floating point argument and output an integer result (ilogb, isinf,
   isnan, etc).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_interclass_mathfn (tree exp, rtx target)
{
  enum insn_code icode = CODE_FOR_nothing;
  rtx op0;
  tree fndecl = get_callee_fndecl (exp);
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  icode = interclass_mathfn_icode (arg, fndecl);
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[1];
      rtx_insn *last = get_last_insn ();
      tree orig_arg = arg;

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      if (mode != GET_MODE (op0))
	op0 = convert_to_mode (mode, op0, 0);

      create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_legitimize_operands (icode, 0, 1, ops)
	  && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
	return ops[0].value;

      delete_insns_since (last);
      CALL_EXPR_ARG (exp, 0) = orig_arg;
    }

  return NULL_RTX;
}
/* Expand a call to the builtin sincos math function.
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function in-line.  EXP is the expression that is a call to the builtin
   function.  */

static rtx
expand_builtin_sincos (tree exp)
{
  rtx op0, op1, op2, target1, target2;
  machine_mode mode;
  tree arg, sinp, cosp;
  int result;
  location_t loc = EXPR_LOCATION (exp);
  tree alias_type, alias_off;

  if (!validate_arglist (exp, REAL_TYPE,
			 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  sinp = CALL_EXPR_ARG (exp, 1);
  cosp = CALL_EXPR_ARG (exp, 2);

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Check if sincos insn is available, otherwise emit the call.  */
  if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  target1 = gen_reg_rtx (mode);
  target2 = gen_reg_rtx (mode);

  op0 = expand_normal (arg);
  alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
  alias_off = build_int_cst (alias_type, 0);
  op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
					sinp, alias_off));
  op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
					cosp, alias_off));

  /* Compute into target1 and target2.
     Set TARGET to wherever the result comes back.  */
  result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
  gcc_assert (result);

  /* Move target1 and target2 to the memory locations indicated
     by op1 and op2.  */
  emit_move_insn (op1, target1);
  emit_move_insn (op2, target2);

  return const0_rtx;
}
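
/* Illustrative example: on a target providing a sincos pattern, a call

     sincos (x, &s, &c);

   becomes one two-output insn computing both values, followed by two
   stores through the SINP and COSP pointers; without the pattern this
   function returns NULL_RTX and a normal library call is emitted.  */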
/* Expand a call to the internal cexpi builtin to the sincos math function.
   EXP is the expression that is a call to the builtin function; if
   convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_cexpi (tree exp, rtx target)
{
  tree fndecl = get_callee_fndecl (exp);
  tree arg, type;
  machine_mode mode;
  rtx op0, op1, op2;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Try expanding via a sincos optab, fall back to emitting a libcall
     to sincos or cexp.  We are sure we have sincos or cexp because cexpi
     is only generated from sincos, cexp or if we have either of them.  */
  if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
    {
      op1 = gen_reg_rtx (mode);
      op2 = gen_reg_rtx (mode);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      /* Compute into op1 and op2.  */
      expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
    }
  else if (targetm.libc_has_function (function_sincos))
    {
      tree call, fn = NULL_TREE;
      tree top1, top2;
      rtx op1a, op2a;

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = builtin_decl_explicit (BUILT_IN_SINCOS);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
      else
	gcc_unreachable ();

      op1 = assign_temp (TREE_TYPE (arg), 1, 1);
      op2 = assign_temp (TREE_TYPE (arg), 1, 1);
      op1a = copy_addr_to_reg (XEXP (op1, 0));
      op2a = copy_addr_to_reg (XEXP (op2, 0));
      top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
      top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);

      /* Make sure not to fold the sincos call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
				      call, 3, arg, top1, top2));
    }
  else
    {
      tree call, fn = NULL_TREE, narg;
      tree ctype = build_complex_type (type);

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = builtin_decl_explicit (BUILT_IN_CEXPF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = builtin_decl_explicit (BUILT_IN_CEXP);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = builtin_decl_explicit (BUILT_IN_CEXPL);
      else
	gcc_unreachable ();

      /* If we don't have a decl for cexp create one.  This is the
	 friendliest fallback if the user calls __builtin_cexpi
	 without full target C99 function support.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;
	  const char *name = NULL;

	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	    name = "cexpf";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	    name = "cexp";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	    name = "cexpl";

	  fntype = build_function_type_list (ctype, ctype, NULL_TREE);
	  fn = build_fn_decl (name, fntype);
	}

      narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
			      build_real (type, dconst0), arg);

      /* Make sure not to fold the cexp call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      return expand_expr (build_call_nary (ctype, call, 1, narg),
			  target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now build the proper return type.  */
  return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
			      make_tree (TREE_TYPE (arg), op2),
			      make_tree (TREE_TYPE (arg), op1)),
		      target, VOIDmode, EXPAND_NORMAL);
}
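
/* Illustrative summary: __builtin_cexpi (x) computes cos (x) + i*sin (x).
   The three strategies above are, in order of preference:

     1. a sincos insn, with the two results assembled into a COMPLEX_EXPR;
     2. a call to the C library's sincos through two temporaries;
     3. a call to cexp on the complex value COMPLEX_EXPR <0.0, x>.  */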
/* Conveniently construct a function call expression.  FNDECL names the
   function to be called, N is the number of arguments, and the "..."
   parameters are the argument expressions.  Unlike build_call_expr
   this doesn't fold the call, hence it will always return a CALL_EXPR.  */

static tree
build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
{
  va_list ap;
  tree fntype = TREE_TYPE (fndecl);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);

  va_start (ap, n);
  fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
  va_end (ap);
  SET_EXPR_LOCATION (fn, loc);
  return fn;
}
/* Expand a call to one of the builtin rounding functions gcc defines
   as an extension (lfloor and lceil).  As these are gcc extensions we
   do not need to worry about setting errno to EDOM.
   If expanding via optab fails, lower expression to (int)(floor(x)).
   EXP is the expression that is a call to the builtin function;
   if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, tmp;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fallback_fn;
  tree fallback_fndecl;
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
      builtin_optab = lceil_optab;
      fallback_fn = BUILT_IN_CEIL;
      break;

    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      builtin_optab = lfloor_optab;
      fallback_fn = BUILT_IN_FLOOR;
      break;

    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  /* Compute into TARGET.  */
  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns).  */
  end_sequence ();

  /* Fall back to floating point rounding optab.  */
  fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);

  /* For non-C99 targets we may end up without a fallback fndecl here
     if the user called __builtin_lfloor directly.  In this case emit
     a call to the floor/ceil variants nevertheless.  This should result
     in the best user experience for not full C99 targets.  */
  if (fallback_fndecl == NULL_TREE)
    {
      tree fntype;
      const char *name = NULL;

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_ICEIL:
	case BUILT_IN_LCEIL:
	case BUILT_IN_LLCEIL:
	  name = "ceil";
	  break;
	case BUILT_IN_ICEILF:
	case BUILT_IN_LCEILF:
	case BUILT_IN_LLCEILF:
	  name = "ceilf";
	  break;
	case BUILT_IN_ICEILL:
	case BUILT_IN_LCEILL:
	case BUILT_IN_LLCEILL:
	  name = "ceill";
	  break;
	case BUILT_IN_IFLOOR:
	case BUILT_IN_LFLOOR:
	case BUILT_IN_LLFLOOR:
	  name = "floor";
	  break;
	case BUILT_IN_IFLOORF:
	case BUILT_IN_LFLOORF:
	case BUILT_IN_LLFLOORF:
	  name = "floorf";
	  break;
	case BUILT_IN_IFLOORL:
	case BUILT_IN_LFLOORL:
	case BUILT_IN_LLFLOORL:
	  name = "floorl";
	  break;
	default:
	  gcc_unreachable ();
	}

      fntype = build_function_type_list (TREE_TYPE (arg),
					 TREE_TYPE (arg), NULL_TREE);
      fallback_fndecl = build_fn_decl (name, fntype);
    }

  exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);

  tmp = expand_normal (exp);
  tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));

  /* Truncate the result of floating point optab to integer
     via expand_fix ().  */
  target = gen_reg_rtx (mode);
  expand_fix (target, tmp, 0);

  return target;
}
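
/* Illustrative example: for long l = __builtin_lfloor (d) on a target
   without an lfloor pattern, the fallback path above emits the
   equivalent of

     l = (long) floor (d);

   i.e. a call to floor (built on the fly for non-C99 targets) followed
   by expand_fix's float-to-integer truncation.  */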
/* Expand a call to one of the builtin math functions doing integer
   conversion (lrint).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn_2 (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg;
  machine_mode mode;
  enum built_in_function fallback_fn = BUILT_IN_NONE;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_IRINT):
      fallback_fn = BUILT_IN_LRINT;
      /* FALLTHRU */
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      builtin_optab = lrint_optab;
      break;

    CASE_FLT_FN (BUILT_IN_IROUND):
      fallback_fn = BUILT_IN_LROUND;
      /* FALLTHRU */
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      builtin_optab = lround_optab;
      break;

    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
    return NULL_RTX;

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (!flag_errno_math)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      if (expand_sfix_optab (result, op0, builtin_optab))
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return result;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  if (fallback_fn != BUILT_IN_NONE)
    {
      /* Fall back to rounding to long int.  Use implicit_p 0 - for non-C99
	 targets, (int) round (x) should never be transformed into
	 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
	 a call to lround in the hope that the target provides at least some
	 C99 functions.  This should result in the best user experience for
	 not full C99 targets.  */
      tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
						fallback_fn, 0);

      exp = build_call_nofold_loc (EXPR_LOCATION (exp),
				   fallback_fndecl, 1, arg);

      target = expand_call (exp, NULL_RTX, target == const0_rtx);
      target = maybe_emit_group_store (target, TREE_TYPE (exp));
      return convert_to_mode (mode, target, 0);
    }

  return expand_call (exp, target, target == const0_rtx);
}
/* Expand a call to the powi built-in mathematical function.  Return NULL_RTX
   if a normal call should be emitted rather than expanding the function
   in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_powi (tree exp, rtx target)
{
  tree arg0, arg1;
  rtx op0, op1;
  machine_mode mode;
  machine_mode mode2;

  if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Emit a libcall to libgcc.  */

  /* Mode of the 2nd argument must match that of an int.  */
  mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);

  if (target == NULL_RTX)
    target = gen_reg_rtx (mode);

  op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != mode)
    op0 = convert_to_mode (mode, op0, 0);
  op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
  if (GET_MODE (op1) != mode2)
    op1 = convert_to_mode (mode2, op1, 0);

  target = emit_library_call_value (optab_libfunc (powi_optab, mode),
				    target, LCT_CONST, mode, 2,
				    op0, mode, op1, mode2);

  return target;
}
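
/* Illustrative example: __builtin_powi is never expanded inline here.
   A call such as

     double r = __builtin_powi (x, n);

   becomes a constant libcall to the powi_optab libfunc, which for
   DFmode is typically __powidf2 (x, n) from libgcc.  */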
/* Expand expression EXP which is a call to the strlen builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strlen (tree exp, rtx target,
		       machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      struct expand_operand ops[4];
      rtx pat;
      tree len;
      tree src = CALL_EXPR_ARG (exp, 0);
      rtx src_reg;
      rtx_insn *before_strlen;
      machine_mode insn_mode = target_mode;
      enum insn_code icode = CODE_FOR_nothing;
      unsigned int align;

      /* If the length can be computed at compile-time, return it.  */
      len = c_strlen (src, 0);
      if (len)
	return expand_expr (len, target, target_mode, EXPAND_NORMAL);

      /* If the length can be computed at compile-time and is constant
	 integer, but there are side-effects in src, evaluate
	 src for side-effects, then return len.
	 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
	 can be optimized into: i++; x = 3;  */
      len = c_strlen (src, 1);
      if (len && TREE_CODE (len) == INTEGER_CST)
	{
	  expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return expand_expr (len, target, target_mode, EXPAND_NORMAL);
	}

      align = get_pointer_alignment (src) / BITS_PER_UNIT;

      /* If SRC is not a pointer type, don't do this operation inline.  */
      if (align == 0)
	return NULL_RTX;

      /* Bail out if we can't compute strlen in the right mode.  */
      while (insn_mode != VOIDmode)
	{
	  icode = optab_handler (strlen_optab, insn_mode);
	  if (icode != CODE_FOR_nothing)
	    break;

	  insn_mode = GET_MODE_WIDER_MODE (insn_mode);
	}
      if (insn_mode == VOIDmode)
	return NULL_RTX;

      /* Make a place to hold the source address.  We will not expand
	 the actual source until we are sure that the expansion will
	 not fail -- there are trees that cannot be expanded twice.  */
      src_reg = gen_reg_rtx (Pmode);

      /* Mark the beginning of the strlen sequence so we can emit the
	 source operand later.  */
      before_strlen = get_last_insn ();

      create_output_operand (&ops[0], target, insn_mode);
      create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
      create_integer_operand (&ops[2], 0);
      create_integer_operand (&ops[3], align);
      if (!maybe_expand_insn (icode, 4, ops))
	return NULL_RTX;

      /* Now that we are assured of success, expand the source.  */
      start_sequence ();
      pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
      if (pat != src_reg)
	{
#ifdef POINTERS_EXTEND_UNSIGNED
	  if (GET_MODE (pat) != Pmode)
	    pat = convert_to_mode (Pmode, pat,
				   POINTERS_EXTEND_UNSIGNED);
#endif
	  emit_move_insn (src_reg, pat);
	}
      pat = get_insns ();
      end_sequence ();

      if (before_strlen)
	emit_insn_after (pat, before_strlen);
      else
	emit_insn_before (pat, get_insns ());

      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (ops[0].value) == target_mode)
	target = ops[0].value;
      else if (target != 0)
	convert_move (target, ops[0].value, 0);
      else
	target = convert_to_mode (target_mode, ops[0].value, 0);

      return target;
    }
}
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

static rtx
builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
			 machine_mode mode)
{
  const char *str = (const char *) data;

  gcc_assert (offset >= 0
	      && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
		  <= strlen (str) + 1));

  return c_readstr (str + offset, mode);
}
/* LEN specifies the length of the block of the memcpy/memset operation.
   Figure out its range and put it into MIN_SIZE/MAX_SIZE.
   In some cases we can make a very likely guess on the max size; we
   then set it into PROBABLE_MAX_SIZE.  */

static void
determine_block_size (tree len, rtx len_rtx,
		      unsigned HOST_WIDE_INT *min_size,
		      unsigned HOST_WIDE_INT *max_size,
		      unsigned HOST_WIDE_INT *probable_max_size)
{
  if (CONST_INT_P (len_rtx))
    {
      *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
      return;
    }
  else
    {
      wide_int min, max;
      enum value_range_type range_type = VR_UNDEFINED;

      /* Determine bounds from the type.  */
      if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
	*min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
      else
	*min_size = 0;
      if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
	*probable_max_size = *max_size
	  = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
      else
	*probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));

      if (TREE_CODE (len) == SSA_NAME)
	range_type = get_range_info (len, &min, &max);
      if (range_type == VR_RANGE)
	{
	  if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
	    *min_size = min.to_uhwi ();
	  if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
	    *probable_max_size = *max_size = max.to_uhwi ();
	}
      else if (range_type == VR_ANTI_RANGE)
	{
	  /* An anti-range 0...N lets us determine that the minimal size
	     is N+1.  */
	  if (min == 0)
	    {
	      if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
		*min_size = max.to_uhwi () + 1;
	    }
	  /* Code like

	     int n;
	     if (n < 100)
	       memcpy (a, b, n)

	     produces an anti range allowing negative values of N.  We
	     still can use the information and make a guess that N is
	     not negative.  */
	  else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
	    *probable_max_size = min.to_uhwi () - 1;
	}
    }
  gcc_checking_assert (*max_size <=
		       (unsigned HOST_WIDE_INT)
			  GET_MODE_MASK (GET_MODE (len_rtx)));
}
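
/* Illustrative example: given

     size_t n = flag ? 16 : 32;
     memcpy (d, s, n);

   N's SSA range info is [16, 32], so *MIN_SIZE becomes 16 and both
   *MAX_SIZE and *PROBABLE_MAX_SIZE become 32, letting the block-move
   expander choose a strategy without considering impossible lengths.  */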
/* Helper function to do the actual work for expand_builtin_memcpy.  */

static rtx
expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target,
			    tree exp)
{
  const char *src_str;
  unsigned int src_align = get_pointer_alignment (src);
  unsigned int dest_align = get_pointer_alignment (dest);
  rtx dest_mem, src_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;
  unsigned HOST_WIDE_INT min_size;
  unsigned HOST_WIDE_INT max_size;
  unsigned HOST_WIDE_INT probable_max_size;

  /* If DEST is not a pointer type, call the normal function.  */
  if (dest_align == 0)
    return NULL_RTX;

  /* If either SRC is not a pointer type, don't do this
     operation in-line.  */
  if (src_align == 0)
    return NULL_RTX;

  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
			    &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;
  dest_mem = get_memory_rtx (dest, len);
  set_mem_align (dest_mem, dest_align);
  len_rtx = expand_normal (len);
  determine_block_size (len, len_rtx, &min_size, &max_size,
			&probable_max_size);
  src_str = c_getstr (src);

  /* If SRC is a string constant and block move would be done
     by pieces, we can avoid loading the string from memory
     and only store the computed constants.  */
  if (src_str
      && CONST_INT_P (len_rtx)
      && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
      && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
			      CONST_CAST (char *, src_str),
			      dest_align, false))
    {
      dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				  builtin_memcpy_read_str,
				  CONST_CAST (char *, src_str),
				  dest_align, false, 0);
      dest_mem = force_operand (XEXP (dest_mem, 0), target);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  src_mem = get_memory_rtx (src, len);
  set_mem_align (src_mem, src_align);

  /* Copy word part most expediently.  */
  dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
				     CALL_EXPR_TAILCALL (exp)
				     ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
				     expected_align, expected_size,
				     min_size, max_size, probable_max_size);

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), target);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  return dest_addr;
}
/* Expand a call EXP to the memcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memcpy (tree exp, rtx target)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      return expand_builtin_memcpy_args (dest, src, len, target, exp);
    }
}
/* Expand an instrumented call EXP to the memcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memcpy_with_bounds (tree exp, rtx target)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_BOUNDS_TYPE,
			 POINTER_TYPE, POINTER_BOUNDS_TYPE,
			 INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 2);
      tree len = CALL_EXPR_ARG (exp, 4);
      rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);

      /* Return src bounds with the result.  */
      if (res)
	{
	  rtx bnd = force_reg (targetm.chkp_bound_mode (),
			       expand_normal (CALL_EXPR_ARG (exp, 1)));
	  res = chkp_join_splitted_slot (res, bnd);
	}
      return res;
    }
}
/* Expand a call EXP to the mempcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  If ENDP is 0 return the
   destination pointer, if ENDP is 1 return the end pointer ala
   mempcpy, and if ENDP is 2 return the end pointer minus one ala
   stpcpy.  */

static rtx
expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      return expand_builtin_mempcpy_args (dest, src, len,
					  target, mode, /*endp=*/ 1,
					  exp);
    }
}
/* Expand an instrumented call EXP to the mempcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_BOUNDS_TYPE,
			 POINTER_TYPE, POINTER_BOUNDS_TYPE,
			 INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 2);
      tree len = CALL_EXPR_ARG (exp, 4);
      rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
					     mode, 1, exp);

      /* Return src bounds with the result.  */
      if (res)
	{
	  rtx bnd = force_reg (targetm.chkp_bound_mode (),
			       expand_normal (CALL_EXPR_ARG (exp, 1)));
	  res = chkp_join_splitted_slot (res, bnd);
	}
      return res;
    }
}
/* Helper function to do the actual work for expand_builtin_mempcpy.  The
   arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_mempcpy.  */

static rtx
expand_builtin_mempcpy_args (tree dest, tree src, tree len,
			     rtx target, machine_mode mode, int endp,
			     tree orig_exp)
{
  tree fndecl = get_callee_fndecl (orig_exp);

  /* If return value is ignored, transform mempcpy into memcpy.  */
  if (target == const0_rtx
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
      && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
      tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
					   dest, src, len);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else if (target == const0_rtx
	   && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
      tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
					   dest, src, len);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src);
      unsigned int dest_align = get_pointer_alignment (dest);
      rtx dest_mem, src_mem, len_rtx;

      /* If either SRC or DEST is not a pointer type, don't do this
	 operation in-line.  */
      if (dest_align == 0 || src_align == 0)
	return NULL_RTX;

      /* If LEN is not constant, call the normal function.  */
      if (! tree_fits_uhwi_p (len))
	return NULL_RTX;

      len_rtx = expand_normal (len);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
	 by pieces, we can avoid loading the string from memory
	 and only store the computed constants.  */
      if (src_str
	  && CONST_INT_P (len_rtx)
	  && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
	  && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
				  CONST_CAST (char *, src_str),
				  dest_align, false))
	{
	  dest_mem = get_memory_rtx (dest, len);
	  set_mem_align (dest_mem, dest_align);
	  dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				      builtin_memcpy_read_str,
				      CONST_CAST (char *, src_str),
				      dest_align, false, endp);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      if (CONST_INT_P (len_rtx)
	  && can_move_by_pieces (INTVAL (len_rtx),
				 MIN (dest_align, src_align)))
	{
	  dest_mem = get_memory_rtx (dest, len);
	  set_mem_align (dest_mem, dest_align);
	  src_mem = get_memory_rtx (src, len);
	  set_mem_align (src_mem, src_align);
	  dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
				     MIN (dest_align, src_align), endp);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      return NULL_RTX;
    }
}
#ifndef HAVE_movstr
# define HAVE_movstr 0
# define CODE_FOR_movstr CODE_FOR_nothing
#endif

/* Expand into a movstr instruction, if one is available.  Return NULL_RTX
   if we failed; the caller should emit a normal call, otherwise try to
   get the result in TARGET, if convenient.  If ENDP is 0 return the
   destination pointer, if ENDP is 1 return the end pointer ala
   mempcpy, and if ENDP is 2 return the end pointer minus one ala
   stpcpy.  */

static rtx
expand_movstr (tree dest, tree src, rtx target, int endp)
{
  struct expand_operand ops[3];
  rtx dest_mem;
  rtx src_mem;

  if (!HAVE_movstr)
    return NULL_RTX;

  dest_mem = get_memory_rtx (dest, NULL);
  src_mem = get_memory_rtx (src, NULL);
  if (!endp)
    {
      target = force_reg (Pmode, XEXP (dest_mem, 0));
      dest_mem = replace_equiv_address (dest_mem, target);
    }

  create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
  create_fixed_operand (&ops[1], dest_mem);
  create_fixed_operand (&ops[2], src_mem);
  if (!maybe_expand_insn (CODE_FOR_movstr, 3, ops))
    return NULL_RTX;

  if (endp && target != const0_rtx)
    {
      target = ops[0].value;
      /* movstr is supposed to set end to the address of the NUL
	 terminator.  If the caller requested a mempcpy-like return value,
	 adjust it.  */
      if (endp == 1)
	{
	  rtx tem = plus_constant (GET_MODE (target),
				   gen_lowpart (GET_MODE (target), target), 1);
	  emit_move_insn (target, force_operand (tem, NULL_RTX));
	}
    }
  return target;
}
/* Expand expression EXP, which is a call to the strcpy builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient (and in mode MODE if
   that's convenient).  */

static rtx
expand_builtin_strcpy (tree exp, rtx target)
{
  if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      return expand_builtin_strcpy_args (dest, src, target);
    }
  return NULL_RTX;
}

/* Helper function to do the actual work for expand_builtin_strcpy.  The
   arguments to the builtin_strcpy call DEST and SRC are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_strcpy.  */

static rtx
expand_builtin_strcpy_args (tree dest, tree src, rtx target)
{
  return expand_movstr (dest, src, target, /*endp=*/0);
}
/* Expand a call EXP to the stpcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
{
  tree dst, src;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* If return value is ignored, transform stpcpy into strcpy.  */
  if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      tree len, lenp1;
      rtx ret;

      /* Ensure we get an actual string whose length can be evaluated at
	 compile-time, not an expression containing a string.  This is
	 because the latter will potentially produce pessimized code
	 when used to produce the return value.  */
      if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
	return expand_movstr (dst, src, target, /*endp=*/2);

      lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
      ret = expand_builtin_mempcpy_args (dst, src, lenp1,
					 target, mode, /*endp=*/2,
					 exp);

      if (ret)
	return ret;

      if (TREE_CODE (len) == INTEGER_CST)
	{
	  rtx len_rtx = expand_normal (len);

	  if (CONST_INT_P (len_rtx))
	    {
	      ret = expand_builtin_strcpy_args (dst, src, target);

	      if (ret)
		{
		  if (! target)
		    {
		      if (mode != VOIDmode)
			target = gen_reg_rtx (mode);
		      else
			target = gen_reg_rtx (GET_MODE (ret));
		    }
		  if (GET_MODE (target) != GET_MODE (ret))
		    ret = gen_lowpart (GET_MODE (target), ret);

		  ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
		  ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
		  gcc_assert (ret);

		  return target;
		}
	    }
	}

      return expand_movstr (dst, src, target, /*endp=*/2);
    }
}
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

static rtx
builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
			  machine_mode mode)
{
  const char *str = (const char *) data;

  if ((unsigned HOST_WIDE_INT) offset > strlen (str))
    return const0_rtx;

  return c_readstr (str + offset, mode);
}
/* Expand expression EXP, which is a call to the strncpy builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  */

static rtx
expand_builtin_strncpy (tree exp, rtx target)
{
  location_t loc = EXPR_LOCATION (exp);

  if (validate_arglist (exp,
			POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      tree slen = c_strlen (src, 1);

      /* We must be passed a constant len and src parameter.  */
      if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
	return NULL_RTX;

      slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

      /* We're required to pad with trailing zeros if the requested
	 len is greater than strlen(s2)+1.  In that case try to
	 use store_by_pieces, if it fails, punt.  */
      if (tree_int_cst_lt (slen, len))
	{
	  unsigned int dest_align = get_pointer_alignment (dest);
	  const char *p = c_getstr (src);
	  rtx dest_mem;

	  if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
	      || !can_store_by_pieces (tree_to_uhwi (len),
				       builtin_strncpy_read_str,
				       CONST_CAST (char *, p),
				       dest_align, false))
	    return NULL_RTX;

	  dest_mem = get_memory_rtx (dest, len);
	  store_by_pieces (dest_mem, tree_to_uhwi (len),
			   builtin_strncpy_read_str,
			   CONST_CAST (char *, p), dest_align, false, 0);
	  dest_mem = force_operand (XEXP (dest_mem, 0), target);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}
    }
  return NULL_RTX;
}
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

static rtx
builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
			 machine_mode mode)
{
  const char *c = (const char *) data;
  char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));

  memset (p, *c, GET_MODE_SIZE (mode));

  return c_readstr (p, mode);
}
/* Callback routine for store_by_pieces.  Return the RTL of a register
   containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
   char value given in the RTL register data.  For example, if mode is
   4 bytes wide, return the RTL for 0x01010101*data.  */

static rtx
builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
			machine_mode mode)
{
  rtx target, coeff;
  size_t size;
  char *p;

  size = GET_MODE_SIZE (mode);
  if (size == 1)
    return (rtx) data;

  p = XALLOCAVEC (char, size);
  memset (p, 1, size);
  coeff = c_readstr (p, mode);

  target = convert_to_mode (mode, (rtx) data, 1);
  target = expand_mult (mode, target, coeff, NULL_RTX, 1);
  return force_reg (mode, target);
}
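
/* Illustrative example: for a 4-byte mode the coefficient built from
   the 0x01 pattern is 0x01010101, so a run-time fill value V expands to

     V * 0x01010101

   which replicates V's low byte into every byte of the word, e.g.
   V == 0xAB yields 0xABABABAB.  */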
/* Expand expression EXP, which is a call to the memset builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient (and in mode MODE if
   that's convenient).  */

static rtx
expand_builtin_memset (tree exp, rtx target, machine_mode mode)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree val = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      return expand_builtin_memset_args (dest, val, len, target, mode, exp);
    }
}
/* Expand expression EXP, which is an instrumented call to the memset builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in mode MODE
   if that's convenient).  */

static rtx
expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_BOUNDS_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree val = CALL_EXPR_ARG (exp, 2);
      tree len = CALL_EXPR_ARG (exp, 3);
      rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);

      /* Return src bounds with the result.  */
      if (res)
	{
	  rtx bnd = force_reg (targetm.chkp_bound_mode (),
			       expand_normal (CALL_EXPR_ARG (exp, 1)));
	  res = chkp_join_splitted_slot (res, bnd);
	}
      return res;
    }
}
/* Helper function to do the actual work for expand_builtin_memset.  The
   arguments to the builtin_memset call DEST, VAL, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_memset.  */

static rtx
expand_builtin_memset_args (tree dest, tree val, tree len,
			    rtx target, machine_mode mode, tree orig_exp)
{
  tree fndecl, fn;
  enum built_in_function fcode;
  machine_mode val_mode;
  char c;
  unsigned int dest_align;
  rtx dest_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;
  unsigned HOST_WIDE_INT min_size;
  unsigned HOST_WIDE_INT max_size;
  unsigned HOST_WIDE_INT probable_max_size;

  dest_align = get_pointer_alignment (dest);

  /* If DEST is not a pointer type, don't do this operation in-line.  */
  if (dest_align == 0)
    return NULL_RTX;

  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
			    &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Evaluate and ignore VAL in case it has side-effects.  */
      expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (dest, target, mode, EXPAND_NORMAL);
    }

  /* Stabilize the arguments in case we fail.  */
  dest = builtin_save_expr (dest);
  val = builtin_save_expr (val);
  len = builtin_save_expr (len);

  len_rtx = expand_normal (len);
  determine_block_size (len, len_rtx, &min_size, &max_size,
			&probable_max_size);
  dest_mem = get_memory_rtx (dest, len);
  val_mode = TYPE_MODE (unsigned_char_type_node);

  if (TREE_CODE (val) != INTEGER_CST)
    {
      rtx val_rtx;

      val_rtx = expand_normal (val);
      val_rtx = convert_to_mode (val_mode, val_rtx, 0);

      /* Assume that we can memset by pieces if we can store
       * the coefficients by pieces (in the required modes).
       * We can't pass builtin_memset_gen_str as that emits RTL.  */
      c = 1;
      if (tree_fits_uhwi_p (len)
	  && can_store_by_pieces (tree_to_uhwi (len),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	{
	  val_rtx = force_reg (val_mode, val_rtx);
	  store_by_pieces (dest_mem, tree_to_uhwi (len),
			   builtin_memset_gen_str, val_rtx, dest_align,
			   true, 0);
	}
      else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
					dest_align, expected_align,
					expected_size, min_size, max_size,
					probable_max_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  if (target_char_cast (val, &c))
    goto do_libcall;

  if (c)
    {
      if (tree_fits_uhwi_p (len)
	  && can_store_by_pieces (tree_to_uhwi (len),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	store_by_pieces (dest_mem, tree_to_uhwi (len),
			 builtin_memset_read_str, &c, dest_align, true, 0);
      else if (!set_storage_via_setmem (dest_mem, len_rtx,
					gen_int_mode (c, val_mode),
					dest_align, expected_align,
					expected_size, min_size, max_size,
					probable_max_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  set_mem_align (dest_mem, dest_align);
  dest_addr = clear_storage_hints (dest_mem, len_rtx,
				   CALL_EXPR_TAILCALL (orig_exp)
				   ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
				   expected_align, expected_size,
				   min_size, max_size,
				   probable_max_size);

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  return dest_addr;

 do_libcall:
  fndecl = get_callee_fndecl (orig_exp);
  fcode = DECL_FUNCTION_CODE (fndecl);
  if (fcode == BUILT_IN_MEMSET
      || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
				dest, val, len);
  else if (fcode == BUILT_IN_BZERO)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
				dest, len);
  else
    gcc_unreachable ();
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
  return expand_call (fn, target, target == const0_rtx);
}
/* Expand expression EXP, which is a call to the bzero builtin.  Return
   NULL_RTX if we failed and the caller should emit a normal call.  */

static rtx
expand_builtin_bzero (tree exp)
{
  tree dest, size;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dest = CALL_EXPR_ARG (exp, 0);
  size = CALL_EXPR_ARG (exp, 1);

  /* New argument list transforming bzero(ptr x, int y) to
     memset(ptr x, int 0, size_t y).  This is done this way
     so that if it isn't expanded inline, we fall back to
     calling bzero instead of memset.  */

  return expand_builtin_memset_args (dest, integer_zero_node,
				     fold_convert_loc (loc,
						       size_type_node, size),
				     const0_rtx, VOIDmode, exp);
}
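
/* Illustrative sketch (not part of GCC): the source-level equivalent of the
   argument rewrite performed by expand_builtin_bzero above.  The call is
   expanded as if it were the memset form, while the original bzero fndecl
   is kept so that an out-of-line fallback still calls bzero.  */
#if 0
#include <strings.h>

void
example (char *buf, int n)
{
  bzero (buf, n);				   /* what the user wrote */
  __builtin_memset (buf, 0, (__SIZE_TYPE__) n);	   /* how it is expanded */
}
#endif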
/* Expand expression EXP, which is a call to the memcmp built-in function.
   Return NULL_RTX if we failed and the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in mode
   MODE, if that's convenient).  */

static rtx
expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
		       ATTRIBUTE_UNUSED machine_mode mode)
{
  location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
     implementing memcmp because it will stop if it encounters two
     zero bytes.  */
#if defined HAVE_cmpmemsi
  {
    rtx arg1_rtx, arg2_rtx, arg3_rtx;
    rtx result;
    rtx insn;
    tree arg1 = CALL_EXPR_ARG (exp, 0);
    tree arg2 = CALL_EXPR_ARG (exp, 1);
    tree len = CALL_EXPR_ARG (exp, 2);

    unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
    unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
    machine_mode insn_mode;

    insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;

    /* If we don't have POINTER_TYPE, call the function.  */
    if (arg1_align == 0 || arg2_align == 0)
      return NULL_RTX;

    /* Make a place to write the result of the instruction.  */
    result = target;
    if (! (result != 0
	   && REG_P (result) && GET_MODE (result) == insn_mode
	   && REGNO (result) >= FIRST_PSEUDO_REGISTER))
      result = gen_reg_rtx (insn_mode);

    arg1_rtx = get_memory_rtx (arg1, len);
    arg2_rtx = get_memory_rtx (arg2, len);
    arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));

    /* Set MEM_SIZE as appropriate.  */
    if (CONST_INT_P (arg3_rtx))
      {
	set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
	set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
      }

    insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			 GEN_INT (MIN (arg1_align, arg2_align)));

    if (insn)
      emit_insn (insn);
    else
      emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
			       TYPE_MODE (integer_type_node), 3,
			       XEXP (arg1_rtx, 0), Pmode,
			       XEXP (arg2_rtx, 0), Pmode,
			       convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
						TYPE_UNSIGNED (sizetype)),
			       TYPE_MODE (sizetype));

    /* Return the value in the proper mode for this function.  */
    mode = TYPE_MODE (TREE_TYPE (exp));
    if (GET_MODE (result) == mode)
      return result;
    else if (target != 0)
      {
	convert_move (target, result, 0);
	return target;
      }
    else
      return convert_to_mode (mode, result, 0);
  }
#endif /* HAVE_cmpmemsi.  */

  return NULL_RTX;
}
/* Expand expression EXP, which is a call to the strcmp builtin.  Return
   NULL_RTX if we failed and the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

#if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
  if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
      || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
    {
      rtx arg1_rtx, arg2_rtx;
      rtx result, insn = NULL_RTX;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);

      unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
      unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
	return NULL_RTX;

      /* Stabilize the arguments in case gen_cmpstr(n)si fail.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);

      arg1_rtx = get_memory_rtx (arg1, NULL);
      arg2_rtx = get_memory_rtx (arg2, NULL);

#ifdef HAVE_cmpstrsi
      /* Try to call cmpstrsi.  */
      if (HAVE_cmpstrsi)
	{
	  machine_mode insn_mode
	    = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;

	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && REG_P (result) && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
			       GEN_INT (MIN (arg1_align, arg2_align)));
	}
#endif
#ifdef HAVE_cmpstrnsi
      /* Try to determine at least one length and call cmpstrnsi.  */
      if (!insn && HAVE_cmpstrnsi)
	{
	  tree len;
	  rtx arg3_rtx;

	  machine_mode insn_mode
	    = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
	  tree len1 = c_strlen (arg1, 1);
	  tree len2 = c_strlen (arg2, 1);

	  if (len1)
	    len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
	  if (len2)
	    len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

	  /* If we don't have a constant length for the first, use the length
	     of the second, if we know it.  We don't require a constant for
	     this case; some cost analysis could be done if both are available
	     but neither is constant.  For now, assume they're equally cheap,
	     unless one has side effects.  If both strings have constant lengths,
	     use the smaller.  */

	  if (!len1)
	    len = len2;
	  else if (!len2)
	    len = len1;
	  else if (TREE_SIDE_EFFECTS (len1))
	    len = len2;
	  else if (TREE_SIDE_EFFECTS (len2))
	    len = len1;
	  else if (TREE_CODE (len1) != INTEGER_CST)
	    len = len2;
	  else if (TREE_CODE (len2) != INTEGER_CST)
	    len = len1;
	  else if (tree_int_cst_lt (len1, len2))
	    len = len1;
	  else
	    len = len2;

	  /* If both arguments have side effects, we cannot optimize.  */
	  if (!len || TREE_SIDE_EFFECTS (len))
	    goto do_libcall;

	  arg3_rtx = expand_normal (len);

	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && REG_P (result) && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
				GEN_INT (MIN (arg1_align, arg2_align)));
	}
#endif

      if (insn)
	{
	  machine_mode mode;
	  emit_insn (insn);

	  /* Return the value in the proper mode for this function.  */
	  mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* Expand the library call ourselves using a stabilized argument
	 list to avoid re-evaluating the function's arguments twice.  */
#ifdef HAVE_cmpstrnsi
    do_libcall:
#endif
      fndecl = get_callee_fndecl (exp);
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
#endif
  return NULL_RTX;
}
/* Expand expression EXP, which is a call to the strncmp builtin.  Return
   NULL_RTX if we failed and the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
			ATTRIBUTE_UNUSED machine_mode mode)
{
  location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* If c_strlen can determine an expression for one of the string
     lengths, and it doesn't have side effects, then emit cmpstrnsi
     using length MIN(strlen(string)+1, arg3).  */
#ifdef HAVE_cmpstrnsi
  if (HAVE_cmpstrnsi)
  {
    tree len, len1, len2;
    rtx arg1_rtx, arg2_rtx, arg3_rtx;
    rtx result, insn;
    tree fndecl, fn;
    tree arg1 = CALL_EXPR_ARG (exp, 0);
    tree arg2 = CALL_EXPR_ARG (exp, 1);
    tree arg3 = CALL_EXPR_ARG (exp, 2);

    unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
    unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
    machine_mode insn_mode
      = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;

    len1 = c_strlen (arg1, 1);
    len2 = c_strlen (arg2, 1);

    if (len1)
      len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
    if (len2)
      len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);

    /* If we don't have a constant length for the first, use the length
       of the second, if we know it.  We don't require a constant for
       this case; some cost analysis could be done if both are available
       but neither is constant.  For now, assume they're equally cheap,
       unless one has side effects.  If both strings have constant lengths,
       use the smaller.  */

    if (!len1)
      len = len2;
    else if (!len2)
      len = len1;
    else if (TREE_SIDE_EFFECTS (len1))
      len = len2;
    else if (TREE_SIDE_EFFECTS (len2))
      len = len1;
    else if (TREE_CODE (len1) != INTEGER_CST)
      len = len2;
    else if (TREE_CODE (len2) != INTEGER_CST)
      len = len1;
    else if (tree_int_cst_lt (len1, len2))
      len = len1;
    else
      len = len2;

    /* If both arguments have side effects, we cannot optimize.  */
    if (!len || TREE_SIDE_EFFECTS (len))
      return NULL_RTX;

    /* The actual new length parameter is MIN(len,arg3).  */
    len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
			   fold_convert_loc (loc, TREE_TYPE (len), arg3));

    /* If we don't have POINTER_TYPE, call the function.  */
    if (arg1_align == 0 || arg2_align == 0)
      return NULL_RTX;

    /* Make a place to write the result of the instruction.  */
    result = target;
    if (! (result != 0
	   && REG_P (result) && GET_MODE (result) == insn_mode
	   && REGNO (result) >= FIRST_PSEUDO_REGISTER))
      result = gen_reg_rtx (insn_mode);

    /* Stabilize the arguments in case gen_cmpstrnsi fails.  */
    arg1 = builtin_save_expr (arg1);
    arg2 = builtin_save_expr (arg2);
    len = builtin_save_expr (len);

    arg1_rtx = get_memory_rtx (arg1, len);
    arg2_rtx = get_memory_rtx (arg2, len);
    arg3_rtx = expand_normal (len);
    insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			  GEN_INT (MIN (arg1_align, arg2_align)));
    if (insn)
      {
	emit_insn (insn);

	/* Return the value in the proper mode for this function.  */
	mode = TYPE_MODE (TREE_TYPE (exp));
	if (GET_MODE (result) == mode)
	  return result;
	if (target == 0)
	  return convert_to_mode (mode, result, 0);
	convert_move (target, result, 0);
	return target;
      }

    /* Expand the library call ourselves using a stabilized argument
       list to avoid re-evaluating the function's arguments twice.  */
    fndecl = get_callee_fndecl (exp);
    fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
				arg1, arg2, len);
    gcc_assert (TREE_CODE (fn) == CALL_EXPR);
    CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
    return expand_call (fn, target, target == const0_rtx);
  }
#endif
  return NULL_RTX;
}
/* Expand a call to __builtin_saveregs, generating the result in TARGET,
   if that's convenient.  */

rtx
expand_builtin_saveregs (void)
{
  rtx val;
  rtx_insn *seq;

  /* Don't do __builtin_saveregs more than once in a function.
     Save the result of the first call and reuse it.  */
  if (saveregs_value != 0)
    return saveregs_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the call to the
     first insn of this function.  */

  start_sequence ();

  /* Do whatever the machine needs done in this case.  */
  val = targetm.calls.expand_builtin_saveregs ();

  seq = get_insns ();
  end_sequence ();

  saveregs_value = val;

  /* Put the insns after the NOTE that starts the function.  If this
     is inside a start_sequence, make the outer-level insn chain current, so
     the code is placed at the start of the function.  */
  push_topmost_sequence ();
  emit_insn_after (seq, entry_of_function ());
  pop_topmost_sequence ();

  return val;
}
/* Expand a call to __builtin_next_arg.  */

static rtx
expand_builtin_next_arg (void)
{
  /* Checking arguments is already done in fold_builtin_next_arg
     that must be called before this function.  */
  return expand_binop (ptr_mode, add_optab,
		       crtl->args.internal_arg_pointer,
		       crtl->args.arg_offset_rtx,
		       NULL_RTX, 0, OPTAB_LIB_WIDEN);
}
/* Make it easier for the backends by protecting the valist argument
   from multiple evaluations.  */

static tree
stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
{
  tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));

  /* The current way of determining the type of valist is completely
     bogus.  We should have the information on the va builtin instead.  */
  if (!vatype)
    vatype = targetm.fn_abi_va_list (cfun->decl);

  if (TREE_CODE (vatype) == ARRAY_TYPE)
    {
      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);

      /* For this case, the backends will be expecting a pointer to
	 vatype, but it's possible we've actually been given an array
	 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
	 So fix it.  */
      if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	{
	  tree p1 = build_pointer_type (TREE_TYPE (vatype));
	  valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
	}
    }
  else
    {
      tree pt = build_pointer_type (vatype);

      if (! needs_lvalue)
	{
	  if (! TREE_SIDE_EFFECTS (valist))
	    return valist;

	  valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
	  TREE_SIDE_EFFECTS (valist) = 1;
	}

      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);
      valist = fold_build2_loc (loc, MEM_REF,
				vatype, valist, build_int_cst (pt, 0));
    }

  return valist;
}
/* The "standard" definition of va_list is void*.  */

tree
std_build_builtin_va_list (void)
{
  return ptr_type_node;
}

/* The "standard" abi va_list is va_list_type_node.  */

tree
std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
{
  return va_list_type_node;
}

/* The "standard" type of va_list is va_list_type_node.  */

tree
std_canonical_va_list_type (tree type)
{
  tree wtype, htype;

  if (INDIRECT_REF_P (type))
    type = TREE_TYPE (type);
  else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
    type = TREE_TYPE (type);
  wtype = va_list_type_node;
  htype = type;
  /* Treat structure va_list types.  */
  if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
    htype = TREE_TYPE (htype);
  else if (TREE_CODE (wtype) == ARRAY_TYPE)
    {
      /* If va_list is an array type, the argument may have decayed
	 to a pointer type, e.g. by being passed to another function.
	 In that case, unwrap both types so that we can compare the
	 underlying records.  */
      if (TREE_CODE (htype) == ARRAY_TYPE
	  || POINTER_TYPE_P (htype))
	{
	  wtype = TREE_TYPE (wtype);
	  htype = TREE_TYPE (htype);
	}
    }
  if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
    return va_list_type_node;

  return NULL_TREE;
}
/* The "standard" implementation of va_start: just assign `nextarg' to
   the variable.  */

void
std_expand_builtin_va_start (tree valist, rtx nextarg)
{
  rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
  convert_move (va_r, nextarg, 0);

  /* We do not have any valid bounds for the pointer, so
     just store zero bounds for it.  */
  if (chkp_function_instrumented_p (current_function_decl))
    chkp_expand_bounds_reset_for_mem (valist,
				      make_tree (TREE_TYPE (valist),
						 nextarg));
}
/* Expand EXP, a call to __builtin_va_start.  */

static rtx
expand_builtin_va_start (tree exp)
{
  rtx nextarg;
  tree valist;
  location_t loc = EXPR_LOCATION (exp);

  if (call_expr_nargs (exp) < 2)
    {
      error_at (loc, "too few arguments to function %<va_start%>");
      return const0_rtx;
    }

  if (fold_builtin_next_arg (exp, true))
    return const0_rtx;

  nextarg = expand_builtin_next_arg ();
  valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);

  if (targetm.expand_builtin_va_start)
    targetm.expand_builtin_va_start (valist, nextarg);
  else
    std_expand_builtin_va_start (valist, nextarg);

  return const0_rtx;
}
/* Expand EXP, a call to __builtin_va_end.  */

static rtx
expand_builtin_va_end (tree exp)
{
  tree valist = CALL_EXPR_ARG (exp, 0);

  /* Evaluate for side effects, if needed.  I hate macros that don't
     do that.  */
  if (TREE_SIDE_EFFECTS (valist))
    expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);

  return const0_rtx;
}
/* Expand EXP, a call to __builtin_va_copy.  We do this as a
   builtin rather than just as an assignment in stdarg.h because of the
   nastiness of array-type va_list types.  */

static rtx
expand_builtin_va_copy (tree exp)
{
  tree dst, src, t;
  location_t loc = EXPR_LOCATION (exp);

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  dst = stabilize_va_list_loc (loc, dst, 1);
  src = stabilize_va_list_loc (loc, src, 0);

  gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);

  if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
    {
      t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
  else
    {
      rtx dstb, srcb, size;

      /* Evaluate to pointers.  */
      dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
      srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
      size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
			  NULL_RTX, VOIDmode, EXPAND_NORMAL);

      dstb = convert_memory_address (Pmode, dstb);
      srcb = convert_memory_address (Pmode, srcb);

      /* "Dereference" to BLKmode memories.  */
      dstb = gen_rtx_MEM (BLKmode, dstb);
      set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
      set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
      srcb = gen_rtx_MEM (BLKmode, srcb);
      set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
      set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));

      /* Copy.  */
      emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
    }

  return const0_rtx;
}
/* Expand a call to one of the builtin functions __builtin_frame_address or
   __builtin_return_address.  */

static rtx
expand_builtin_frame_address (tree fndecl, tree exp)
{
  /* The argument must be a nonnegative integer constant.
     It counts the number of frames to scan up the stack.
     The value is the return address saved in that frame.  */
  if (call_expr_nargs (exp) == 0)
    /* Warning about missing arg was already issued.  */
    return const0_rtx;
  else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
    {
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	error ("invalid argument to %<__builtin_frame_address%>");
      else
	error ("invalid argument to %<__builtin_return_address%>");
      return const0_rtx;
    }
  else
    {
      rtx tem
	= expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
				      tree_to_uhwi (CALL_EXPR_ARG (exp, 0)));

      /* Some ports cannot access arbitrary stack frames.  */
      if (tem == NULL)
	{
	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	    warning (0, "unsupported argument to %<__builtin_frame_address%>");
	  else
	    warning (0, "unsupported argument to %<__builtin_return_address%>");
	  return const0_rtx;
	}

      /* For __builtin_frame_address, return what we've got.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	return tem;

      if (!REG_P (tem)
	  && ! CONSTANT_P (tem))
	tem = copy_addr_to_reg (tem);
      return tem;
    }
}
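
/* Illustrative sketch (not part of GCC): user-level view of the two builtins
   expanded above.  Argument 0 names the current frame; nonzero arguments
   walk up the stack and may be unsupported on some ports, in which case the
   expansion above emits a warning and returns zero.  */
#if 0
#include <stdio.h>

void
who_called_me (void)
{
  /* Address the caller will resume at, and the current frame base.  */
  printf ("return address: %p\n", __builtin_return_address (0));
  printf ("frame address:  %p\n", __builtin_frame_address (0));
}
#endif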
/* Expand EXP, a call to the alloca builtin.  Return NULL_RTX if we
   failed and the caller should emit a normal call.  CANNOT_ACCUMULATE
   is the same as for allocate_dynamic_stack_space.  */

static rtx
expand_builtin_alloca (tree exp, bool cannot_accumulate)
{
  rtx op0;
  rtx result;
  bool valid_arglist;
  unsigned int align;
  bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
			    == BUILT_IN_ALLOCA_WITH_ALIGN);

  valid_arglist
    = (alloca_with_align
       ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
       : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));

  if (!valid_arglist)
    return NULL_RTX;

  /* Compute the argument.  */
  op0 = expand_normal (CALL_EXPR_ARG (exp, 0));

  /* Compute the alignment.  */
  align = (alloca_with_align
	   ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
	   : BIGGEST_ALIGNMENT);

  /* Allocate the desired space.  */
  result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
  result = convert_memory_address (ptr_mode, result);

  return result;
}
/* Expand a call to bswap builtin in EXP.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
		      rtx subtarget)
{
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  op0 = expand_expr (arg,
		     subtarget && GET_MODE (subtarget) == target_mode
		     ? subtarget : NULL_RTX,
		     target_mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != target_mode)
    op0 = convert_to_mode (target_mode, op0, 1);

  target = expand_unop (target_mode, bswap_optab, op0, target, 1);

  gcc_assert (target);

  return convert_to_mode (target_mode, target, 1);
}
/* Expand a call to a unary builtin in EXP.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
		     rtx subtarget, optab op_optab)
{
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Compute the argument.  */
  op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
		     (subtarget
		      && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
			  == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
		     VOIDmode, EXPAND_NORMAL);
  /* Compute op, into TARGET if possible.
     Set TARGET to wherever the result comes back.  */
  target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
			op_optab, op0, target, op_optab != clrsb_optab);
  gcc_assert (target);

  return convert_to_mode (target_mode, target, 0);
}
/* Expand a call to __builtin_expect.  We just return our argument
   as the builtin_expect semantics should already have been applied by
   the tree branch prediction pass.  */

static rtx
expand_builtin_expect (tree exp, rtx target)
{
  tree arg;

  if (call_expr_nargs (exp) < 2)
    return const0_rtx;
  arg = CALL_EXPR_ARG (exp, 0);

  target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
  /* When guessing was done, the hints should be already stripped away.  */
  gcc_assert (!flag_guess_branch_prob
	      || optimize == 0 || seen_error ());
  return target;
}
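
/* Illustrative sketch (not part of GCC): typical use of __builtin_expect.
   By the time expand_builtin_expect runs, the tree branch prediction pass
   has already consumed the hint; expansion just returns the first
   argument.  */
#if 0
int
process (int *p)
{
  if (__builtin_expect (p == 0, 0))	/* hint: NULL is unlikely */
    return -1;
  return *p;
}
#endif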
/* Expand a call to __builtin_assume_aligned.  We just return our first
   argument, as the builtin_assume_aligned semantics should already have
   been applied by CCP.  */

static rtx
expand_builtin_assume_aligned (tree exp, rtx target)
{
  if (call_expr_nargs (exp) < 2)
    return const0_rtx;
  target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
			EXPAND_NORMAL);
  gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
	      && (call_expr_nargs (exp) < 3
		  || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
  return target;
}
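
/* Illustrative sketch (not part of GCC): typical use of
   __builtin_assume_aligned.  The alignment fact is consumed earlier by CCP;
   here only the pointer argument itself is expanded.  */
#if 0
double
sum2 (double *p)
{
  /* Promise the optimizer that P is 16-byte aligned, e.g. so the
     vectorizer can use aligned loads.  */
  double *ap = (double *) __builtin_assume_aligned (p, 16);
  return ap[0] + ap[1];
}
#endif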
/* Expand a call to __builtin_trap.  */

static void
expand_builtin_trap (void)
{
#ifdef HAVE_trap
  if (HAVE_trap)
    {
      rtx insn = emit_insn (gen_trap ());
      /* For trap insns when not accumulating outgoing args force
	 REG_ARGS_SIZE note to prevent crossjumping of calls with
	 different args sizes.  */
      if (!ACCUMULATE_OUTGOING_ARGS)
	add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
    }
  else
#endif
    emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
  emit_barrier ();
}
/* Expand a call to __builtin_unreachable.  We do nothing except emit
   a barrier saying that control flow will not pass here.

   It is the responsibility of the program being compiled to ensure
   that control flow never reaches __builtin_unreachable.  */

static void
expand_builtin_unreachable (void)
{
  emit_barrier ();
}
/* Expand EXP, a call to fabs, fabsf or fabsl.
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function inline.  If convenient, the result should be placed
   in TARGET.  SUBTARGET may be used as the target for computing
   the operand.  */

static rtx
expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
{
  machine_mode mode;
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
  return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
}
/* Expand EXP, a call to copysign, copysignf, or copysignl.
   Return NULL if a normal call should be emitted rather than expanding the
   function inline.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing the operand.  */

static rtx
expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
{
  rtx op0, op1;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

  arg = CALL_EXPR_ARG (exp, 1);
  op1 = expand_normal (arg);

  return expand_copysign (op0, op1, target);
}
/* Expand a call to __builtin___clear_cache.  */

static rtx
expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
{
#ifndef HAVE_clear_cache
#ifdef CLEAR_INSN_CACHE
  /* There is no "clear_cache" insn, and __clear_cache() in libgcc
     does something.  Just do the default expansion to a call to
     __clear_cache().  */
  return NULL_RTX;
#else
  /* There is no "clear_cache" insn, and __clear_cache() in libgcc
     does nothing.  There is no need to call it.  Do nothing.  */
  return const0_rtx;
#endif /* CLEAR_INSN_CACHE */
#else
  /* We have a "clear_cache" insn, and it will handle everything.  */
  tree begin, end;
  rtx begin_rtx, end_rtx;

  /* We must not expand to a library call.  If we did, any
     fallback library function in libgcc that might contain a call to
     __builtin___clear_cache() would recurse infinitely.  */
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    {
      error ("both arguments to %<__builtin___clear_cache%> must be pointers");
      return const0_rtx;
    }

  if (HAVE_clear_cache)
    {
      struct expand_operand ops[2];

      begin = CALL_EXPR_ARG (exp, 0);
      begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);

      end = CALL_EXPR_ARG (exp, 1);
      end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);

      create_address_operand (&ops[0], begin_rtx);
      create_address_operand (&ops[1], end_rtx);
      if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
	return const0_rtx;
    }
  return const0_rtx;
#endif /* HAVE_clear_cache */
}
/* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT.  */

static rtx
round_trampoline_addr (rtx tramp)
{
  rtx temp, addend, mask;

  /* If we don't need too much alignment, we'll have been guaranteed
     proper alignment by get_trampoline_type.  */
  if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
    return tramp;

  /* Round address up to desired boundary.  */
  temp = gen_reg_rtx (Pmode);
  addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
  mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);

  temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
			      temp, 0, OPTAB_LIB_WIDEN);
  tramp = expand_simple_binop (Pmode, AND, temp, mask,
			       temp, 0, OPTAB_LIB_WIDEN);

  return tramp;
}
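
/* Illustrative sketch (not part of GCC): the add-and-mask rounding that
   round_trampoline_addr emits as RTL, written on plain integers.  For a
   power-of-two ALIGN, (x + ALIGN - 1) & -ALIGN is the smallest multiple of
   ALIGN that is >= x.  */
#if 0
#include <assert.h>

static unsigned long
round_up (unsigned long x, unsigned long align)	/* align: power of two */
{
  return (x + align - 1) & -align;
}

int
main (void)
{
  assert (round_up (100, 16) == 112);
  assert (round_up (112, 16) == 112);
  return 0;
}
#endif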
static rtx
expand_builtin_init_trampoline (tree exp, bool onstack)
{
  tree t_tramp, t_func, t_chain;
  rtx m_tramp, r_tramp, r_chain, tmp;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
			 POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_tramp = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  r_tramp = expand_normal (t_tramp);
  m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
  MEM_NOTRAP_P (m_tramp) = 1;

  /* If ONSTACK, the TRAMP argument should be the address of a field
     within the local function's FRAME decl.  Either way, let's see if
     we can fill in the MEM_ATTRs for this memory.  */
  if (TREE_CODE (t_tramp) == ADDR_EXPR)
    set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);

  /* Creator of a heap trampoline is responsible for making sure the
     address is aligned to at least STACK_BOUNDARY.  Normally malloc
     will ensure this anyhow.  */
  tmp = round_trampoline_addr (r_tramp);
  if (tmp != r_tramp)
    {
      m_tramp = change_address (m_tramp, BLKmode, tmp);
      set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
      set_mem_size (m_tramp, TRAMPOLINE_SIZE);
    }

  /* The FUNC argument should be the address of the nested function.
     Extract the actual function decl to pass to the hook.  */
  gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
  t_func = TREE_OPERAND (t_func, 0);
  gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);

  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the trampoline.  */
  targetm.calls.trampoline_init (m_tramp, t_func, r_chain);

  if (onstack)
    {
      trampolines_created = 1;

      warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
		  "trampoline generated for nested function %qD", t_func);
    }

  return const0_rtx;
}
static rtx
expand_builtin_adjust_trampoline (tree exp)
{
  rtx tramp;

  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
  tramp = round_trampoline_addr (tramp);
  if (targetm.calls.trampoline_adjust_address)
    tramp = targetm.calls.trampoline_adjust_address (tramp);

  return tramp;
}
/* Expand the call EXP to the built-in signbit, signbitf or signbitl
   function.  The function first checks whether the back end provides
   an insn to implement signbit for the respective mode.  If not, it
   checks whether the floating point format of the value is such that
   the sign bit can be extracted.  If that is not the case, the
   function returns NULL_RTX to indicate that a normal call should be
   emitted rather than expanding the function in-line.  EXP is the
   expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  */
static rtx
expand_builtin_signbit (tree exp, rtx target)
{
  const struct real_format *fmt;
  machine_mode fmode, imode, rmode;
  tree arg;
  int word, bitpos;
  enum insn_code icode;
  rtx temp;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  fmode = TYPE_MODE (TREE_TYPE (arg));
  rmode = TYPE_MODE (TREE_TYPE (exp));
  fmt = REAL_MODE_FORMAT (fmode);

  arg = builtin_save_expr (arg);

  /* Expand the argument yielding a RTX expression.  */
  temp = expand_normal (arg);

  /* Check if the back end provides an insn that handles signbit for the
     argument's mode.  */
  icode = optab_handler (signbit_optab, fmode);
  if (icode != CODE_FOR_nothing)
    {
      rtx_insn *last = get_last_insn ();
      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
	return target;
      delete_insns_since (last);
    }

  /* For floating point formats without a sign bit, implement signbit
     as "ARG < 0.0".  */
  bitpos = fmt->signbit_ro;
  if (bitpos < 0)
    {
      /* But we can't do this if the format supports signed zero.  */
      if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
	return NULL_RTX;

      arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
			     build_real (TREE_TYPE (arg), dconst0));
      return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
    }

  if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
    {
      imode = int_mode_for_mode (fmode);
      if (imode == BLKmode)
	return NULL_RTX;
      temp = gen_lowpart (imode, temp);
    }
  else
    {
      imode = word_mode;
      /* Handle targets with different FP word orders.  */
      if (FLOAT_WORDS_BIG_ENDIAN)
	word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
      else
	word = bitpos / BITS_PER_WORD;
      temp = operand_subword_force (temp, word, fmode);
      bitpos = bitpos % BITS_PER_WORD;
    }

  /* Force the intermediate word_mode (or narrower) result into a
     register.  This avoids attempting to create paradoxical SUBREGs
     of floating point modes below.  */
  temp = force_reg (imode, temp);

  /* If the bitpos is within the "result mode" lowpart, the operation
     can be implemented with a single bitwise AND.  Otherwise, we need
     a right shift and an AND.  */

  if (bitpos < GET_MODE_BITSIZE (rmode))
    {
      wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));

      if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
	temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp,
			   immed_wide_int_const (mask, rmode),
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }
  else
    {
      /* Perform a logical right shift to place the signbit in the least
	 significant bit, then truncate the result to the desired mode
	 and mask just this bit.  */
      temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
      temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp, const1_rtx,
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }

  return temp;
}
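
/* Illustrative sketch (not part of GCC): the bit-twiddling fallback above,
   written for the common case of IEEE single precision on a target with
   32-bit int, where the sign is bit 31 of the value's integer image.  */
#if 0
#include <string.h>

static int
my_signbitf (float f)
{
  unsigned int bits;
  memcpy (&bits, &f, sizeof bits);	/* reinterpret, like gen_lowpart */
  return (bits >> 31) & 1;		/* shift + AND, as in the else arm */
}
#endif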
/* Expand fork or exec calls.  TARGET is the desired target of the
   call.  EXP is the call.  FN is the
   identifier of the actual function.  IGNORE is nonzero if the
   value is to be ignored.  */

static rtx
expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
{
  tree id, decl;
  tree call;

  /* If we are not profiling, just call the function.  */
  if (!profile_arc_flag)
    return NULL_RTX;

  /* Otherwise call the wrapper.  This should be equivalent for the rest of
     the compiler, so the code does not diverge, and the wrapper may run the
     code necessary for keeping the profiling sane.  */

  switch (DECL_FUNCTION_CODE (fn))
    {
    case BUILT_IN_FORK:
      id = get_identifier ("__gcov_fork");
      break;

    case BUILT_IN_EXECL:
      id = get_identifier ("__gcov_execl");
      break;

    case BUILT_IN_EXECV:
      id = get_identifier ("__gcov_execv");
      break;

    case BUILT_IN_EXECLP:
      id = get_identifier ("__gcov_execlp");
      break;

    case BUILT_IN_EXECLE:
      id = get_identifier ("__gcov_execle");
      break;

    case BUILT_IN_EXECVP:
      id = get_identifier ("__gcov_execvp");
      break;

    case BUILT_IN_EXECVE:
      id = get_identifier ("__gcov_execve");
      break;

    default:
      gcc_unreachable ();
    }

  decl = build_decl (DECL_SOURCE_LOCATION (fn),
		     FUNCTION_DECL, id, TREE_TYPE (fn));
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  TREE_NOTHROW (decl) = 1;
  DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
  DECL_VISIBILITY_SPECIFIED (decl) = 1;
  call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
  return expand_call (call, target, ignore);
}
/* Reconstitute a mode for a __sync intrinsic operation.  Since the type of
   the pointer in these functions is void*, the tree optimizers may remove
   casts.  The mode computed in expand_builtin isn't reliable either, due
   to __sync_bool_compare_and_swap.

   FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
   group of builtins.  This gives us log2 of the mode size.  */

static inline machine_mode
get_builtin_sync_mode (int fcode_diff)
{
  /* The size is not negotiable, so ask not to get BLKmode in return
     if the target indicates that a smaller size would be better.  */
  return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
}
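
/* Illustrative sketch (not part of GCC): the FCODE_DIFF -> size mapping used
   by get_builtin_sync_mode above.  The _1/_2/_4/_8/_16 variants of each
   __sync builtin are consecutive, so fcode - base is log2 of the operand
   size in bytes and BITS_PER_UNIT << diff is its size in bits.  */
#if 0
/* diff 0 -> 8 bits (QImode), 1 -> 16 (HImode), 2 -> 32 (SImode),
   3 -> 64 (DImode), 4 -> 128 (TImode).  */
static int
sync_bits_for_diff (int diff)
{
  return 8 << diff;
}
#endif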
/* Expand the memory expression LOC and return the appropriate memory operand
   for the builtin_sync operations.  */

static rtx
get_builtin_sync_mem (tree loc, machine_mode mode)
{
  rtx addr, mem;

  addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
  addr = convert_memory_address (Pmode, addr);

  /* Note that we explicitly do not want any alias information for this
     memory, so that we kill all other live memories.  Otherwise we don't
     satisfy the full barrier semantics of the intrinsic.  */
  mem = validize_mem (gen_rtx_MEM (mode, addr));

  /* The alignment needs to be at least according to that of the mode.  */
  set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
			   get_pointer_alignment (loc)));
  set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
  MEM_VOLATILE_P (mem) = 1;

  return mem;
}
/* Make sure an argument is in the right mode.
   EXP is the tree argument.
   MODE is the mode it should be in.  */

static rtx
expand_expr_force_mode (tree exp, machine_mode mode)
{
  rtx val;
  machine_mode old_mode;

  val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
  /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
     of CONST_INTs, where we know the old_mode only from the call argument.  */

  old_mode = GET_MODE (val);
  if (old_mode == VOIDmode)
    old_mode = TYPE_MODE (TREE_TYPE (exp));
  val = convert_modes (mode, old_mode, val, 1);
  return val;
}
/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
   EXP is the CALL_EXPR.  CODE is the rtx code
   that corresponds to the arithmetic or logical operation from the name;
   an exception here is that NOT actually means NAND.  TARGET is an optional
   place for us to store the results; AFTER is true if this is the
   fetch_and_xxx form.  */

static rtx
expand_builtin_sync_operation (machine_mode mode, tree exp,
			       enum rtx_code code, bool after,
			       rtx target)
{
  rtx val, mem;
  location_t loc = EXPR_LOCATION (exp);

  if (code == NOT && warn_sync_nand)
    {
      tree fndecl = get_callee_fndecl (exp);
      enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

      static bool warned_f_a_n, warned_n_a_f;

      switch (fcode)
	{
	case BUILT_IN_SYNC_FETCH_AND_NAND_1:
	case BUILT_IN_SYNC_FETCH_AND_NAND_2:
	case BUILT_IN_SYNC_FETCH_AND_NAND_4:
	case BUILT_IN_SYNC_FETCH_AND_NAND_8:
	case BUILT_IN_SYNC_FETCH_AND_NAND_16:
	  if (warned_f_a_n)
	    break;

	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_f_a_n = true;
	  break;

	case BUILT_IN_SYNC_NAND_AND_FETCH_1:
	case BUILT_IN_SYNC_NAND_AND_FETCH_2:
	case BUILT_IN_SYNC_NAND_AND_FETCH_4:
	case BUILT_IN_SYNC_NAND_AND_FETCH_8:
	case BUILT_IN_SYNC_NAND_AND_FETCH_16:
	  if (warned_n_a_f)
	    break;

	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_n_a_f = true;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SEQ_CST,
				 after);
}
/* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
   intrinsics.  EXP is the CALL_EXPR.  IS_BOOL is
   true if this is the boolean form.  TARGET is a place for us to store the
   results; this is NOT optional if IS_BOOL is true.  */

static rtx
expand_builtin_compare_and_swap (machine_mode mode, tree exp,
				 bool is_bool, rtx target)
{
  rtx old_val, new_val, mem;
  rtx *pbool, *poval;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
  new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);

  pbool = poval = NULL;
  if (target != const0_rtx)
    {
      if (is_bool)
	pbool = &target;
      else
	poval = &target;
    }
  if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
				       false, MEMMODEL_SEQ_CST,
				       MEMMODEL_SEQ_CST))
    return NULL_RTX;

  return target;
}
/* Expand the __sync_lock_test_and_set intrinsic.  Note that the most
   general form is actually an atomic exchange, and some targets only
   support a reduced form with the second argument being a constant 1.
   EXP is the CALL_EXPR; TARGET is an optional place for us to store
   the results.  */

static rtx
expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
				       rtx target)
{
  rtx val, mem;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_sync_lock_test_and_set (target, mem, val);
}
/* Expand the __sync_lock_release intrinsic.  EXP is the CALL_EXPR.  */

static void
expand_builtin_sync_lock_release (machine_mode mode, tree exp)
{
  rtx mem;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  expand_atomic_store (mem, const0_rtx, MEMMODEL_RELEASE, true);
}
/* Given an integer representing an ``enum memmodel'', verify its
   correctness and return the memory model enum.  */

static enum memmodel
get_memmodel (tree exp)
{
  rtx op;
  unsigned HOST_WIDE_INT val;

  /* If the parameter is not a constant, it's a run time value so we'll just
     convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking.  */
  if (TREE_CODE (exp) != INTEGER_CST)
    return MEMMODEL_SEQ_CST;

  op = expand_normal (exp);

  val = INTVAL (op);
  if (targetm.memmodel_check)
    val = targetm.memmodel_check (val);
  else if (val & ~MEMMODEL_MASK)
    {
      warning (OPT_Winvalid_memory_model,
	       "Unknown architecture specifier in memory model to builtin.");
      return MEMMODEL_SEQ_CST;
    }

  if ((INTVAL (op) & MEMMODEL_MASK) >= MEMMODEL_LAST)
    {
      warning (OPT_Winvalid_memory_model,
	       "invalid memory model argument to builtin");
      return MEMMODEL_SEQ_CST;
    }

  /* Workaround for Bugzilla 59448.  GCC doesn't track consume properly, so
     be conservative and promote consume to acquire.  */
  if (val == MEMMODEL_CONSUME)
    val = MEMMODEL_ACQUIRE;

  return (enum memmodel) val;
}
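
/* Illustrative sketch (not part of GCC): the user-level constants checked by
   get_memmodel above.  A non-constant model degrades to seq_cst, and
   consume is promoted to acquire (PR 59448).  */
#if 0
int
load_flag (int *p)
{
  /* Constant model: checked and used as-is.  */
  int a = __atomic_load_n (p, __ATOMIC_ACQUIRE);
  /* Consume is silently treated as acquire.  */
  int b = __atomic_load_n (p, __ATOMIC_CONSUME);
  return a + b;
}
#endif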
/* Expand the __atomic_exchange intrinsic:
   	TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
{
  rtx val, mem;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_exchange (target, mem, val, model);
}
/* Expand the __atomic_compare_exchange intrinsic:
   	bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
					TYPE desired, BOOL weak,
					enum memmodel success,
					enum memmodel failure)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
					rtx target)
{
  rtx expect, desired, mem, oldval;
  rtx_code_label *label;
  enum memmodel success, failure;
  tree weak;
  bool is_weak;

  success = get_memmodel (CALL_EXPR_ARG (exp, 4));
  failure = get_memmodel (CALL_EXPR_ARG (exp, 5));

  if (failure > success)
    {
      warning (OPT_Winvalid_memory_model,
	       "failure memory model cannot be stronger than success memory "
	       "model for %<__atomic_compare_exchange%>");
      success = MEMMODEL_SEQ_CST;
    }

  if ((failure & MEMMODEL_MASK) == MEMMODEL_RELEASE
      || (failure & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
    {
      warning (OPT_Winvalid_memory_model,
	       "invalid failure memory model for "
	       "%<__atomic_compare_exchange%>");
      failure = MEMMODEL_SEQ_CST;
      success = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  expect = expand_normal (CALL_EXPR_ARG (exp, 1));
  expect = convert_memory_address (Pmode, expect);
  expect = gen_rtx_MEM (mode, expect);
  desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);

  weak = CALL_EXPR_ARG (exp, 3);
  is_weak = false;
  if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
    is_weak = true;

  if (target == const0_rtx)
    target = NULL;

  /* Lest the rtl backend create a race condition with an improper store
     to memory, always create a new pseudo for OLDVAL.  */
  oldval = NULL;

  if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
				       is_weak, success, failure))
    return NULL_RTX;

  /* Conditionally store back to EXPECT, lest we create a race condition
     with an improper store to memory.  */
  /* ??? With a rearrangement of atomics at the gimple level, we can handle
     the normal case where EXPECT is totally private, i.e. a register.  At
     which point the store can be unconditional.  */
  label = gen_label_rtx ();
  emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL, VOIDmode, 1, label);
  emit_move_insn (expect, oldval);
  emit_label (label);

  return target;
}
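
/* Illustrative sketch (not part of GCC): the semantics implemented above.
   On failure the current value is stored back into *EXPECT; the conditional
   store after the jump is what makes the CAS-loop idiom below work.  */
#if 0
void
atomic_increment (int *p)
{
  int expected = __atomic_load_n (p, __ATOMIC_RELAXED);
  /* On failure, EXPECTED is refreshed from *P, so the loop retries with
     the value another thread just wrote.  */
  while (!__atomic_compare_exchange_n (p, &expected, expected + 1,
				       /*weak=*/0,
				       __ATOMIC_SEQ_CST, __ATOMIC_RELAXED))
    ;
}
#endif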
/* Expand the __atomic_load intrinsic:
   	TYPE __atomic_load (TYPE *object, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
{
  rtx mem;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 1));
  if ((model & MEMMODEL_MASK) == MEMMODEL_RELEASE
      || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
    {
      warning (OPT_Winvalid_memory_model,
	       "invalid memory model for %<__atomic_load%>");
      model = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operand.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  return expand_atomic_load (target, mem, model);
}
/* Expand the __atomic_store intrinsic:
   	void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_store (machine_mode mode, tree exp)
{
  rtx mem, val;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));
  if ((model & MEMMODEL_MASK) != MEMMODEL_RELAXED
      && (model & MEMMODEL_MASK) != MEMMODEL_SEQ_CST
      && (model & MEMMODEL_MASK) != MEMMODEL_RELEASE)
    {
      warning (OPT_Winvalid_memory_model,
	       "invalid memory model for %<__atomic_store%>");
      model = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_store (mem, val, model, false);
}
/* Expand the __atomic_fetch_XXX intrinsic:
   	TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.
   CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
   FETCH_AFTER is true if returning the result of the operation.
   FETCH_AFTER is false if returning the value before the operation.
   IGNORE is true if the result is not used.
   EXT_CALL is the correct builtin for an external call if this cannot be
   resolved to an instruction sequence.  */

static rtx
expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
				enum rtx_code code, bool fetch_after,
				bool ignore, enum built_in_function ext_call)
{
  rtx val, mem, ret;
  enum memmodel model;
  tree fndecl;
  tree addr;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  /* Only try generating instructions if inlining is turned on.  */
  if (flag_inline_atomics)
    {
      ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
      if (ret)
	return ret;
    }

  /* Return if a different routine isn't needed for the library call.  */
  if (ext_call == BUILT_IN_NONE)
    return NULL_RTX;

  /* Change the call to the specified function.  */
  fndecl = get_callee_fndecl (exp);
  addr = CALL_EXPR_FN (exp);
  STRIP_NOPS (addr);

  gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
  TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);

  /* Expand the call here so we can emit trailing code.  */
  ret = expand_call (exp, target, ignore);

  /* Replace the original function just in case it matters.  */
  TREE_OPERAND (addr, 0) = fndecl;

  /* Then issue the arithmetic correction to return the right result.  */
  if (!ignore)
    {
      if (code == NOT)
	{
	  ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
				     OPTAB_LIB_WIDEN);
	  ret = expand_simple_unop (mode, NOT, ret, target, true);
	}
      else
	ret = expand_simple_binop (mode, code, ret, val, target, true,
				   OPTAB_LIB_WIDEN);
    }
  return ret;
}
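
/* Illustrative sketch (not part of GCC): the arithmetic correction emitted
   above when only the fetch-before library routine is available but the
   fetch-after result was requested.  The fetch-after value is recovered by
   redoing the operation on the returned value; for NAND (CODE == NOT) the
   correction is ~(old & val).  */
#if 0
int
add_and_fetch_via_fetch_and_add (int *p, int val)
{
  int old = __atomic_fetch_add (p, val, __ATOMIC_SEQ_CST);
  return old + val;		/* the trailing expand_simple_binop */
}
#endif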
#ifndef HAVE_atomic_clear
# define HAVE_atomic_clear 0
# define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
#endif

/* Expand an atomic clear operation.
	void _atomic_clear (BOOL *obj, enum memmodel)
   EXP is the call expression.  */

static rtx
expand_builtin_atomic_clear (tree exp)
{
  machine_mode mode;
  rtx mem, ret;
  enum memmodel model;

  mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  model = get_memmodel (CALL_EXPR_ARG (exp, 1));

  if ((model & MEMMODEL_MASK) == MEMMODEL_CONSUME
      || (model & MEMMODEL_MASK) == MEMMODEL_ACQUIRE
      || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
    {
      warning (OPT_Winvalid_memory_model,
	       "invalid memory model for %<__atomic_store%>");
      model = MEMMODEL_SEQ_CST;
    }

  if (HAVE_atomic_clear)
    {
      emit_insn (gen_atomic_clear (mem, model));
      return const0_rtx;
    }

  /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
     Failing that, a store is issued by __atomic_store.  The only way this can
     fail is if the bool type is larger than a word size.  Unlikely, but
     handle it anyway for completeness.  Assume a single threaded model since
     there is no atomic support in this case, and no barriers are required.  */
  ret = expand_atomic_store (mem, const0_rtx, model, true);
  if (!ret)
    emit_move_insn (mem, const0_rtx);
  return const0_rtx;
}
/* Expand an atomic test_and_set operation.
	bool _atomic_test_and_set (BOOL *obj, enum memmodel)
   EXP is the call expression.  */

static rtx
expand_builtin_atomic_test_and_set (tree exp, rtx target)
{
  rtx mem;
  enum memmodel model;
  machine_mode mode;

  mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  model = get_memmodel (CALL_EXPR_ARG (exp, 1));

  return expand_atomic_test_and_set (target, mem, model);
}
/* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
   this architecture.  If ARG1 is NULL, use typical alignment for size ARG0.  */

static tree
fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
{
  int size;
  machine_mode mode;
  unsigned int mode_align, type_align;

  if (TREE_CODE (arg0) != INTEGER_CST)
    return NULL_TREE;

  size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
  mode = mode_for_size (size, MODE_INT, 0);
  mode_align = GET_MODE_ALIGNMENT (mode);

  if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
    type_align = mode_align;
  else
    {
      tree ttype = TREE_TYPE (arg1);

      /* This function is usually invoked and folded immediately by the front
	 end before anything else has a chance to look at it.  The pointer
	 parameter at this point is usually cast to a void *, so check for that
	 and look past the cast.  */
      if (CONVERT_EXPR_P (arg1) && POINTER_TYPE_P (ttype)
	  && VOID_TYPE_P (TREE_TYPE (ttype)))
	arg1 = TREE_OPERAND (arg1, 0);

      ttype = TREE_TYPE (arg1);
      gcc_assert (POINTER_TYPE_P (ttype));

      /* Get the underlying type of the object.  */
      ttype = TREE_TYPE (ttype);
      type_align = TYPE_ALIGN (ttype);
    }

  /* If the object has smaller alignment, the lock free routines cannot
     be used.  */
  if (type_align < mode_align)
    return boolean_false_node;

  /* Check if a compare_and_swap pattern exists for the mode which represents
     the required size.  The pattern is not allowed to fail, so the existence
     of the pattern indicates support is present.  */
  if (can_compare_and_swap_p (mode, true))
    return boolean_true_node;
  else
    return boolean_false_node;
}
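
/* Illustrative sketch (not part of GCC): user-level view of the folding
   above.  The size argument must be a compile-time constant; the answer is
   derived from the mode's alignment and the presence of a non-failing
   compare_and_swap pattern, so it folds to 0 or 1 at compile time.  */
#if 0
extern long counter;

int
can_do_it_inline (void)
{
  /* Typically folds to 1 on targets with a native CAS for this size.  */
  return __atomic_always_lock_free (sizeof (long), &counter);
}
#endif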
/* Return true if the parameters to call EXP represent an object which will
   always generate lock free instructions.  The first argument represents the
   size of the object, and the second parameter is a pointer to the object
   itself.  If NULL is passed for the object, then the result is based on
   typical alignment for an object of the specified size.  Otherwise return
   false.  */

static rtx
expand_builtin_atomic_always_lock_free (tree exp)
{
  tree size;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);

  if (TREE_CODE (arg0) != INTEGER_CST)
    {
      error ("non-constant argument 1 to __atomic_always_lock_free");
      return const0_rtx;
    }

  size = fold_builtin_atomic_always_lock_free (arg0, arg1);
  if (size == boolean_true_node)
    return const1_rtx;
  return const0_rtx;
}
/* Return a one or zero if it can be determined that object ARG1 of size ARG0
   is lock free on this architecture.  */

static tree
fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
{
  if (!flag_inline_atomics)
    return NULL_TREE;

  /* If it isn't always lock free, don't generate a result.  */
  if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
    return boolean_true_node;

  return NULL_TREE;
}
/* Return true if the parameters to call EXP represent an object which will
   always generate lock free instructions.  The first argument represents the
   size of the object, and the second parameter is a pointer to the object
   itself.  If NULL is passed for the object, then the result is based on
   typical alignment for an object of the specified size.  Otherwise return
   NULL.  */

static rtx
expand_builtin_atomic_is_lock_free (tree exp)
{
  tree size;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);

  if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
    {
      error ("non-integer argument 1 to __atomic_is_lock_free");
      return NULL_RTX;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* If the value is known at compile time, return the RTX for it.  */
  size = fold_builtin_atomic_is_lock_free (arg0, arg1);
  if (size == boolean_true_node)
    return const1_rtx;

  return NULL_RTX;
}
/* Expand the __atomic_thread_fence intrinsic:
   	void __atomic_thread_fence (enum memmodel)
   EXP is the CALL_EXPR.  */

static void
expand_builtin_atomic_thread_fence (tree exp)
{
  enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
  expand_mem_thread_fence (model);
}

/* Expand the __atomic_signal_fence intrinsic:
   	void __atomic_signal_fence (enum memmodel)
   EXP is the CALL_EXPR.  */

static void
expand_builtin_atomic_signal_fence (tree exp)
{
  enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
  expand_mem_signal_fence (model);
}

/* Expand the __sync_synchronize intrinsic.  */

static void
expand_builtin_sync_synchronize (void)
{
  expand_mem_thread_fence (MEMMODEL_SEQ_CST);
}
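
/* Editorial illustration (not from the original sources):

     __atomic_thread_fence (__ATOMIC_ACQUIRE);   - fence in the given model
     __atomic_signal_fence (__ATOMIC_SEQ_CST);   - compiler-level fence only
     __sync_synchronize ();                      - always sequentially
                                                   consistent

   the legacy __sync form takes no memory-model argument, which is why it
   is expanded directly with MEMMODEL_SEQ_CST above.  */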
static rtx
expand_builtin_thread_pointer (tree exp, rtx target)
{
  enum insn_code icode;
  if (!validate_arglist (exp, VOID_TYPE))
    return const0_rtx;
  icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand op;
      /* If the target is not suitable then create a new target.  */
      if (target == NULL_RTX
	  || !REG_P (target)
	  || GET_MODE (target) != Pmode)
	target = gen_reg_rtx (Pmode);
      create_output_operand (&op, target, Pmode);
      expand_insn (icode, 1, &op);
      return target;
    }
  error ("__builtin_thread_pointer is not supported on this target");
  return const0_rtx;
}
static void
expand_builtin_set_thread_pointer (tree exp)
{
  enum insn_code icode;
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return;
  icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand op;
      rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
			     Pmode, EXPAND_NORMAL);
      create_input_operand (&op, val, Pmode);
      expand_insn (icode, 1, &op);
      return;
    }
  error ("__builtin_set_thread_pointer is not supported on this target");
}
/* Emit code to restore the current value of stack.  */

static void
expand_stack_restore (tree var)
{
  rtx_insn *prev;
  rtx sa = expand_normal (var);

  sa = convert_memory_address (Pmode, sa);

  prev = get_last_insn ();
  emit_stack_restore (SAVE_BLOCK, sa);
  fixup_args_size_notes (prev, get_last_insn (), 0);
}
/* Emit code to save the current value of stack.  */

static rtx
expand_stack_save (void)
{
  rtx ret = NULL_RTX;

  do_pending_stack_adjust ();
  emit_stack_save (SAVE_BLOCK, &ret);
  return ret;
}
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

rtx
expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
		int ignore)
{
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
  int flags;

  /* When ASan is enabled, we don't want to expand some memory/string
     builtins and rely on libsanitizer's hooks.  This allows us to avoid
     redundant checks and be sure, that possible overflow will be detected
     by ASan.  */

  if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
    return expand_call (exp, target, ignore);

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return targetm.expand_builtin (exp, target, subtarget, mode, ignore);

  /* When not optimizing, generate calls to library functions for a certain
     set of builtins.  */
  if (!optimize
      && !called_as_built_in (fndecl)
      && fcode != BUILT_IN_FORK
      && fcode != BUILT_IN_EXECL
      && fcode != BUILT_IN_EXECV
      && fcode != BUILT_IN_EXECLP
      && fcode != BUILT_IN_EXECLE
      && fcode != BUILT_IN_EXECVP
      && fcode != BUILT_IN_EXECVE
      && fcode != BUILT_IN_ALLOCA
      && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
      && fcode != BUILT_IN_FREE
      && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
      && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
      && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
      && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
      && fcode != BUILT_IN_CHKP_BNDRET)
    return expand_call (exp, target, ignore);

  /* The built-in function expanders test for target == const0_rtx
     to determine whether the function's result will be ignored.  */
  if (ignore)
    target = const0_rtx;

  /* If the result of a pure or const built-in function is ignored, and
     none of its arguments are volatile, we can avoid expanding the
     built-in call and just evaluate the arguments for side-effects.  */
  if (target == const0_rtx
      && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
      && !(flags & ECF_LOOPING_CONST_OR_PURE))
    {
      bool volatilep = false;
      tree arg;
      call_expr_arg_iterator iter;

      FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
	if (TREE_THIS_VOLATILE (arg))
	  {
	    volatilep = true;
	    break;
	  }

      if (! volatilep)
	{
	  FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
	    expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return const0_rtx;
	}
    }

  /* expand_builtin_with_bounds is supposed to be used for
     instrumented builtin calls.  */
  gcc_assert (!CALL_WITH_BOUNDS_P (exp));

  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_FABS):
    case BUILT_IN_FABSD32:
    case BUILT_IN_FABSD64:
    case BUILT_IN_FABSD128:
      target = expand_builtin_fabs (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_COPYSIGN):
      target = expand_builtin_copysign (exp, target, subtarget);
      if (target)
	return target;
      break;

      /* Just do a normal library call if we were unable to fold
	 the values.  */
    CASE_FLT_FN (BUILT_IN_CABS):
      break;

    CASE_FLT_FN (BUILT_IN_EXP):
    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
    CASE_FLT_FN (BUILT_IN_EXP2):
    CASE_FLT_FN (BUILT_IN_EXPM1):
    CASE_FLT_FN (BUILT_IN_LOGB):
    CASE_FLT_FN (BUILT_IN_LOG):
    CASE_FLT_FN (BUILT_IN_LOG10):
    CASE_FLT_FN (BUILT_IN_LOG2):
    CASE_FLT_FN (BUILT_IN_LOG1P):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ACOS):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
      /* Treat these like sqrt only if unsafe math optimizations are allowed,
	 because of possible accuracy problems.  */
      if (! flag_unsafe_math_optimizations)
	break;

    CASE_FLT_FN (BUILT_IN_SQRT):
    CASE_FLT_FN (BUILT_IN_FLOOR):
    CASE_FLT_FN (BUILT_IN_CEIL):
    CASE_FLT_FN (BUILT_IN_TRUNC):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      target = expand_builtin_mathfn (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_FMA):
      target = expand_builtin_mathfn_ternary (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_ILOGB):
      if (! flag_unsafe_math_optimizations)
	break;
    CASE_FLT_FN (BUILT_IN_ISINF):
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_ISFINITE:
    case BUILT_IN_ISNORMAL:
      target = expand_builtin_interclass_mathfn (exp, target);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      target = expand_builtin_int_roundingfn (exp, target);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_IRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_IROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      target = expand_builtin_int_roundingfn_2 (exp, target);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_POWI):
      target = expand_builtin_powi (exp, target);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_ATAN2):
    CASE_FLT_FN (BUILT_IN_LDEXP):
    CASE_FLT_FN (BUILT_IN_SCALB):
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      if (! flag_unsafe_math_optimizations)
	break;

    CASE_FLT_FN (BUILT_IN_FMOD):
    CASE_FLT_FN (BUILT_IN_REMAINDER):
    CASE_FLT_FN (BUILT_IN_DREM):
    CASE_FLT_FN (BUILT_IN_POW):
      target = expand_builtin_mathfn_2 (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_CEXPI):
      target = expand_builtin_cexpi (exp, target);
      gcc_assert (target);
      return target;

    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      if (! flag_unsafe_math_optimizations)
	break;
      target = expand_builtin_mathfn_3 (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_SINCOS):
      if (! flag_unsafe_math_optimizations)
	break;
      target = expand_builtin_sincos (exp);
      if (target)
	return target;
      break;

    case BUILT_IN_APPLY_ARGS:
      return expand_builtin_apply_args ();

      /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
	 FUNCTION with a copy of the parameters described by
	 ARGUMENTS, and ARGSIZE.  It returns a block of memory
	 allocated on the stack into which is stored all the registers
	 that might possibly be used for returning the result of a
	 function.  ARGUMENTS is the value returned by
	 __builtin_apply_args.  ARGSIZE is the number of bytes of
	 arguments that must be copied.  ??? How should this value be
	 computed?  We'll also need a safe worst case value for varargs
	 functions.  */
    case BUILT_IN_APPLY:
      if (!validate_arglist (exp, POINTER_TYPE,
			     POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
	  && !validate_arglist (exp, REFERENCE_TYPE,
				POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
	return const0_rtx;
      else
	{
	  rtx ops[3];

	  ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
	  ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
	  ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));

	  return expand_builtin_apply (ops[0], ops[1], ops[2]);
	}

      /* __builtin_return (RESULT) causes the function to return the
	 value described by RESULT.  RESULT is address of the block of
	 memory returned by __builtin_apply.  */
    case BUILT_IN_RETURN:
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
	expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
      return const0_rtx;

    case BUILT_IN_SAVEREGS:
      return expand_builtin_saveregs ();

    case BUILT_IN_VA_ARG_PACK:
      /* All valid uses of __builtin_va_arg_pack () are removed during
	 inlining.  */
      error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
      return const0_rtx;

    case BUILT_IN_VA_ARG_PACK_LEN:
      /* All valid uses of __builtin_va_arg_pack_len () are removed during
	 inlining.  */
      error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
      return const0_rtx;

      /* Return the address of the first anonymous stack arg.  */
    case BUILT_IN_NEXT_ARG:
      if (fold_builtin_next_arg (exp, false))
	return const0_rtx;
      return expand_builtin_next_arg ();
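
      /* Editorial illustration (not from the original sources): the apply
	 machinery above supports forwarding thunks of roughly this shape,
	 with a hypothetical target_fn and a hypothetical 16-byte argument
	 block:

	   void *args = __builtin_apply_args ();
	   void *res  = __builtin_apply ((void (*) ()) target_fn, args, 16);
	   __builtin_return (res);

	 ARGSIZE (16 here) must cover all stack-passed arguments, which is
	 the open question recorded in the comment above.  */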

    case BUILT_IN_CLEAR_CACHE:
      target = expand_builtin___clear_cache (exp);
      if (target)
	return target;
      break;

    case BUILT_IN_CLASSIFY_TYPE:
      return expand_builtin_classify_type (exp);

    case BUILT_IN_CONSTANT_P:
      return const0_rtx;

    case BUILT_IN_FRAME_ADDRESS:
    case BUILT_IN_RETURN_ADDRESS:
      return expand_builtin_frame_address (fndecl, exp);

      /* Returns the address of the area where the structure is returned.
	 0 otherwise.  */
    case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      if (call_expr_nargs (exp) != 0
	  || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
	  || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
	return const0_rtx;
      else
	return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);

    case BUILT_IN_ALLOCA:
    case BUILT_IN_ALLOCA_WITH_ALIGN:
      /* If the allocation stems from the declaration of a variable-sized
	 object, it cannot accumulate.  */
      target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
      if (target)
	return target;
      break;

    case BUILT_IN_STACK_SAVE:
      return expand_stack_save ();

    case BUILT_IN_STACK_RESTORE:
      expand_stack_restore (CALL_EXPR_ARG (exp, 0));
      return const0_rtx;

    case BUILT_IN_BSWAP16:
    case BUILT_IN_BSWAP32:
    case BUILT_IN_BSWAP64:
      target = expand_builtin_bswap (target_mode, exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_FFS):
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, ffs_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_CLZ):
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, clz_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_CTZ):
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, ctz_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_CLRSB):
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, clrsb_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_POPCOUNT):
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, popcount_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_PARITY):
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, parity_optab);
      if (target)
	return target;
      break;
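
      /* Editorial illustration (not from the original sources): each case
	 above routes one bit-query builtin through one optab, e.g.

	   int n = __builtin_popcount (x);   - popcount_optab
	   int z = __builtin_clz (x);        - clz_optab, undefined at x == 0

	 when the target has no matching instruction, expand_builtin_unop
	 returns 0 and the break falls through to the library call at the
	 end of this function.  */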

    case BUILT_IN_STRLEN:
      target = expand_builtin_strlen (exp, target, target_mode);
      if (target)
	return target;
      break;

    case BUILT_IN_STRCPY:
      target = expand_builtin_strcpy (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_STRNCPY:
      target = expand_builtin_strncpy (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_STPCPY:
      target = expand_builtin_stpcpy (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMCPY:
      target = expand_builtin_memcpy (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMPCPY:
      target = expand_builtin_mempcpy (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMSET:
      target = expand_builtin_memset (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_BZERO:
      target = expand_builtin_bzero (exp);
      if (target)
	return target;
      break;

    case BUILT_IN_STRCMP:
      target = expand_builtin_strcmp (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_STRNCMP:
      target = expand_builtin_strncmp (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMCMP:
      target = expand_builtin_memcmp (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_SETJMP:
      /* This should have been lowered to the builtins below.  */
      gcc_unreachable ();

    case BUILT_IN_SETJMP_SETUP:
      /* __builtin_setjmp_setup is passed a pointer to an array of five words
	 and the receiver label.  */
      if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
	{
	  rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
				      VOIDmode, EXPAND_NORMAL);
	  tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
	  rtx label_r = label_rtx (label);

	  /* This is copied from the handling of non-local gotos.  */
	  expand_builtin_setjmp_setup (buf_addr, label_r);
	  nonlocal_goto_handler_labels
	    = gen_rtx_INSN_LIST (VOIDmode, label_r,
				 nonlocal_goto_handler_labels);
	  /* ??? Do not let expand_label treat us as such since we would
	     not want to be both on the list of non-local labels and on
	     the list of forced labels.  */
	  FORCED_LABEL (label) = 0;
	  return const0_rtx;
	}
      break;

    case BUILT_IN_SETJMP_RECEIVER:
      /* __builtin_setjmp_receiver is passed the receiver label.  */
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
	{
	  tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
	  rtx label_r = label_rtx (label);

	  expand_builtin_setjmp_receiver (label_r);
	  return const0_rtx;
	}
      break;

      /* __builtin_longjmp is passed a pointer to an array of five words.
	 It's similar to the C library longjmp function but works with
	 __builtin_setjmp above.  */
    case BUILT_IN_LONGJMP:
      if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
	{
	  rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
				      VOIDmode, EXPAND_NORMAL);
	  rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));

	  if (value != const1_rtx)
	    {
	      error ("%<__builtin_longjmp%> second argument must be 1");
	      return const0_rtx;
	    }

	  expand_builtin_longjmp (buf_addr, value);
	  return const0_rtx;
	}
      break;

    case BUILT_IN_NONLOCAL_GOTO:
      target = expand_builtin_nonlocal_goto (exp);
      if (target)
	return target;
      break;

      /* This updates the setjmp buffer that is its argument with the value
	 of the current stack pointer.  */
    case BUILT_IN_UPDATE_SETJMP_BUF:
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
	{
	  rtx buf_addr
	    = expand_normal (CALL_EXPR_ARG (exp, 0));

	  expand_builtin_update_setjmp_buf (buf_addr);
	  return const0_rtx;
	}
      break;

    case BUILT_IN_TRAP:
      expand_builtin_trap ();
      return const0_rtx;

    case BUILT_IN_UNREACHABLE:
      expand_builtin_unreachable ();
      return const0_rtx;

    CASE_FLT_FN (BUILT_IN_SIGNBIT):
    case BUILT_IN_SIGNBITD32:
    case BUILT_IN_SIGNBITD64:
    case BUILT_IN_SIGNBITD128:
      target = expand_builtin_signbit (exp, target);
      if (target)
	return target;
      break;

      /* Various hooks for the DWARF 2 __throw routine.  */
    case BUILT_IN_UNWIND_INIT:
      expand_builtin_unwind_init ();
      return const0_rtx;
    case BUILT_IN_DWARF_CFA:
      return virtual_cfa_rtx;
#ifdef DWARF2_UNWIND_INFO
    case BUILT_IN_DWARF_SP_COLUMN:
      return expand_builtin_dwarf_sp_column ();
    case BUILT_IN_INIT_DWARF_REG_SIZES:
      expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
      return const0_rtx;
#endif
    case BUILT_IN_FROB_RETURN_ADDR:
      return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
    case BUILT_IN_EXTRACT_RETURN_ADDR:
      return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
    case BUILT_IN_EH_RETURN:
      expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
				CALL_EXPR_ARG (exp, 1));
      return const0_rtx;
#ifdef EH_RETURN_DATA_REGNO
    case BUILT_IN_EH_RETURN_DATA_REGNO:
      return expand_builtin_eh_return_data_regno (exp);
#endif
    case BUILT_IN_EXTEND_POINTER:
      return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
    case BUILT_IN_EH_POINTER:
      return expand_builtin_eh_pointer (exp);
    case BUILT_IN_EH_FILTER:
      return expand_builtin_eh_filter (exp);
    case BUILT_IN_EH_COPY_VALUES:
      return expand_builtin_eh_copy_values (exp);

    case BUILT_IN_VA_START:
      return expand_builtin_va_start (exp);
    case BUILT_IN_VA_END:
      return expand_builtin_va_end (exp);
    case BUILT_IN_VA_COPY:
      return expand_builtin_va_copy (exp);
    case BUILT_IN_EXPECT:
      return expand_builtin_expect (exp, target);
    case BUILT_IN_ASSUME_ALIGNED:
      return expand_builtin_assume_aligned (exp, target);
    case BUILT_IN_PREFETCH:
      expand_builtin_prefetch (exp);
      return const0_rtx;

    case BUILT_IN_INIT_TRAMPOLINE:
      return expand_builtin_init_trampoline (exp, true);
    case BUILT_IN_INIT_HEAP_TRAMPOLINE:
      return expand_builtin_init_trampoline (exp, false);
    case BUILT_IN_ADJUST_TRAMPOLINE:
      return expand_builtin_adjust_trampoline (exp);

    case BUILT_IN_FORK:
    case BUILT_IN_EXECL:
    case BUILT_IN_EXECV:
    case BUILT_IN_EXECLP:
    case BUILT_IN_EXECLE:
    case BUILT_IN_EXECVP:
    case BUILT_IN_EXECVE:
      target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_ADD_1:
    case BUILT_IN_SYNC_FETCH_AND_ADD_2:
    case BUILT_IN_SYNC_FETCH_AND_ADD_4:
    case BUILT_IN_SYNC_FETCH_AND_ADD_8:
    case BUILT_IN_SYNC_FETCH_AND_ADD_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
      target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_SUB_1:
    case BUILT_IN_SYNC_FETCH_AND_SUB_2:
    case BUILT_IN_SYNC_FETCH_AND_SUB_4:
    case BUILT_IN_SYNC_FETCH_AND_SUB_8:
    case BUILT_IN_SYNC_FETCH_AND_SUB_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
      target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_OR_1:
    case BUILT_IN_SYNC_FETCH_AND_OR_2:
    case BUILT_IN_SYNC_FETCH_AND_OR_4:
    case BUILT_IN_SYNC_FETCH_AND_OR_8:
    case BUILT_IN_SYNC_FETCH_AND_OR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
      target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_AND_1:
    case BUILT_IN_SYNC_FETCH_AND_AND_2:
    case BUILT_IN_SYNC_FETCH_AND_AND_4:
    case BUILT_IN_SYNC_FETCH_AND_AND_8:
    case BUILT_IN_SYNC_FETCH_AND_AND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
      target = expand_builtin_sync_operation (mode, exp, AND, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_XOR_1:
    case BUILT_IN_SYNC_FETCH_AND_XOR_2:
    case BUILT_IN_SYNC_FETCH_AND_XOR_4:
    case BUILT_IN_SYNC_FETCH_AND_XOR_8:
    case BUILT_IN_SYNC_FETCH_AND_XOR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
      target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_NAND_1:
    case BUILT_IN_SYNC_FETCH_AND_NAND_2:
    case BUILT_IN_SYNC_FETCH_AND_NAND_4:
    case BUILT_IN_SYNC_FETCH_AND_NAND_8:
    case BUILT_IN_SYNC_FETCH_AND_NAND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
      target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_ADD_AND_FETCH_1:
    case BUILT_IN_SYNC_ADD_AND_FETCH_2:
    case BUILT_IN_SYNC_ADD_AND_FETCH_4:
    case BUILT_IN_SYNC_ADD_AND_FETCH_8:
    case BUILT_IN_SYNC_ADD_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_SUB_AND_FETCH_1:
    case BUILT_IN_SYNC_SUB_AND_FETCH_2:
    case BUILT_IN_SYNC_SUB_AND_FETCH_4:
    case BUILT_IN_SYNC_SUB_AND_FETCH_8:
    case BUILT_IN_SYNC_SUB_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_OR_AND_FETCH_1:
    case BUILT_IN_SYNC_OR_AND_FETCH_2:
    case BUILT_IN_SYNC_OR_AND_FETCH_4:
    case BUILT_IN_SYNC_OR_AND_FETCH_8:
    case BUILT_IN_SYNC_OR_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_AND_AND_FETCH_1:
    case BUILT_IN_SYNC_AND_AND_FETCH_2:
    case BUILT_IN_SYNC_AND_AND_FETCH_4:
    case BUILT_IN_SYNC_AND_AND_FETCH_8:
    case BUILT_IN_SYNC_AND_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, AND, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_XOR_AND_FETCH_1:
    case BUILT_IN_SYNC_XOR_AND_FETCH_2:
    case BUILT_IN_SYNC_XOR_AND_FETCH_4:
    case BUILT_IN_SYNC_XOR_AND_FETCH_8:
    case BUILT_IN_SYNC_XOR_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_NAND_AND_FETCH_1:
    case BUILT_IN_SYNC_NAND_AND_FETCH_2:
    case BUILT_IN_SYNC_NAND_AND_FETCH_4:
    case BUILT_IN_SYNC_NAND_AND_FETCH_8:
    case BUILT_IN_SYNC_NAND_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
      if (mode == VOIDmode)
	mode = TYPE_MODE (boolean_type_node);
      if (!target || !register_operand (target, mode))
	target = gen_reg_rtx (mode);

      mode = get_builtin_sync_mode
	(fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
      target = expand_builtin_compare_and_swap (mode, exp, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
      mode = get_builtin_sync_mode
	(fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
      target = expand_builtin_compare_and_swap (mode, exp, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
      target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_LOCK_RELEASE_1:
    case BUILT_IN_SYNC_LOCK_RELEASE_2:
    case BUILT_IN_SYNC_LOCK_RELEASE_4:
    case BUILT_IN_SYNC_LOCK_RELEASE_8:
    case BUILT_IN_SYNC_LOCK_RELEASE_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
      expand_builtin_sync_lock_release (mode, exp);
      return const0_rtx;

    case BUILT_IN_SYNC_SYNCHRONIZE:
      expand_builtin_sync_synchronize ();
      return const0_rtx;
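
      /* Editorial illustration (not from the original sources): the
	 __sync cases above all follow one decoding scheme, e.g.

	   long v = __sync_fetch_and_add (&counter, 1);

	 maps to BUILT_IN_SYNC_FETCH_AND_ADD_8 on a typical LP64 target;
	 subtracting the _1 enumerator recovers the size index that
	 get_builtin_sync_mode turns into the operand mode, and PLUS with
	 a false "after" flag requests the fetch-then-add variant.  */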

    case BUILT_IN_ATOMIC_EXCHANGE_1:
    case BUILT_IN_ATOMIC_EXCHANGE_2:
    case BUILT_IN_ATOMIC_EXCHANGE_4:
    case BUILT_IN_ATOMIC_EXCHANGE_8:
    case BUILT_IN_ATOMIC_EXCHANGE_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
      target = expand_builtin_atomic_exchange (mode, exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
      {
	unsigned int nargs, z;
	vec<tree, va_gc> *vec;

	mode =
	  get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
	target = expand_builtin_atomic_compare_exchange (mode, exp, target);
	if (target)
	  return target;

	/* If this is turned into an external library call, the weak parameter
	   must be dropped to match the expected parameter list.  */
	nargs = call_expr_nargs (exp);
	vec_alloc (vec, nargs - 1);
	for (z = 0; z < 3; z++)
	  vec->quick_push (CALL_EXPR_ARG (exp, z));
	/* Skip the boolean weak parameter.  */
	for (z = 4; z < 6; z++)
	  vec->quick_push (CALL_EXPR_ARG (exp, z));
	exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
	break;
      }

    case BUILT_IN_ATOMIC_LOAD_1:
    case BUILT_IN_ATOMIC_LOAD_2:
    case BUILT_IN_ATOMIC_LOAD_4:
    case BUILT_IN_ATOMIC_LOAD_8:
    case BUILT_IN_ATOMIC_LOAD_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
      target = expand_builtin_atomic_load (mode, exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_STORE_1:
    case BUILT_IN_ATOMIC_STORE_2:
    case BUILT_IN_ATOMIC_STORE_4:
    case BUILT_IN_ATOMIC_STORE_8:
    case BUILT_IN_ATOMIC_STORE_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
      target = expand_builtin_atomic_store (mode, exp);
      if (target)
	return const0_rtx;
      break;
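
      /* Editorial illustration (not from the original sources): for

	   __atomic_compare_exchange_n (&v, &expected, desired, 0,
					__ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);

	 the weak flag (argument index 3) only selects an inline code
	 shape; the library routine __atomic_compare_exchange_N takes no
	 such flag, which is why the fallback path above rebuilds the
	 CALL_EXPR with that argument dropped before falling through to
	 expand_call.  */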

    case BUILT_IN_ATOMIC_ADD_FETCH_1:
    case BUILT_IN_ATOMIC_ADD_FETCH_2:
    case BUILT_IN_ATOMIC_ADD_FETCH_4:
    case BUILT_IN_ATOMIC_ADD_FETCH_8:
    case BUILT_IN_ATOMIC_ADD_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
				       (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }

    case BUILT_IN_ATOMIC_SUB_FETCH_1:
    case BUILT_IN_ATOMIC_SUB_FETCH_2:
    case BUILT_IN_ATOMIC_SUB_FETCH_4:
    case BUILT_IN_ATOMIC_SUB_FETCH_8:
    case BUILT_IN_ATOMIC_SUB_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
				       (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }

    case BUILT_IN_ATOMIC_AND_FETCH_1:
    case BUILT_IN_ATOMIC_AND_FETCH_2:
    case BUILT_IN_ATOMIC_AND_FETCH_4:
    case BUILT_IN_ATOMIC_AND_FETCH_8:
    case BUILT_IN_ATOMIC_AND_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
				       (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }

    case BUILT_IN_ATOMIC_NAND_FETCH_1:
    case BUILT_IN_ATOMIC_NAND_FETCH_2:
    case BUILT_IN_ATOMIC_NAND_FETCH_4:
    case BUILT_IN_ATOMIC_NAND_FETCH_8:
    case BUILT_IN_ATOMIC_NAND_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
				       (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }

    case BUILT_IN_ATOMIC_XOR_FETCH_1:
    case BUILT_IN_ATOMIC_XOR_FETCH_2:
    case BUILT_IN_ATOMIC_XOR_FETCH_4:
    case BUILT_IN_ATOMIC_XOR_FETCH_8:
    case BUILT_IN_ATOMIC_XOR_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
				       (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }

    case BUILT_IN_ATOMIC_OR_FETCH_1:
    case BUILT_IN_ATOMIC_OR_FETCH_2:
    case BUILT_IN_ATOMIC_OR_FETCH_4:
    case BUILT_IN_ATOMIC_OR_FETCH_8:
    case BUILT_IN_ATOMIC_OR_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
				       (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }

    case BUILT_IN_ATOMIC_FETCH_ADD_1:
    case BUILT_IN_ATOMIC_FETCH_ADD_2:
    case BUILT_IN_ATOMIC_FETCH_ADD_4:
    case BUILT_IN_ATOMIC_FETCH_ADD_8:
    case BUILT_IN_ATOMIC_FETCH_ADD_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_SUB_1:
    case BUILT_IN_ATOMIC_FETCH_SUB_2:
    case BUILT_IN_ATOMIC_FETCH_SUB_4:
    case BUILT_IN_ATOMIC_FETCH_SUB_8:
    case BUILT_IN_ATOMIC_FETCH_SUB_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_AND_1:
    case BUILT_IN_ATOMIC_FETCH_AND_2:
    case BUILT_IN_ATOMIC_FETCH_AND_4:
    case BUILT_IN_ATOMIC_FETCH_AND_8:
    case BUILT_IN_ATOMIC_FETCH_AND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_NAND_1:
    case BUILT_IN_ATOMIC_FETCH_NAND_2:
    case BUILT_IN_ATOMIC_FETCH_NAND_4:
    case BUILT_IN_ATOMIC_FETCH_NAND_8:
    case BUILT_IN_ATOMIC_FETCH_NAND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_XOR_1:
    case BUILT_IN_ATOMIC_FETCH_XOR_2:
    case BUILT_IN_ATOMIC_FETCH_XOR_4:
    case BUILT_IN_ATOMIC_FETCH_XOR_8:
    case BUILT_IN_ATOMIC_FETCH_XOR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_OR_1:
    case BUILT_IN_ATOMIC_FETCH_OR_2:
    case BUILT_IN_ATOMIC_FETCH_OR_4:
    case BUILT_IN_ATOMIC_FETCH_OR_8:
    case BUILT_IN_ATOMIC_FETCH_OR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_TEST_AND_SET:
      return expand_builtin_atomic_test_and_set (exp, target);

    case BUILT_IN_ATOMIC_CLEAR:
      return expand_builtin_atomic_clear (exp);

    case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
      return expand_builtin_atomic_always_lock_free (exp);

    case BUILT_IN_ATOMIC_IS_LOCK_FREE:
      target = expand_builtin_atomic_is_lock_free (exp);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_THREAD_FENCE:
      expand_builtin_atomic_thread_fence (exp);
      return const0_rtx;

    case BUILT_IN_ATOMIC_SIGNAL_FENCE:
      expand_builtin_atomic_signal_fence (exp);
      return const0_rtx;

    case BUILT_IN_OBJECT_SIZE:
      return expand_builtin_object_size (exp);

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      target = expand_builtin_memory_chk (exp, target, mode, fcode);
      if (target)
	return target;
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
    case BUILT_IN_STRCAT_CHK:
    case BUILT_IN_STRNCAT_CHK:
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      maybe_emit_chk_warning (exp, fcode);
      break;

    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      maybe_emit_sprintf_chk_warning (exp, fcode);
      break;

    case BUILT_IN_FREE:
      if (warn_free_nonheap_object)
	maybe_emit_free_warning (exp);
      break;

    case BUILT_IN_THREAD_POINTER:
      return expand_builtin_thread_pointer (exp, target);

    case BUILT_IN_SET_THREAD_POINTER:
      expand_builtin_set_thread_pointer (exp);
      return const0_rtx;

    case BUILT_IN_CILK_DETACH:
      expand_builtin_cilk_detach (exp);
      return const0_rtx;

    case BUILT_IN_CILK_POP_FRAME:
      expand_builtin_cilk_pop_frame (exp);
      return const0_rtx;

    case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
    case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
    case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
    case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
    case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
    case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
    case BUILT_IN_CHKP_SET_PTR_BOUNDS:
    case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
    case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
    case BUILT_IN_CHKP_GET_PTR_LBOUND:
    case BUILT_IN_CHKP_GET_PTR_UBOUND:
      /* We allow user CHKP builtins if Pointer Bounds
	 Checker is off.  */
      if (!chkp_function_instrumented_p (current_function_decl))
	{
	  if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
	      || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
	      || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
	      || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
	      || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
	    return expand_normal (CALL_EXPR_ARG (exp, 0));
	  else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
	    return expand_normal (size_zero_node);
	  else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
	    return expand_normal (size_int (-1));
	  else
	    return const0_rtx;
	}
      /* FALLTHROUGH */

    case BUILT_IN_CHKP_BNDMK:
    case BUILT_IN_CHKP_BNDSTX:
    case BUILT_IN_CHKP_BNDCL:
    case BUILT_IN_CHKP_BNDCU:
    case BUILT_IN_CHKP_BNDLDX:
    case BUILT_IN_CHKP_BNDRET:
    case BUILT_IN_CHKP_INTERSECT:
    case BUILT_IN_CHKP_NARROW:
    case BUILT_IN_CHKP_EXTRACT_LOWER:
    case BUILT_IN_CHKP_EXTRACT_UPPER:
      /* Software implementation of Pointer Bounds Checker is NYI.
	 Target support is required.  */
      error ("Your target platform does not support -fcheck-pointer-bounds");
      break;

    default:	/* just do library call, if unknown builtin */
      break;
    }

  /* The switch statement above can drop through to cause the function
     to be called normally.  */
  return expand_call (exp, target, ignore);
}
/* Similar to expand_builtin but is used for instrumented calls.  */

rtx
expand_builtin_with_bounds (tree exp, rtx target,
			    rtx subtarget ATTRIBUTE_UNUSED,
			    machine_mode mode, int ignore)
{
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  gcc_assert (CALL_WITH_BOUNDS_P (exp));

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return targetm.expand_builtin (exp, target, subtarget, mode, ignore);

  gcc_assert (fcode > BEGIN_CHKP_BUILTINS
	      && fcode < END_CHKP_BUILTINS);

  switch (fcode)
    {
    case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
      target = expand_builtin_memcpy_with_bounds (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
      target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
      target = expand_builtin_memset_with_bounds (exp, target, mode);
      if (target)
	return target;
      break;

    default:
      break;
    }

  /* The switch statement above can drop through to cause the function
     to be called normally.  */
  return expand_call (exp, target, ignore);
}
/* Determine whether a tree node represents a call to a built-in
   function.  If the tree T is a call to a built-in function with
   the right number of arguments of the appropriate types, return
   the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
   Otherwise the return value is END_BUILTINS.  */

enum built_in_function
builtin_mathfn_code (const_tree t)
{
  const_tree fndecl, arg, parmlist;
  const_tree argtype, parmtype;
  const_call_expr_arg_iterator iter;

  if (TREE_CODE (t) != CALL_EXPR
      || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
    return END_BUILTINS;

  fndecl = get_callee_fndecl (t);
  if (fndecl == NULL_TREE
      || TREE_CODE (fndecl) != FUNCTION_DECL
      || ! DECL_BUILT_IN (fndecl)
      || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return END_BUILTINS;

  parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  init_const_call_expr_arg_iterator (t, &iter);
  for (; parmlist; parmlist = TREE_CHAIN (parmlist))
    {
      /* If a function doesn't take a variable number of arguments,
	 the last element in the list will have type `void'.  */
      parmtype = TREE_VALUE (parmlist);
      if (VOID_TYPE_P (parmtype))
	{
	  if (more_const_call_expr_args_p (&iter))
	    return END_BUILTINS;
	  return DECL_FUNCTION_CODE (fndecl);
	}

      if (! more_const_call_expr_args_p (&iter))
	return END_BUILTINS;

      arg = next_const_call_expr_arg (&iter);
      argtype = TREE_TYPE (arg);

      if (SCALAR_FLOAT_TYPE_P (parmtype))
	{
	  if (! SCALAR_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (COMPLEX_FLOAT_TYPE_P (parmtype))
	{
	  if (! COMPLEX_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (POINTER_TYPE_P (parmtype))
	{
	  if (! POINTER_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (INTEGRAL_TYPE_P (parmtype))
	{
	  if (! INTEGRAL_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else
	return END_BUILTINS;
    }

  /* Variable-length argument list.  */
  return DECL_FUNCTION_CODE (fndecl);
}
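
/* Editorial illustration (not from the original sources): given the
   GENERIC tree for

     double y = sqrt (x);

   builtin_mathfn_code returns BUILT_IN_SQRT after checking that the one
   REAL_TYPE parameter is matched by a scalar float argument; a call
   whose argument list does not match the declared parameter types
   yields END_BUILTINS instead.  */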
/* Fold a call to __builtin_constant_p, if we know its argument ARG will
   evaluate to a constant.  */

static tree
fold_builtin_constant_p (tree arg)
{
  /* We return 1 for a numeric type that's known to be a constant
     value at compile-time or for an aggregate type that's a
     literal constant.  */
  STRIP_NOPS (arg);

  /* If we know this is a constant, emit the constant of one.  */
  if (CONSTANT_CLASS_P (arg)
      || (TREE_CODE (arg) == CONSTRUCTOR
	  && TREE_CONSTANT (arg)))
    return integer_one_node;
  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (arg, 0);
      if (TREE_CODE (op) == STRING_CST
	  || (TREE_CODE (op) == ARRAY_REF
	      && integer_zerop (TREE_OPERAND (op, 1))
	      && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
	return integer_one_node;
    }

  /* If this expression has side effects, show we don't know it to be a
     constant.  Likewise if it's a pointer or aggregate type since in
     those case we only want literals, since those are only optimized
     when generating RTL, not later.
     And finally, if we are compiling an initializer, not code, we
     need to return a definite result now; there's not going to be any
     more optimization done.  */
  if (TREE_SIDE_EFFECTS (arg)
      || AGGREGATE_TYPE_P (TREE_TYPE (arg))
      || POINTER_TYPE_P (TREE_TYPE (arg))
      || folding_initializer
      || force_folding_builtin_constant_p)
    return integer_zero_node;

  return NULL_TREE;
}
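
/* Editorial illustration (not from the original sources):

     __builtin_constant_p (42)      - folds to 1 (CONSTANT_CLASS_P)
     __builtin_constant_p ("abc")   - folds to 1 (ADDR_EXPR of STRING_CST)
     __builtin_constant_p (x)       - folds to 0 only when no later pass
				      could still prove x constant

   a NULL_TREE result defers the decision, unless folding_initializer or
   force_folding_builtin_constant_p demands a definite answer now.  */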
/* Create builtin_expect with PRED and EXPECTED as its arguments and
   return it as a truthvalue.  */

static tree
build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
				tree predictor)
{
  tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;

  fn = builtin_decl_explicit (BUILT_IN_EXPECT);
  arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
  ret_type = TREE_TYPE (TREE_TYPE (fn));
  pred_type = TREE_VALUE (arg_types);
  expected_type = TREE_VALUE (TREE_CHAIN (arg_types));

  pred = fold_convert_loc (loc, pred_type, pred);
  expected = fold_convert_loc (loc, expected_type, expected);
  call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
				   predictor);

  return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
		 build_int_cst (ret_type, 0));
}
/* Fold a call to builtin_expect with arguments ARG0 and ARG1.  Return
   NULL_TREE if no simplification is possible.  */

tree
fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
{
  tree inner, fndecl, inner_arg0;
  enum tree_code code;

  /* Distribute the expected value over short-circuiting operators.
     See through the cast from truthvalue_type_node to long.  */
  inner_arg0 = arg0;
  while (CONVERT_EXPR_P (inner_arg0)
	 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
    inner_arg0 = TREE_OPERAND (inner_arg0, 0);

  /* If this is a builtin_expect within a builtin_expect keep the
     inner one.  See through a comparison against a constant.  It
     might have been added to create a truthvalue.  */
  inner = inner_arg0;

  if (COMPARISON_CLASS_P (inner)
      && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
    inner = TREE_OPERAND (inner, 0);

  if (TREE_CODE (inner) == CALL_EXPR
      && (fndecl = get_callee_fndecl (inner))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
    return arg0;

  inner = inner_arg0;
  code = TREE_CODE (inner);
  if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
    {
      tree op0 = TREE_OPERAND (inner, 0);
      tree op1 = TREE_OPERAND (inner, 1);

      op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
      op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
      inner = build2 (code, TREE_TYPE (inner), op0, op1);

      return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
    }

  /* If the argument isn't invariant then there's nothing else we can do.  */
  if (!TREE_CONSTANT (inner_arg0))
    return NULL_TREE;

  /* If we expect that a comparison against the argument will fold to
     a constant return the constant.  In practice, this means a true
     constant or the address of a non-weak symbol.  */
  inner = inner_arg0;
  STRIP_NOPS (inner);
  if (TREE_CODE (inner) == ADDR_EXPR)
    {
      do
	{
	  inner = TREE_OPERAND (inner, 0);
	}
      while (TREE_CODE (inner) == COMPONENT_REF
	     || TREE_CODE (inner) == ARRAY_REF);
      if ((TREE_CODE (inner) == VAR_DECL
	   || TREE_CODE (inner) == FUNCTION_DECL)
	  && DECL_WEAK (inner))
	return NULL_TREE;
    }

  /* Otherwise, ARG0 already has the proper type for the return value.  */
  return arg0;
}
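
/* Editorial illustration (not from the original sources): for

     if (__builtin_expect (p != 0 && q != 0, 1)) ...

   the TRUTH_ANDIF_EXPR branch above rewrites the predicate as roughly

     __builtin_expect (p != 0, 1) && __builtin_expect (q != 0, 1)

   so each short-circuit arm carries its own branch-probability hint.  */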
/* Fold a call to __builtin_classify_type with argument ARG.  */

static tree
fold_builtin_classify_type (tree arg)
{
  if (arg == 0)
    return build_int_cst (integer_type_node, no_type_class);

  return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
}
/* Fold a call to __builtin_strlen with argument ARG.  */

static tree
fold_builtin_strlen (location_t loc, tree type, tree arg)
{
  if (!validate_arg (arg, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree len = c_strlen (arg, 0);

      if (len)
	return fold_convert_loc (loc, type, len);

      return NULL_TREE;
    }
}
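
/* Editorial illustration (not from the original sources):

     size_t n = strlen ("hello");

   c_strlen finds the string constant, so the call folds to the constant
   5 converted to TYPE; for a non-literal argument c_strlen returns NULL
   and the call is kept as is.  */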
/* Fold a call to __builtin_inf or __builtin_huge_val.  */

static tree
fold_builtin_inf (location_t loc, tree type, int warn)
{
  REAL_VALUE_TYPE real;

  /* __builtin_inff is intended to be usable to define INFINITY on all
     targets.  If an infinity is not available, INFINITY expands "to a
     positive constant of type float that overflows at translation
     time", footnote "In this case, using INFINITY will violate the
     constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
     Thus we pedwarn to ensure this constraint violation is
     diagnosed.  */
  if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
    pedwarn (loc, 0, "target format does not support infinity");

  real_inf (&real);
  return build_real (type, real);
}
/* Fold a call to __builtin_nan or __builtin_nans with argument ARG.  */

static tree
fold_builtin_nan (tree arg, tree type, int quiet)
{
  REAL_VALUE_TYPE real;
  const char *str;

  if (!validate_arg (arg, POINTER_TYPE))
    return NULL_TREE;
  str = c_getstr (arg);
  if (!str)
    return NULL_TREE;

  if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
    return NULL_TREE;

  return build_real (type, real);
}
/* Return true if the floating point expression T has an integer value.
   We also allow +Inf, -Inf and NaN to be considered integer values.  */

static bool
integer_valued_real_p (tree t)
{
  switch (TREE_CODE (t))
    {
    case FLOAT_EXPR:
      return true;

    case ABS_EXPR:
    case SAVE_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 0));

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 1));

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 0))
	     && integer_valued_real_p (TREE_OPERAND (t, 1));

    case COND_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 1))
	     && integer_valued_real_p (TREE_OPERAND (t, 2));

    case REAL_CST:
      return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));

    CASE_CONVERT:
      {
	tree type = TREE_TYPE (TREE_OPERAND (t, 0));
	if (TREE_CODE (type) == INTEGER_TYPE)
	  return true;
	if (TREE_CODE (type) == REAL_TYPE)
	  return integer_valued_real_p (TREE_OPERAND (t, 0));
	break;
      }

    case CALL_EXPR:
      switch (builtin_mathfn_code (t))
	{
	CASE_FLT_FN (BUILT_IN_CEIL):
	CASE_FLT_FN (BUILT_IN_FLOOR):
	CASE_FLT_FN (BUILT_IN_NEARBYINT):
	CASE_FLT_FN (BUILT_IN_RINT):
	CASE_FLT_FN (BUILT_IN_ROUND):
	CASE_FLT_FN (BUILT_IN_TRUNC):
	  return true;

	CASE_FLT_FN (BUILT_IN_FMIN):
	CASE_FLT_FN (BUILT_IN_FMAX):
	  return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
	    && integer_valued_real_p (CALL_EXPR_ARG (t, 1));

	default:
	  break;
	}
      break;

    default:
      break;
    }
  return false;
}
/* FNDECL is assumed to be a builtin where truncation can be propagated
   across (for instance floor((double)f) == (double)floorf (f)).
   Do the transformation for a call with argument ARG.  */

static tree
fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Integer rounding functions are idempotent.  */
  if (fcode == builtin_mathfn_code (arg))
    return arg;

  /* If argument is already integer valued, and we don't need to worry
     about setting errno, there's no need to perform rounding.  */
  if (! flag_errno_math && integer_valued_real_p (arg))
    return arg;

  if (optimize)
    {
      tree arg0 = strip_float_extensions (arg);
      tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
      tree newtype = TREE_TYPE (arg0);
      tree decl;

      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
	  && (decl = mathfn_built_in (newtype, fcode)))
	return fold_convert_loc (loc, ftype,
				 build_call_expr_loc (loc, decl, 1,
						      fold_convert_loc (loc,
									newtype,
									arg0)));
    }
  return NULL_TREE;
}
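
/* Editorial illustration (not from the original sources): with float f,

     double d = floor ((double) f);

   strip_float_extensions exposes the float operand, mathfn_built_in
   locates floorf, and the call is rebuilt as (double) floorf (f),
   avoiding the wider double computation.  */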
/* FNDECL is assumed to be a builtin which can narrow the FP type of
   the argument, for instance lround((double)f) -> lroundf (f).
   Do the transformation for a call with argument ARG.  */

static tree
fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* If argument is already integer valued, and we don't need to worry
     about setting errno, there's no need to perform rounding.  */
  if (! flag_errno_math && integer_valued_real_p (arg))
    return fold_build1_loc (loc, FIX_TRUNC_EXPR,
			    TREE_TYPE (TREE_TYPE (fndecl)), arg);

  if (optimize)
    {
      tree ftype = TREE_TYPE (arg);
      tree arg0 = strip_float_extensions (arg);
      tree newtype = TREE_TYPE (arg0);
      tree decl;

      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
	  && (decl = mathfn_built_in (newtype, fcode)))
	return build_call_expr_loc (loc, decl, 1,
				    fold_convert_loc (loc, newtype, arg0));
    }

  /* Canonicalize iround (x) to lround (x) on ILP32 targets where
     sizeof (int) == sizeof (long).  */
  if (TYPE_PRECISION (integer_type_node)
      == TYPE_PRECISION (long_integer_type_node))
    {
      tree newfn = NULL_TREE;
      switch (fcode)
	{
	CASE_FLT_FN (BUILT_IN_ICEIL):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
	  break;

	CASE_FLT_FN (BUILT_IN_IFLOOR):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
	  break;

	CASE_FLT_FN (BUILT_IN_IROUND):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
	  break;

	CASE_FLT_FN (BUILT_IN_IRINT):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
	  break;

	default:
	  break;
	}

      if (newfn)
	{
	  tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
	  return fold_convert_loc (loc,
				   TREE_TYPE (TREE_TYPE (fndecl)), newcall);
	}
    }

  /* Canonicalize llround (x) to lround (x) on LP64 targets where
     sizeof (long long) == sizeof (long).  */
  if (TYPE_PRECISION (long_long_integer_type_node)
      == TYPE_PRECISION (long_integer_type_node))
    {
      tree newfn = NULL_TREE;
      switch (fcode)
	{
	CASE_FLT_FN (BUILT_IN_LLCEIL):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
	  break;

	CASE_FLT_FN (BUILT_IN_LLFLOOR):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
	  break;

	CASE_FLT_FN (BUILT_IN_LLROUND):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
	  break;

	CASE_FLT_FN (BUILT_IN_LLRINT):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
	  break;

	default:
	  break;
	}

      if (newfn)
	{
	  tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
	  return fold_convert_loc (loc,
				   TREE_TYPE (TREE_TYPE (fndecl)), newcall);
	}
    }

  return NULL_TREE;
}
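
/* Editorial illustration (not from the original sources): on a typical
   LP64 target, where long long and long have the same precision,

     long long r = llround (x);

   is canonicalized by the second block above into (long long) lround (x),
   so later passes only need to reason about one rounding entry point.  */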
/* Fold call to builtin cabs, cabsf or cabsl with argument ARG.  TYPE is the
   return type.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
{
  tree res;

  if (!validate_arg (arg, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if (TREE_CODE (arg) == COMPLEX_CST
      && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
			      type, mpfr_hypot)))
    return res;

  if (TREE_CODE (arg) == COMPLEX_EXPR)
    {
      tree real = TREE_OPERAND (arg, 0);
      tree imag = TREE_OPERAND (arg, 1);

      /* If either part is zero, cabs is fabs of the other.  */
      if (real_zerop (real))
	return fold_build1_loc (loc, ABS_EXPR, type, imag);
      if (real_zerop (imag))
	return fold_build1_loc (loc, ABS_EXPR, type, real);

      /* cabs(x+xi) -> fabs(x)*sqrt(2).  */
      if (flag_unsafe_math_optimizations
	  && operand_equal_p (real, imag, OEP_PURE_SAME))
	{
	  const REAL_VALUE_TYPE sqrt2_trunc
	    = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
	  return fold_build2_loc (loc, MULT_EXPR, type,
				  fold_build1_loc (loc, ABS_EXPR, type, real),
				  build_real (type, sqrt2_trunc));
	}
    }

  /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z).  */
  if (TREE_CODE (arg) == NEGATE_EXPR
      || TREE_CODE (arg) == CONJ_EXPR)
    return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));

  /* Don't do this when optimizing for size.  */
  if (flag_unsafe_math_optimizations
      && optimize && optimize_function_for_speed_p (cfun))
    {
      tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

      if (sqrtfn != NULL_TREE)
	{
	  tree rpart, ipart, result;

	  arg = builtin_save_expr (arg);

	  rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
	  ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);

	  rpart = builtin_save_expr (rpart);
	  ipart = builtin_save_expr (ipart);

	  result = fold_build2_loc (loc, PLUS_EXPR, type,
				    fold_build2_loc (loc, MULT_EXPR, type,
						     rpart, rpart),
				    fold_build2_loc (loc, MULT_EXPR, type,
						     ipart, ipart));

	  return build_call_expr_loc (loc, sqrtfn, 1, result);
	}
    }

  return NULL_TREE;
}
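
/* Editorial illustration (not from the original sources): under
   -funsafe-math-optimizations the folds above give, for real x,

     cabs (x + x * I)   ->  fabs (x) * sqrt (2)
     cabs (-z)          ->  cabs (z)

   and when optimizing for speed, the final block expands cabs (z) as
   sqrt (r*r + i*i) with both parts saved first so each is evaluated
   only once.  */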
/* Build a complex (inf +- 0i) for the result of cproj.  TYPE is the
   complex tree type of the result.  If NEG is true, the imaginary
   zero is negative.  */

static tree
build_complex_cproj (tree type, bool neg)
{
  REAL_VALUE_TYPE rinf, rzero = dconst0;

  real_inf (&rinf);
  rzero.sign = neg;
  return build_complex (type, build_real (TREE_TYPE (type), rinf),
			build_real (TREE_TYPE (type), rzero));
}
/* Fold call to builtin cproj, cprojf or cprojl with argument ARG.  TYPE is the
   return type.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cproj (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
    return NULL_TREE;

  /* If there are no infinities, return arg.  */
  if (! HONOR_INFINITIES (type))
    return non_lvalue_loc (loc, arg);

  /* Calculate the result when the argument is a constant.  */
  if (TREE_CODE (arg) == COMPLEX_CST)
    {
      const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      if (real_isinf (real) || real_isinf (imag))
	return build_complex_cproj (type, imag->sign);
      else
	return arg;
    }
  else if (TREE_CODE (arg) == COMPLEX_EXPR)
    {
      tree real = TREE_OPERAND (arg, 0);
      tree imag = TREE_OPERAND (arg, 1);

      STRIP_NOPS (real);
      STRIP_NOPS (imag);

      /* If the real part is inf and the imag part is known to be
	 nonnegative, return (inf + 0i).  Remember side-effects are
	 possible in the imag part.  */
      if (TREE_CODE (real) == REAL_CST
	  && real_isinf (TREE_REAL_CST_PTR (real))
	  && tree_expr_nonnegative_p (imag))
	return omit_one_operand_loc (loc, type,
				     build_complex_cproj (type, false),
				     arg);

      /* If the imag part is inf, return (inf+I*copysign(0,imag)).
	 Remember side-effects are possible in the real part.  */
      if (TREE_CODE (imag) == REAL_CST
	  && real_isinf (TREE_REAL_CST_PTR (imag)))
	return
	  omit_one_operand_loc (loc, type,
				build_complex_cproj (type, TREE_REAL_CST_PTR
						     (imag)->sign), arg);
    }

  return NULL_TREE;
}
/* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_sqrt (location_t loc, tree arg, tree type)
{
  tree res;
  enum built_in_function fcode;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
    return res;

  /* Optimize sqrt(expN(x)) = expN(x*0.5).  */
  fcode = builtin_mathfn_code (arg);
  if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
    {
      tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      arg = fold_build2_loc (loc, MULT_EXPR, type,
			     CALL_EXPR_ARG (arg, 0),
			     build_real (type, dconsthalf));
      return build_call_expr_loc (loc, expfn, 1, arg);
    }

  /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)).  */
  if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
    {
      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

      if (powfn)
	{
	  tree arg0 = CALL_EXPR_ARG (arg, 0);
	  tree tree_root;
	  /* The inner root was either sqrt or cbrt.  */
	  /* This was a conditional expression but it triggered a bug
	     in Sun C 5.5.  */
	  REAL_VALUE_TYPE dconstroot;
	  if (BUILTIN_SQRT_P (fcode))
	    dconstroot = dconsthalf;
	  else
	    dconstroot = dconst_third ();

	  /* Adjust for the outer root.  */
	  SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
	  dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
	  tree_root = build_real (type, dconstroot);
	  return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
	}
    }

  /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5).  */
  if (flag_unsafe_math_optimizations
      && (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL))
    {
      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      tree arg0 = CALL_EXPR_ARG (arg, 0);
      tree arg1 = CALL_EXPR_ARG (arg, 1);
      tree narg1;
      if (!tree_expr_nonnegative_p (arg0))
	arg0 = build1 (ABS_EXPR, type, arg0);
      narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
			       build_real (type, dconsthalf));
      return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
    }

  return NULL_TREE;
}
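
/* Editorial illustration (not from the original sources): the unsafe-math
   folds above include

     sqrt (exp (x))     ->  exp (x * 0.5)
     sqrt (cbrt (x))    ->  pow (x, 1.0/6.0)
     sqrt (pow (x, y))  ->  pow (fabs (x), y * 0.5)

   the fabs in the last rule keeps pow's domain valid when x may be
   negative.  */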
/* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cbrt (location_t loc, tree arg, tree type)
{
  const enum built_in_function fcode = builtin_mathfn_code (arg);
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
    return res;

  if (flag_unsafe_math_optimizations)
    {
      /* Optimize cbrt(expN(x)) -> expN(x/3).  */
      if (BUILTIN_EXPONENT_P (fcode))
	{
	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
	  const REAL_VALUE_TYPE third_trunc =
	    real_value_truncate (TYPE_MODE (type), dconst_third ());
	  arg = fold_build2_loc (loc, MULT_EXPR, type,
				 CALL_EXPR_ARG (arg, 0),
				 build_real (type, third_trunc));
	  return build_call_expr_loc (loc, expfn, 1, arg);
	}

      /* Optimize cbrt(sqrt(x)) -> pow(x,1/6).  */
      if (BUILTIN_SQRT_P (fcode))
	{
	  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

	  if (powfn)
	    {
	      tree arg0 = CALL_EXPR_ARG (arg, 0);
	      tree tree_root;
	      REAL_VALUE_TYPE dconstroot = dconst_third ();

	      SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
	      dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
	      tree_root = build_real (type, dconstroot);
	      return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
	    }
	}

      /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
	{
	  tree arg0 = CALL_EXPR_ARG (arg, 0);
	  if (tree_expr_nonnegative_p (arg0))
	    {
	      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

	      if (powfn)
		{
		  tree tree_root;
		  REAL_VALUE_TYPE dconstroot;

		  real_arithmetic (&dconstroot, MULT_EXPR,
				   dconst_third_ptr (), dconst_third_ptr ());
		  dconstroot = real_value_truncate (TYPE_MODE (type),
						    dconstroot);
		  tree_root = build_real (type, dconstroot);
		  return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
		}
	    }
	}

      /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL)
	{
	  tree arg00 = CALL_EXPR_ARG (arg, 0);
	  tree arg01 = CALL_EXPR_ARG (arg, 1);
	  if (tree_expr_nonnegative_p (arg00))
	    {
	      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
	      const REAL_VALUE_TYPE dconstroot
		= real_value_truncate (TYPE_MODE (type), dconst_third ());
	      tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
					     build_real (type, dconstroot));
	      return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
	    }
	}
    }

  return NULL_TREE;
}
/* Fold function call to builtin cos, cosf, or cosl with argument ARG.
   TYPE is the type of the return value.  Return NULL_TREE if no
   simplification can be made.  */

static tree
fold_builtin_cos (location_t loc,
		  tree arg, tree type, tree fndecl)
{
  tree res, narg;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
    return res;

  /* Optimize cos(-x) into cos (x).  */
  if ((narg = fold_strip_sign_ops (arg)))
    return build_call_expr_loc (loc, fndecl, 1, narg);

  return NULL_TREE;
}

/* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
{
  tree res, narg;

  if (validate_arg (arg, REAL_TYPE))
    {
      /* Calculate the result when the argument is a constant.  */
      if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
	return res;

      /* Optimize cosh(-x) into cosh (x).  */
      if ((narg = fold_strip_sign_ops (arg)))
	return build_call_expr_loc (loc, fndecl, 1, narg);
    }

  return NULL_TREE;
}

/* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
   argument ARG.  TYPE is the type of the return value.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
		   bool hyper)
{
  if (validate_arg (arg, COMPLEX_TYPE)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
    {
      tree tmp;

      /* Calculate the result when the argument is a constant.  */
      if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
	return tmp;

      /* Optimize fn(-x) into fn(x).  */
      if ((tmp = fold_strip_sign_ops (arg)))
	return build_call_expr_loc (loc, fndecl, 1, tmp);
    }

  return NULL_TREE;
}

/* Fold function call to builtin tan, tanf, or tanl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_tan (tree arg, tree type)
{
  enum built_in_function fcode;
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
    return res;

  /* Optimize tan(atan(x)) = x.  */
  fcode = builtin_mathfn_code (arg);
  if (flag_unsafe_math_optimizations
      && (fcode == BUILT_IN_ATAN
	  || fcode == BUILT_IN_ATANF
	  || fcode == BUILT_IN_ATANL))
    return CALL_EXPR_ARG (arg, 0);

  return NULL_TREE;
}
/* Fold function call to builtin sincos, sincosf, or sincosl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_sincos (location_t loc,
		     tree arg0, tree arg1, tree arg2)
{
  tree type;
  tree res, fn, call;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  type = TREE_TYPE (arg0);

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
    return res;

  /* Canonicalize sincos to cexpi.  */
  if (!targetm.libc_has_function (function_c99_math_complex))
    return NULL_TREE;
  fn = mathfn_built_in (type, BUILT_IN_CEXPI);
  if (!fn)
    return NULL_TREE;

  call = build_call_expr_loc (loc, fn, 1, arg0);
  call = builtin_save_expr (call);

  return build2 (COMPOUND_EXPR, void_type_node,
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg1),
			 build1 (IMAGPART_EXPR, type, call)),
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg2),
			 build1 (REALPART_EXPR, type, call)));
}
/* Fold function call to builtin cexp, cexpf, or cexpl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cexp (location_t loc, tree arg0, tree type)
{
  tree rtype;
  tree realp, imagp, ifn;
  tree res;

  if (!validate_arg (arg0, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
    return res;

  rtype = TREE_TYPE (TREE_TYPE (arg0));

  /* In case we can figure out the real part of arg0 and it is constant zero
     fold to cexpi.  */
  if (!targetm.libc_has_function (function_c99_math_complex))
    return NULL_TREE;
  ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
  if (!ifn)
    return NULL_TREE;

  if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
      && real_zerop (realp))
    {
      tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
      return build_call_expr_loc (loc, ifn, 1, narg);
    }

  /* In case we can easily decompose real and imaginary parts split cexp
     to exp (r) * cexpi (i).  */
  if (flag_unsafe_math_optimizations
      && realp)
    {
      tree rfn, rcall, icall;

      rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
      if (!rfn)
	return NULL_TREE;

      imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
      if (!imagp)
	return NULL_TREE;

      icall = build_call_expr_loc (loc, ifn, 1, imagp);
      icall = builtin_save_expr (icall);
      rcall = build_call_expr_loc (loc, rfn, 1, realp);
      rcall = builtin_save_expr (rcall);
      return fold_build2_loc (loc, COMPLEX_EXPR, type,
			      fold_build2_loc (loc, MULT_EXPR, rtype,
					       rcall,
					       fold_build1_loc (loc, REALPART_EXPR,
								rtype, icall)),
			      fold_build2_loc (loc, MULT_EXPR, rtype,
					       rcall,
					       fold_build1_loc (loc, IMAGPART_EXPR,
								rtype, icall)));
    }

  return NULL_TREE;
}
/* Fold function call to builtin trunc, truncf or truncl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize trunc of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE r, x;
      tree type = TREE_TYPE (TREE_TYPE (fndecl));

      x = TREE_REAL_CST (arg);
      real_trunc (&r, TYPE_MODE (type), &x);
      return build_real (type, r);
    }

  return fold_trunc_transparent_mathfn (loc, fndecl, arg);
}

/* Fold function call to builtin floor, floorf or floorl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_floor (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize floor of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE x;

      x = TREE_REAL_CST (arg);
      if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
	{
	  tree type = TREE_TYPE (TREE_TYPE (fndecl));
	  REAL_VALUE_TYPE r;

	  real_floor (&r, TYPE_MODE (type), &x);
	  return build_real (type, r);
	}
    }

  /* Fold floor (x) where x is nonnegative to trunc (x).  */
  if (tree_expr_nonnegative_p (arg))
    {
      tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
      if (truncfn)
	return build_call_expr_loc (loc, truncfn, 1, arg);
    }

  return fold_trunc_transparent_mathfn (loc, fndecl, arg);
}

/* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize ceil of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE x;

      x = TREE_REAL_CST (arg);
      if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
	{
	  tree type = TREE_TYPE (TREE_TYPE (fndecl));
	  REAL_VALUE_TYPE r;

	  real_ceil (&r, TYPE_MODE (type), &x);
	  return build_real (type, r);
	}
    }

  return fold_trunc_transparent_mathfn (loc, fndecl, arg);
}

/* Fold function call to builtin round, roundf or roundl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_round (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize round of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE x;

      x = TREE_REAL_CST (arg);
      if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
	{
	  tree type = TREE_TYPE (TREE_TYPE (fndecl));
	  REAL_VALUE_TYPE r;

	  real_round (&r, TYPE_MODE (type), &x);
	  return build_real (type, r);
	}
    }

  return fold_trunc_transparent_mathfn (loc, fndecl, arg);
}
/* Fold function call to builtin lround, lroundf or lroundl (or the
   corresponding long long versions) and other rounding functions.  ARG
   is the argument to the call.  Return NULL_TREE if no simplification
   can be made.  */

static tree
fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize lround of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);

      if (real_isfinite (&x))
	{
	  tree itype = TREE_TYPE (TREE_TYPE (fndecl));
	  tree ftype = TREE_TYPE (arg);
	  REAL_VALUE_TYPE r;
	  bool fail = false;

	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_IFLOOR):
	    CASE_FLT_FN (BUILT_IN_LFLOOR):
	    CASE_FLT_FN (BUILT_IN_LLFLOOR):
	      real_floor (&r, TYPE_MODE (ftype), &x);
	      break;

	    CASE_FLT_FN (BUILT_IN_ICEIL):
	    CASE_FLT_FN (BUILT_IN_LCEIL):
	    CASE_FLT_FN (BUILT_IN_LLCEIL):
	      real_ceil (&r, TYPE_MODE (ftype), &x);
	      break;

	    CASE_FLT_FN (BUILT_IN_IROUND):
	    CASE_FLT_FN (BUILT_IN_LROUND):
	    CASE_FLT_FN (BUILT_IN_LLROUND):
	      real_round (&r, TYPE_MODE (ftype), &x);
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  wide_int val = real_to_integer (&r, &fail, TYPE_PRECISION (itype));
	  if (!fail)
	    return wide_int_to_tree (itype, val);
	}
    }

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x).  */
      if (tree_expr_nonnegative_p (arg))
	return fold_build1_loc (loc, FIX_TRUNC_EXPR,
				TREE_TYPE (TREE_TYPE (fndecl)), arg);
      break;
    default:;
    }

  return fold_fixed_mathfn (loc, fndecl, arg);
}
/* Fold function call to builtin ffs, clz, ctz, popcount and parity
   and their long and long long variants (i.e. ffsl and ffsll).  ARG is
   the argument to the call.  Return NULL_TREE if no simplification can
   be made.  */

static tree
fold_builtin_bitop (tree fndecl, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize for constant argument.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      tree type = TREE_TYPE (arg);
      int result;

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	CASE_INT_FN (BUILT_IN_FFS):
	  result = wi::ffs (arg);
	  break;

	CASE_INT_FN (BUILT_IN_CLZ):
	  if (wi::ne_p (arg, 0))
	    result = wi::clz (arg);
	  else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
	    result = TYPE_PRECISION (type);
	  break;

	CASE_INT_FN (BUILT_IN_CTZ):
	  if (wi::ne_p (arg, 0))
	    result = wi::ctz (arg);
	  else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
	    result = TYPE_PRECISION (type);
	  break;

	CASE_INT_FN (BUILT_IN_CLRSB):
	  result = wi::clrsb (arg);
	  break;

	CASE_INT_FN (BUILT_IN_POPCOUNT):
	  result = wi::popcount (arg);
	  break;

	CASE_INT_FN (BUILT_IN_PARITY):
	  result = wi::parity (arg);
	  break;

	default:
	  gcc_unreachable ();
	}

      return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
    }

  return NULL_TREE;
}
/* Fold function call to builtin_bswap and the short, long and long long
   variants.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_bswap (tree fndecl, tree arg)
{
  if (! validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize constant value.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_BSWAP16:
	case BUILT_IN_BSWAP32:
	case BUILT_IN_BSWAP64:
	  {
	    signop sgn = TYPE_SIGN (type);
	    tree result =
	      wide_int_to_tree (type,
				wide_int::from (arg, TYPE_PRECISION (type),
						sgn).bswap ());
	    return result;
	  }
	default:
	  gcc_unreachable ();
	}
    }

  return NULL_TREE;
}
/* Fold a builtin function call to hypot, hypotf, or hypotl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_hypot (location_t loc, tree fndecl,
		    tree arg0, tree arg1, tree type)
{
  tree res, narg0, narg1;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
    return res;

  /* If either argument to hypot has a negate or abs, strip that off.
     E.g. hypot(-x,fabs(y)) -> hypot(x,y).  */
  narg0 = fold_strip_sign_ops (arg0);
  narg1 = fold_strip_sign_ops (arg1);
  if (narg0 || narg1)
    return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
				narg1 ? narg1 : arg1);

  /* If either argument is zero, hypot is fabs of the other.  */
  if (real_zerop (arg0))
    return fold_build1_loc (loc, ABS_EXPR, type, arg1);
  else if (real_zerop (arg1))
    return fold_build1_loc (loc, ABS_EXPR, type, arg0);

  /* hypot(x,x) -> fabs(x)*sqrt(2).  */
  if (flag_unsafe_math_optimizations
      && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
    {
      const REAL_VALUE_TYPE sqrt2_trunc
	= real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
      return fold_build2_loc (loc, MULT_EXPR, type,
			      fold_build1_loc (loc, ABS_EXPR, type, arg0),
			      build_real (type, sqrt2_trunc));
    }

  return NULL_TREE;
}
/* Fold a builtin function call to pow, powf, or powl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
{
  tree res;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
    return res;

  /* Optimize pow(1.0,y) = 1.0.  */
  if (real_onep (arg0))
    return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);

  if (TREE_CODE (arg1) == REAL_CST
      && !TREE_OVERFLOW (arg1))
    {
      REAL_VALUE_TYPE cint;
      REAL_VALUE_TYPE c;
      HOST_WIDE_INT n;

      c = TREE_REAL_CST (arg1);

      /* Optimize pow(x,0.0) = 1.0.  */
      if (REAL_VALUES_EQUAL (c, dconst0))
	return omit_one_operand_loc (loc, type, build_real (type, dconst1),
				     arg0);

      /* Optimize pow(x,1.0) = x.  */
      if (REAL_VALUES_EQUAL (c, dconst1))
	return arg0;

      /* Optimize pow(x,-1.0) = 1.0/x.  */
      if (REAL_VALUES_EQUAL (c, dconstm1))
	return fold_build2_loc (loc, RDIV_EXPR, type,
				build_real (type, dconst1), arg0);

      /* Optimize pow(x,0.5) = sqrt(x).  */
      if (flag_unsafe_math_optimizations
	  && REAL_VALUES_EQUAL (c, dconsthalf))
	{
	  tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

	  if (sqrtfn != NULL_TREE)
	    return build_call_expr_loc (loc, sqrtfn, 1, arg0);
	}

      /* Optimize pow(x,1.0/3.0) = cbrt(x).  */
      if (flag_unsafe_math_optimizations)
	{
	  const REAL_VALUE_TYPE dconstroot
	    = real_value_truncate (TYPE_MODE (type), dconst_third ());

	  if (REAL_VALUES_EQUAL (c, dconstroot))
	    {
	      tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
	      if (cbrtfn != NULL_TREE)
		return build_call_expr_loc (loc, cbrtfn, 1, arg0);
	    }
	}

      /* Check for an integer exponent.  */
      n = real_to_integer (&c);
      real_from_integer (&cint, VOIDmode, n, SIGNED);
      if (real_identical (&c, &cint))
	{
	  /* Attempt to evaluate pow at compile-time, unless this should
	     raise an exception.  */
	  if (TREE_CODE (arg0) == REAL_CST
	      && !TREE_OVERFLOW (arg0)
	      && (n > 0
		  || (!flag_trapping_math && !flag_errno_math)
		  || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
	    {
	      REAL_VALUE_TYPE x;
	      bool inexact;

	      x = TREE_REAL_CST (arg0);
	      inexact = real_powi (&x, TYPE_MODE (type), &x, n);
	      if (flag_unsafe_math_optimizations || !inexact)
		return build_real (type, x);
	    }

	  /* Strip sign ops from even integer powers.  */
	  if ((n & 1) == 0 && flag_unsafe_math_optimizations)
	    {
	      tree narg0 = fold_strip_sign_ops (arg0);
	      if (narg0)
		return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
	    }
	}
    }

  if (flag_unsafe_math_optimizations)
    {
      const enum built_in_function fcode = builtin_mathfn_code (arg0);

      /* Optimize pow(expN(x),y) = expN(x*y).  */
      if (BUILTIN_EXPONENT_P (fcode))
	{
	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
	  return build_call_expr_loc (loc, expfn, 1, arg);
	}

      /* Optimize pow(sqrt(x),y) = pow(x,y*0.5).  */
      if (BUILTIN_SQRT_P (fcode))
	{
	  tree narg0 = CALL_EXPR_ARG (arg0, 0);
	  tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
					build_real (type, dconsthalf));
	  return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
	}

      /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
	{
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  if (tree_expr_nonnegative_p (arg))
	    {
	      const REAL_VALUE_TYPE dconstroot
		= real_value_truncate (TYPE_MODE (type), dconst_third ());
	      tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
					    build_real (type, dconstroot));
	      return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
	    }
	}

      /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL)
	{
	  tree arg00 = CALL_EXPR_ARG (arg0, 0);
	  if (tree_expr_nonnegative_p (arg00))
	    {
	      tree arg01 = CALL_EXPR_ARG (arg0, 1);
	      tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
	      return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
	    }
	}
    }

  return NULL_TREE;
}
/* Fold a builtin function call to powi, powif, or powil with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
		   tree arg0, tree arg1, tree type)
{
  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize pow(1.0,y) = 1.0.  */
  if (real_onep (arg0))
    return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);

  if (tree_fits_shwi_p (arg1))
    {
      HOST_WIDE_INT c = tree_to_shwi (arg1);

      /* Evaluate powi at compile-time.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && !TREE_OVERFLOW (arg0))
	{
	  REAL_VALUE_TYPE x;
	  x = TREE_REAL_CST (arg0);
	  real_powi (&x, TYPE_MODE (type), &x, c);
	  return build_real (type, x);
	}

      /* Optimize pow(x,0) = 1.0.  */
      if (c == 0)
	return omit_one_operand_loc (loc, type, build_real (type, dconst1),
				     arg0);

      /* Optimize pow(x,1) = x.  */
      if (c == 1)
	return arg0;

      /* Optimize pow(x,-1) = 1.0/x.  */
      if (c == -1)
	return fold_build2_loc (loc, RDIV_EXPR, type,
				build_real (type, dconst1), arg0);
    }

  return NULL_TREE;
}
/* A subroutine of fold_builtin to fold the various exponent
   functions.  Return NULL_TREE if no simplification can be made.
   FUNC is the corresponding MPFR exponent function.  */

static tree
fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
		       int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));
      tree res;

      /* Calculate the result when the argument is a constant.  */
      if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
	return res;

      /* Optimize expN(logN(x)) = x.  */
      if (flag_unsafe_math_optimizations)
	{
	  const enum built_in_function fcode = builtin_mathfn_code (arg);

	  if ((func == mpfr_exp
	       && (fcode == BUILT_IN_LOG
		   || fcode == BUILT_IN_LOGF
		   || fcode == BUILT_IN_LOGL))
	      || (func == mpfr_exp2
		  && (fcode == BUILT_IN_LOG2
		      || fcode == BUILT_IN_LOG2F
		      || fcode == BUILT_IN_LOG2L))
	      || (func == mpfr_exp10
		  && (fcode == BUILT_IN_LOG10
		      || fcode == BUILT_IN_LOG10F
		      || fcode == BUILT_IN_LOG10L)))
	    return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
	}
    }

  return NULL_TREE;
}
/* Fold function call to builtin memchr.  ARG1, ARG2 and LEN are the
   arguments to the call, and TYPE is its return type.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
{
  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, INTEGER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1;

      if (TREE_CODE (arg2) != INTEGER_CST
	  || !tree_fits_uhwi_p (len))
	return NULL_TREE;

      p1 = c_getstr (arg1);
      if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
	{
	  char c;
	  const char *r;
	  tree tem;

	  if (target_char_cast (arg2, &c))
	    return NULL_TREE;

	  r = (const char *) memchr (p1, c, tree_to_uhwi (len));

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (arg1), 0);

	  tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
	  return fold_convert_loc (loc, type, tem);
	}
      return NULL_TREE;
    }
}

/* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
				  arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* If all arguments are constant, and the value of len is not greater
     than the lengths of arg1 and arg2, evaluate at compile-time.  */
  if (tree_fits_uhwi_p (len) && p1 && p2
      && compare_tree_int (len, strlen (p1) + 1) <= 0
      && compare_tree_int (len, strlen (p2) + 1) <= 0)
    {
      const int r = memcmp (p1, p2, tree_to_uhwi (len));

      if (r > 0)
	return integer_one_node;
      else if (r < 0)
	return integer_minus_one_node;
      else
	return integer_zero_node;
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg1)));
      tree ind2
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
/* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return integer_zero_node;

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  if (p1 && p2)
    {
      const int i = strcmp (p1, p2);
      if (i < 0)
	return integer_minus_one_node;
      else if (i > 0)
	return integer_one_node;
      else
	return integer_zero_node;
    }

  /* If the second arg is "", return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0')
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      return fold_convert_loc (loc, integer_type_node,
			       build1 (INDIRECT_REF, cst_uchar_node,
				       fold_convert_loc (loc,
							 cst_uchar_ptr_node,
							 arg1)));
    }

  /* If the first arg is "", return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0')
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree temp
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg2)));
      return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
    }

  return NULL_TREE;
}

/* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
				  arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  if (tree_fits_uhwi_p (len) && p1 && p2)
    {
      const int i = strncmp (p1, p2, tree_to_uhwi (len));
      if (i > 0)
	return integer_one_node;
      else if (i < 0)
	return integer_minus_one_node;
      else
	return integer_zero_node;
    }

  /* If the second arg is "", and the length is greater than zero,
     return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      return fold_convert_loc (loc, integer_type_node,
			       build1 (INDIRECT_REF, cst_uchar_node,
				       fold_convert_loc (loc,
							 cst_uchar_ptr_node,
							 arg1)));
    }

  /* If the first arg is "", and the length is greater than zero,
     return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree temp = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg2)));
      return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1 = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg1)));
      tree ind2 = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
/* Fold function call to builtin signbit, signbitf or signbitl with argument
   ARG.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_signbit (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* If ARG is a compile-time constant, determine the result.  */
  if (TREE_CODE (arg) == REAL_CST
      && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE c;

      c = TREE_REAL_CST (arg);
      return (REAL_VALUE_NEGATIVE (c)
	      ? build_one_cst (type)
	      : build_zero_cst (type));
    }

  /* If ARG is non-negative, the result is always zero.  */
  if (tree_expr_nonnegative_p (arg))
    return omit_one_operand_loc (loc, type, integer_zero_node, arg);

  /* If ARG's format doesn't have signed zeros, return "arg < 0.0".  */
  if (!HONOR_SIGNED_ZEROS (arg))
    return fold_convert (type,
			 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
					  build_real (TREE_TYPE (arg), dconst0)));

  return NULL_TREE;
}

/* Fold function call to builtin copysign, copysignf or copysignl with
   arguments ARG1 and ARG2.  Return NULL_TREE if no simplification can
   be made.  */

static tree
fold_builtin_copysign (location_t loc, tree fndecl,
		       tree arg1, tree arg2, tree type)
{
  tree tem;

  if (!validate_arg (arg1, REAL_TYPE)
      || !validate_arg (arg2, REAL_TYPE))
    return NULL_TREE;

  /* copysign(X,X) is X.  */
  if (operand_equal_p (arg1, arg2, 0))
    return fold_convert_loc (loc, type, arg1);

  /* If ARG1 and ARG2 are compile-time constants, determine the result.  */
  if (TREE_CODE (arg1) == REAL_CST
      && TREE_CODE (arg2) == REAL_CST
      && !TREE_OVERFLOW (arg1)
      && !TREE_OVERFLOW (arg2))
    {
      REAL_VALUE_TYPE c1, c2;

      c1 = TREE_REAL_CST (arg1);
      c2 = TREE_REAL_CST (arg2);
      /* c1.sign := c2.sign.  */
      real_copysign (&c1, &c2);
      return build_real (type, c1);
    }

  /* copysign(X, Y) is fabs(X) when Y is always non-negative.
     Remember to evaluate Y for side-effects.  */
  if (tree_expr_nonnegative_p (arg2))
    return omit_one_operand_loc (loc, type,
				 fold_build1_loc (loc, ABS_EXPR, type, arg1),
				 arg2);

  /* Strip sign changing operations for the first argument.  */
  tem = fold_strip_sign_ops (arg1);
  if (tem)
    return build_call_expr_loc (loc, fndecl, 2, tem, arg2);

  return NULL_TREE;
}
/* Fold a call to builtin isascii with argument ARG.  */

static tree
fold_builtin_isascii (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      /* Transform isascii(c) -> ((c & ~0x7f) == 0).  */
      arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
			 build_int_cst (integer_type_node,
					~ (unsigned HOST_WIDE_INT) 0x7f));
      return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
			      arg, integer_zero_node);
    }
}

/* Fold a call to builtin toascii with argument ARG.  */

static tree
fold_builtin_toascii (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Transform toascii(c) -> (c & 0x7f).  */
  return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
			  build_int_cst (integer_type_node, 0x7f));
}

/* Fold a call to builtin isdigit with argument ARG.  */

static tree
fold_builtin_isdigit (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9.  */
      /* According to the C standard, isdigit is unaffected by locale.
	 However, it definitely is affected by the target character set.  */
      unsigned HOST_WIDE_INT target_digit0
	= lang_hooks.to_target_charset ('0');

      if (target_digit0 == 0)
	return NULL_TREE;

      arg = fold_convert_loc (loc, unsigned_type_node, arg);
      arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
			 build_int_cst (unsigned_type_node, target_digit0));
      return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
			      build_int_cst (unsigned_type_node, 9));
    }
}
/* Fold a call to fabs, fabsf or fabsl with argument ARG.  */

static tree
fold_builtin_fabs (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  arg = fold_convert_loc (loc, type, arg);
  if (TREE_CODE (arg) == REAL_CST)
    return fold_abs_const (arg, type);
  return fold_build1_loc (loc, ABS_EXPR, type, arg);
}

/* Fold a call to abs, labs, llabs or imaxabs with argument ARG.  */

static tree
fold_builtin_abs (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  arg = fold_convert_loc (loc, type, arg);
  if (TREE_CODE (arg) == INTEGER_CST)
    return fold_abs_const (arg, type);
  return fold_build1_loc (loc, ABS_EXPR, type, arg);
}

/* Fold a fma operation with arguments ARG[012].  */

tree
fold_fma (location_t loc ATTRIBUTE_UNUSED,
	  tree type, tree arg0, tree arg1, tree arg2)
{
  if (TREE_CODE (arg0) == REAL_CST
      && TREE_CODE (arg1) == REAL_CST
      && TREE_CODE (arg2) == REAL_CST)
    return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);

  return NULL_TREE;
}
/* Fold a call to fma, fmaf, or fmal with arguments ARG[012].  */

static tree
fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
{
  if (validate_arg (arg0, REAL_TYPE)
      && validate_arg (arg1, REAL_TYPE)
      && validate_arg (arg2, REAL_TYPE))
    {
      tree tem = fold_fma (loc, type, arg0, arg1, arg2);
      if (tem)
	return tem;

      /* ??? Only expand to FMA_EXPR if it's directly supported.  */
      if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
	return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
    }
  return NULL_TREE;
}

/* Fold a call to builtin fmin or fmax.  */

static tree
fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
			tree type, bool max)
{
  if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
    {
      /* Calculate the result when the argument is a constant.  */
      tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));

      if (res)
	return res;

      /* If either argument is NaN, return the other one.  Avoid the
	 transformation if we get (and honor) a signalling NaN.  Using
	 omit_one_operand() ensures we create a non-lvalue.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && real_isnan (&TREE_REAL_CST (arg0))
	  && (! HONOR_SNANS (arg0)
	      || ! TREE_REAL_CST (arg0).signalling))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      if (TREE_CODE (arg1) == REAL_CST
	  && real_isnan (&TREE_REAL_CST (arg1))
	  && (! HONOR_SNANS (arg1)
	      || ! TREE_REAL_CST (arg1).signalling))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Transform fmin/fmax(x,x) -> x.  */
      if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR.  C99 requires these
	 functions to return the numeric arg if the other one is NaN.
	 These tree codes don't honor that, so only transform if
	 -ffinite-math-only is set.  C99 doesn't require -0.0 to be
	 handled, so we don't have to worry about it either.  */
      if (flag_finite_math_only)
	return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type, arg1));
    }
  return NULL_TREE;
}
/* Fold a call to builtin carg(a+bi) -> atan2(b,a).  */

static tree
fold_builtin_carg (location_t loc, tree arg, tree type)
{
  if (validate_arg (arg, COMPLEX_TYPE)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
    {
      tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);

      if (atan2_fn)
	{
	  tree new_arg = builtin_save_expr (arg);
	  tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
	  tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
	  return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
	}
    }

  return NULL_TREE;
}

/* Fold a call to builtin logb/ilogb.  */

static tree
fold_builtin_logb (location_t loc, tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
	{
	case rvc_nan:
	case rvc_inf:
	  /* If arg is Inf or NaN and we're logb, return it.  */
	  if (TREE_CODE (rettype) == REAL_TYPE)
	    {
	      /* For logb(-Inf) we have to return +Inf.  */
	      if (real_isinf (value) && real_isneg (value))
		{
		  REAL_VALUE_TYPE tem;
		  real_inf (&tem);
		  return build_real (rettype, tem);
		}
	      return fold_convert_loc (loc, rettype, arg);
	    }
	  /* Fall through...  */
	case rvc_zero:
	  /* Zero may set errno and/or raise an exception for logb, also
	     for ilogb we don't know FP_ILOGB0.  */
	  return NULL_TREE;
	case rvc_normal:
	  /* For normal numbers, proceed iff radix == 2.  In GCC,
	     normalized significands are in the range [0.5, 1.0).  We
	     want the exponent as if they were [1.0, 2.0) so get the
	     exponent and subtract 1.  */
	  if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
	    return fold_convert_loc (loc, rettype,
				     build_int_cst (integer_type_node,
						    REAL_EXP (value)-1));
	  break;
	}
    }

  return NULL_TREE;
}
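
/* Exponent convention used above (a sketch): GCC keeps normalized
   significands in [0.5, 1.0), so 8.0 is stored as 0.5 * 2**4 and
   REAL_EXP returns 4.  logb wants the [1.0, 2.0) convention, hence
   the "- 1":

     logb (8.0)  -> REAL_EXP (8.0) - 1 = 3
     logb (-inf) -> +inf  */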
/* Fold a call to builtin significand, if radix == 2.  */

static tree
fold_builtin_significand (location_t loc, tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
	{
	case rvc_zero:
	case rvc_nan:
	case rvc_inf:
	  /* If arg is +-0, +-Inf or +-NaN, then return it.  */
	  return fold_convert_loc (loc, rettype, arg);
	case rvc_normal:
	  /* For normal numbers, proceed iff radix == 2.  */
	  if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
	    {
	      REAL_VALUE_TYPE result = *value;
	      /* In GCC, normalized significands are in the range [0.5,
		 1.0).  We want them to be [1.0, 2.0) so set the
		 exponent to 1.  */
	      SET_REAL_EXP (&result, 1);
	      return build_real (rettype, result);
	    }
	  break;
	}
    }

  return NULL_TREE;
}

/* Fold a call to builtin frexp, we can assume the base is 2.  */

static tree
fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      tree frac, exp;

      switch (value->cl)
	{
	case rvc_zero:
	  /* For +-0, return (*exp = 0, +-0).  */
	  exp = integer_zero_node;
	  frac = arg0;
	  break;
	case rvc_nan:
	case rvc_inf:
	  /* For +-NaN or +-Inf, *exp is unspecified, return arg0.  */
	  return omit_one_operand_loc (loc, rettype, arg0, arg1);
	case rvc_normal:
	  {
	    /* Since the frexp function always expects base 2, and in
	       GCC normalized significands are already in the range
	       [0.5, 1.0), we have exactly what frexp wants.  */
	    REAL_VALUE_TYPE frac_rvt = *value;
	    SET_REAL_EXP (&frac_rvt, 0);
	    frac = build_real (rettype, frac_rvt);
	    exp = build_int_cst (integer_type_node, REAL_EXP (value));
	  }
	  break;
	default:
	  gcc_unreachable ();
	}

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
    }

  return NULL_TREE;
}
/* Fold a call to builtin ldexp or scalbn/scalbln.  If LDEXP is true
   then we can assume the base is two.  If it's false, then we have to
   check the mode of the TYPE parameter in certain cases.  */

static tree
fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
			    tree type, bool ldexp)
{
  if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0.  */
      if (real_zerop (arg0) || integer_zerop (arg1)
	  || (TREE_CODE (arg0) == REAL_CST
	      && !real_isfinite (&TREE_REAL_CST (arg0))))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* If both arguments are constant, then try to evaluate it.  */
      if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
	  && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
	  && tree_fits_shwi_p (arg1))
	{
	  /* Bound the maximum adjustment to twice the range of the
	     mode's valid exponents.  Use abs to ensure the range is
	     positive as a sanity check.  */
	  const long max_exp_adj = 2 *
	    labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
		  - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);

	  /* Get the user-requested adjustment.  */
	  const HOST_WIDE_INT req_exp_adj = tree_to_shwi (arg1);

	  /* The requested adjustment must be inside this range.  This
	     is a preliminary cap to avoid things like overflow, we
	     may still fail to compute the result for other reasons.  */
	  if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
	    {
	      REAL_VALUE_TYPE initial_result;

	      real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);

	      /* Ensure we didn't overflow.  */
	      if (! real_isinf (&initial_result))
		{
		  const REAL_VALUE_TYPE trunc_result
		    = real_value_truncate (TYPE_MODE (type), initial_result);

		  /* Only proceed if the target mode can hold the
		     resulting value.  */
		  if (REAL_VALUES_EQUAL (initial_result, trunc_result))
		    return build_real (type, trunc_result);
		}
	    }
	}
    }

  return NULL_TREE;
}

/* Fold a call to builtin modf.  */

static tree
fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      REAL_VALUE_TYPE trunc, frac;

      switch (value->cl)
	{
	case rvc_nan:
	case rvc_zero:
	  /* For +-NaN or +-0, return (*arg1 = arg0, arg0).  */
	  trunc = frac = *value;
	  break;
	case rvc_inf:
	  /* For +-Inf, return (*arg1 = arg0, +-0).  */
	  frac = dconst0;
	  frac.sign = value->sign;
	  trunc = *value;
	  break;
	case rvc_normal:
	  /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)).  */
	  real_trunc (&trunc, VOIDmode, value);
	  real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
	  /* If the original number was negative and already
	     integral, then the fractional part is -0.0.  */
	  if (value->sign && frac.cl == rvc_zero)
	    frac.sign = value->sign;
	  break;
	}

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
			      build_real (rettype, trunc));
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
			      build_real (rettype, frac));
    }

  return NULL_TREE;
}
/* Given a location LOC, an interclass builtin function decl FNDECL
   and its single argument ARG, return a folded expression computing
   the same, or NULL_TREE if we either couldn't or didn't want to fold
   (the latter happens if there's an RTL instruction available).  */

static tree
fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
{
  machine_mode mode;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* If there is an RTL instruction for this, don't fold here.  */
  if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
    return NULL_TREE;

  mode = TYPE_MODE (TREE_TYPE (arg));

  /* If there is no optab, try generic code.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
      tree result;

    CASE_FLT_FN (BUILT_IN_ISINF):
      {
	/* isinf(x) -> isgreater(fabs(x),DBL_MAX).  */
	tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&r, buf);
	result = build_call_expr (isgr_fn, 2,
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	return result;
      }
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_ISFINITE:
      {
	/* isfinite(x) -> islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&r, buf);
	result = build_call_expr (isle_fn, 2,
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	/*result = fold_build2_loc (loc, UNGT_EXPR,
				  TREE_TYPE (TREE_TYPE (fndecl)),
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
				  TREE_TYPE (TREE_TYPE (fndecl)),
				  result);*/
	return result;
      }
    case BUILT_IN_ISNORMAL:
      {
	/* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
	   islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
	tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE rmax, rmin;
	char buf[128];

	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&rmax, buf);
	sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
	real_from_string (&rmin, buf);
	arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
	result = build_call_expr (isle_fn, 2, arg,
				  build_real (type, rmax));
	result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
			      build_call_expr (isge_fn, 2, arg,
					       build_real (type, rmin)));
	return result;
      }
    default:
      break;
    }

  return NULL_TREE;
}
/* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
   ARG is the argument for the call.  */

static tree
fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  REAL_VALUE_TYPE r;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  switch (builtin_index)
    {
    case BUILT_IN_ISINF:
      if (!HONOR_INFINITIES (arg))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  if (real_isinf (&r))
	    return real_compare (GT_EXPR, &r, &dconst0)
		   ? integer_one_node : integer_minus_one_node;
	  else
	    return integer_zero_node;
	}

      return NULL_TREE;

    case BUILT_IN_ISINF_SIGN:
      {
	/* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
	/* In a boolean context, GCC will fold the inner COND_EXPR to
	   1.  So e.g. "if (isinf_sign(x))" would be folded to just
	   "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
	tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
	tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
	tree tmp = NULL_TREE;

	arg = builtin_save_expr (arg);

	if (signbit_fn && isinf_fn)
	  {
	    tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
	    tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);

	    signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
					    signbit_call, integer_zero_node);
	    isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
					  isinf_call, integer_zero_node);

	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
				   integer_minus_one_node, integer_one_node);
	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
				   isinf_call, tmp,
				   integer_zero_node);
	  }

	return tmp;
      }

    case BUILT_IN_ISFINITE:
      if (!HONOR_NANS (arg)
	  && !HONOR_INFINITIES (arg))
	return omit_one_operand_loc (loc, type, integer_one_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  return real_isfinite (&r) ? integer_one_node : integer_zero_node;
	}

      return NULL_TREE;

    case BUILT_IN_ISNAN:
      if (!HONOR_NANS (arg))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  return real_isnan (&r) ? integer_one_node : integer_zero_node;
	}

      arg = builtin_save_expr (arg);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);

    default:
      gcc_unreachable ();
    }
}
/* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
   This builtin will generate code to return the appropriate floating
   point classification depending on the value of the floating point
   number passed in.  The possible return values must be supplied as
   int arguments to the call in the following order: FP_NAN, FP_INFINITE,
   FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipsis is for exactly
   one floating point argument which is "type generic".  */

static tree
fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
{
  tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
    arg, type, res, tmp;
  machine_mode mode;
  REAL_VALUE_TYPE r;
  char buf[128];

  /* Verify the required arguments in the original call.  */
  if (nargs != 6
      || !validate_arg (args[0], INTEGER_TYPE)
      || !validate_arg (args[1], INTEGER_TYPE)
      || !validate_arg (args[2], INTEGER_TYPE)
      || !validate_arg (args[3], INTEGER_TYPE)
      || !validate_arg (args[4], INTEGER_TYPE)
      || !validate_arg (args[5], REAL_TYPE))
    return NULL_TREE;

  fp_nan = args[0];
  fp_infinite = args[1];
  fp_normal = args[2];
  fp_subnormal = args[3];
  fp_zero = args[4];
  arg = args[5];
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (type);
  arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));

  /* fpclassify(x) ->
       isnan(x) ? FP_NAN :
	 (fabs(x) == Inf ? FP_INFINITE :
	   (fabs(x) >= DBL_MIN ? FP_NORMAL :
	     (x == 0 ? FP_ZERO : FP_SUBNORMAL))).  */

  tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
			 build_real (type, dconst0));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
			 tmp, fp_zero, fp_subnormal);

  sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
  real_from_string (&r, buf);
  tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
			 arg, build_real (type, r));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);

  if (HONOR_INFINITIES (mode))
    {
      real_inf (&r);
      tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
			     build_real (type, r));
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
			     fp_infinite, res);
    }

  if (HONOR_NANS (mode))
    {
      tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
    }

  return res;
}
/* Fold a call to an unordered comparison function such as
   __builtin_isgreater().  FNDECL is the FUNCTION_DECL for the function
   being called and ARG0 and ARG1 are the arguments for the call.
   UNORDERED_CODE and ORDERED_CODE are comparison codes that give
   the opposite of the desired result.  UNORDERED_CODE is used
   for modes that can hold NaNs and ORDERED_CODE is used for
   the rest.  */

static tree
fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
			    enum tree_code unordered_code,
			    enum tree_code ordered_code)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum tree_code code;
  tree type0, type1;
  enum tree_code code0, code1;
  tree cmp_type = NULL_TREE;

  type0 = TREE_TYPE (arg0);
  type1 = TREE_TYPE (arg1);

  code0 = TREE_CODE (type0);
  code1 = TREE_CODE (type1);

  if (code0 == REAL_TYPE && code1 == REAL_TYPE)
    /* Choose the wider of two real types.  */
    cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
      ? type0 : type1;
  else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
    cmp_type = type0;
  else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
    cmp_type = type1;

  arg0 = fold_convert_loc (loc, cmp_type, arg0);
  arg1 = fold_convert_loc (loc, cmp_type, arg1);

  if (unordered_code == UNORDERED_EXPR)
    {
      if (!HONOR_NANS (arg0))
	return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
    }

  code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
  return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
			  fold_build2_loc (loc, code, type, arg0, arg1));
}
/* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
   arithmetic if it can never overflow, or into internal functions that
   return both the result of the arithmetic and an overflowed boolean
   flag in a complex integer result, or some other check for overflow.  */

static tree
fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
			     tree arg0, tree arg1, tree arg2)
{
  enum internal_fn ifn = IFN_LAST;
  tree type = TREE_TYPE (TREE_TYPE (arg2));
  tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
  switch (fcode)
    {
    case BUILT_IN_ADD_OVERFLOW:
    case BUILT_IN_SADD_OVERFLOW:
    case BUILT_IN_SADDL_OVERFLOW:
    case BUILT_IN_SADDLL_OVERFLOW:
    case BUILT_IN_UADD_OVERFLOW:
    case BUILT_IN_UADDL_OVERFLOW:
    case BUILT_IN_UADDLL_OVERFLOW:
      ifn = IFN_ADD_OVERFLOW;
      break;
    case BUILT_IN_SUB_OVERFLOW:
    case BUILT_IN_SSUB_OVERFLOW:
    case BUILT_IN_SSUBL_OVERFLOW:
    case BUILT_IN_SSUBLL_OVERFLOW:
    case BUILT_IN_USUB_OVERFLOW:
    case BUILT_IN_USUBL_OVERFLOW:
    case BUILT_IN_USUBLL_OVERFLOW:
      ifn = IFN_SUB_OVERFLOW;
      break;
    case BUILT_IN_MUL_OVERFLOW:
    case BUILT_IN_SMUL_OVERFLOW:
    case BUILT_IN_SMULL_OVERFLOW:
    case BUILT_IN_SMULLL_OVERFLOW:
    case BUILT_IN_UMUL_OVERFLOW:
    case BUILT_IN_UMULL_OVERFLOW:
    case BUILT_IN_UMULLL_OVERFLOW:
      ifn = IFN_MUL_OVERFLOW;
      break;
    default:
      gcc_unreachable ();
    }
  tree ctype = build_complex_type (type);
  tree call = build_call_expr_internal_loc (loc, ifn, ctype,
					    2, arg0, arg1);
  tree tgt = save_expr (call);
  tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
  tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
  ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
  tree store
    = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
  return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
}
/* Fold a call to built-in function FNDECL with 0 arguments.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_0 (location_t loc, tree fndecl)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_INF):
    case BUILT_IN_INFD32:
    case BUILT_IN_INFD64:
    case BUILT_IN_INFD128:
      return fold_builtin_inf (loc, type, true);

    CASE_FLT_FN (BUILT_IN_HUGE_VAL):
      return fold_builtin_inf (loc, type, false);

    case BUILT_IN_CLASSIFY_TYPE:
      return fold_builtin_classify_type (NULL_TREE);

    default:
      break;
    }
  return NULL_TREE;
}
/* Fold a call to built-in function FNDECL with 1 argument, ARG0.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    case BUILT_IN_CONSTANT_P:
      {
	tree val = fold_builtin_constant_p (arg0);

	/* Gimplification will pull the CALL_EXPR for the builtin out of
	   an if condition.  When not optimizing, we'll not CSE it back.
	   To avoid link error types of regressions, return false now.  */
	if (!val && !optimize)
	  val = integer_zero_node;

	return val;
      }

    case BUILT_IN_CLASSIFY_TYPE:
      return fold_builtin_classify_type (arg0);

    case BUILT_IN_STRLEN:
      return fold_builtin_strlen (loc, type, arg0);

    CASE_FLT_FN (BUILT_IN_FABS):
    case BUILT_IN_FABSD32:
    case BUILT_IN_FABSD64:
    case BUILT_IN_FABSD128:
      return fold_builtin_fabs (loc, arg0, type);

    case BUILT_IN_ABS:
    case BUILT_IN_LABS:
    case BUILT_IN_LLABS:
    case BUILT_IN_IMAXABS:
      return fold_builtin_abs (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_CONJ):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
    break;

    CASE_FLT_FN (BUILT_IN_CREAL):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
    break;

    CASE_FLT_FN (BUILT_IN_CIMAG):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
    break;

    CASE_FLT_FN (BUILT_IN_CCOS):
      return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);

    CASE_FLT_FN (BUILT_IN_CCOSH):
      return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);

    CASE_FLT_FN (BUILT_IN_CPROJ):
      return fold_builtin_cproj (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_CSIN):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_sin);
    break;

    CASE_FLT_FN (BUILT_IN_CSINH):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_sinh);
    break;

    CASE_FLT_FN (BUILT_IN_CTAN):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_tan);
    break;

    CASE_FLT_FN (BUILT_IN_CTANH):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_tanh);
    break;

    CASE_FLT_FN (BUILT_IN_CLOG):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_log);
    break;

    CASE_FLT_FN (BUILT_IN_CSQRT):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_sqrt);
    break;

    CASE_FLT_FN (BUILT_IN_CASIN):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_asin);
    break;

    CASE_FLT_FN (BUILT_IN_CACOS):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_acos);
    break;

    CASE_FLT_FN (BUILT_IN_CATAN):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_atan);
    break;

    CASE_FLT_FN (BUILT_IN_CASINH):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_asinh);
    break;

    CASE_FLT_FN (BUILT_IN_CACOSH):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_acosh);
    break;

    CASE_FLT_FN (BUILT_IN_CATANH):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_atanh);
    break;

    CASE_FLT_FN (BUILT_IN_CABS):
      return fold_builtin_cabs (loc, arg0, type, fndecl);

    CASE_FLT_FN (BUILT_IN_CARG):
      return fold_builtin_carg (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_SQRT):
      return fold_builtin_sqrt (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_CBRT):
      return fold_builtin_cbrt (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_ASIN):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_asin,
			     &dconstm1, &dconst1, true);
    break;

    CASE_FLT_FN (BUILT_IN_ACOS):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_acos,
			     &dconstm1, &dconst1, true);
    break;

    CASE_FLT_FN (BUILT_IN_ATAN):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_ASINH):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_ACOSH):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_acosh,
			     &dconst1, NULL, true);
    break;

    CASE_FLT_FN (BUILT_IN_ATANH):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_atanh,
			     &dconstm1, &dconst1, false);
    break;

    CASE_FLT_FN (BUILT_IN_SIN):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_COS):
      return fold_builtin_cos (loc, arg0, type, fndecl);

    CASE_FLT_FN (BUILT_IN_TAN):
      return fold_builtin_tan (arg0, type);

    CASE_FLT_FN (BUILT_IN_CEXP):
      return fold_builtin_cexp (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_CEXPI):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
    break;

    CASE_FLT_FN (BUILT_IN_SINH):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_COSH):
      return fold_builtin_cosh (loc, arg0, type, fndecl);

    CASE_FLT_FN (BUILT_IN_TANH):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_ERF):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_ERFC):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_TGAMMA):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_EXP):
      return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);

    CASE_FLT_FN (BUILT_IN_EXP2):
      return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);

    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
      return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);

    CASE_FLT_FN (BUILT_IN_EXPM1):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_LOG):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_log, &dconst0, NULL, false);
    break;

    CASE_FLT_FN (BUILT_IN_LOG2):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_log2, &dconst0, NULL, false);
    break;

    CASE_FLT_FN (BUILT_IN_LOG10):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_log10, &dconst0, NULL, false);
    break;

    CASE_FLT_FN (BUILT_IN_LOG1P):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_log1p,
			     &dconstm1, NULL, false);
    break;

    CASE_FLT_FN (BUILT_IN_J0):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_j0,
			     NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_J1):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_j1,
			     NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_Y0):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_y0,
			     &dconst0, NULL, false);
    break;

    CASE_FLT_FN (BUILT_IN_Y1):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_y1,
			     &dconst0, NULL, false);
    break;

    CASE_FLT_FN (BUILT_IN_NAN):
    case BUILT_IN_NAND32:
    case BUILT_IN_NAND64:
    case BUILT_IN_NAND128:
      return fold_builtin_nan (arg0, type, true);

    CASE_FLT_FN (BUILT_IN_NANS):
      return fold_builtin_nan (arg0, type, false);

    CASE_FLT_FN (BUILT_IN_FLOOR):
      return fold_builtin_floor (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_CEIL):
      return fold_builtin_ceil (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_TRUNC):
      return fold_builtin_trunc (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_ROUND):
      return fold_builtin_round (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return fold_trunc_transparent_mathfn (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
    CASE_FLT_FN (BUILT_IN_IROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      return fold_builtin_int_roundingfn (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_IRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      return fold_fixed_mathfn (loc, fndecl, arg0);

    case BUILT_IN_BSWAP16:
    case BUILT_IN_BSWAP32:
    case BUILT_IN_BSWAP64:
      return fold_builtin_bswap (fndecl, arg0);

    CASE_INT_FN (BUILT_IN_FFS):
    CASE_INT_FN (BUILT_IN_CLZ):
    CASE_INT_FN (BUILT_IN_CTZ):
    CASE_INT_FN (BUILT_IN_CLRSB):
    CASE_INT_FN (BUILT_IN_POPCOUNT):
    CASE_INT_FN (BUILT_IN_PARITY):
      return fold_builtin_bitop (fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_SIGNBIT):
      return fold_builtin_signbit (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
      return fold_builtin_significand (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_ILOGB):
    CASE_FLT_FN (BUILT_IN_LOGB):
      return fold_builtin_logb (loc, arg0, type);

    case BUILT_IN_ISASCII:
      return fold_builtin_isascii (loc, arg0);

    case BUILT_IN_TOASCII:
      return fold_builtin_toascii (loc, arg0);

    case BUILT_IN_ISDIGIT:
      return fold_builtin_isdigit (loc, arg0);

    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISFINITE:
      {
	tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
	if (ret)
	  return ret;
	return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
      }

    CASE_FLT_FN (BUILT_IN_ISINF):
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      {
	tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
	if (ret)
	  return ret;
	return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
      }

    case BUILT_IN_ISNORMAL:
      return fold_builtin_interclass_mathfn (loc, fndecl, arg0);

    case BUILT_IN_ISINF_SIGN:
      return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);

    CASE_FLT_FN (BUILT_IN_ISNAN):
    case BUILT_IN_ISNAND32:
    case BUILT_IN_ISNAND64:
    case BUILT_IN_ISNAND128:
      return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);

    case BUILT_IN_FREE:
      if (integer_zerop (arg0))
	return build_empty_stmt (loc);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
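
/* A few illustrative folds performed above (sketch; constant arguments
   are evaluated through MPFR/MPC by the do_mpfr_*/do_mpc_* helpers):

     __builtin_strlen ("abc")  ->  3
     fabs (-2.5)               ->  2.5
     creal (z)                 ->  REALPART_EXPR <z>
     free (0)                  ->  an empty statement  */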
/* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_JN):
      if (validate_arg (arg0, INTEGER_TYPE)
	  && validate_arg (arg1, REAL_TYPE))
	return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_YN):
      if (validate_arg (arg0, INTEGER_TYPE)
	  && validate_arg (arg1, REAL_TYPE))
	return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
				 &dconst0, false);
    break;

    CASE_FLT_FN (BUILT_IN_DREM):
    CASE_FLT_FN (BUILT_IN_REMAINDER):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, REAL_TYPE))
	return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
    break;

    CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
    CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, POINTER_TYPE))
	return do_mpfr_lgamma_r (arg0, arg1, type);
    break;

    CASE_FLT_FN (BUILT_IN_ATAN2):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, REAL_TYPE))
	return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
    break;

    CASE_FLT_FN (BUILT_IN_FDIM):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, REAL_TYPE))
	return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
    break;

    CASE_FLT_FN (BUILT_IN_HYPOT):
      return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_CPOW):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
	  && validate_arg (arg1, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
	return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
    break;

    CASE_FLT_FN (BUILT_IN_LDEXP):
      return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      return fold_builtin_load_exponent (loc, arg0, arg1,
					 type, /*ldexp=*/false);

    CASE_FLT_FN (BUILT_IN_FREXP):
      return fold_builtin_frexp (loc, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_MODF):
      return fold_builtin_modf (loc, arg0, arg1, type);

    case BUILT_IN_STRSTR:
      return fold_builtin_strstr (loc, arg0, arg1, type);

    case BUILT_IN_STRSPN:
      return fold_builtin_strspn (loc, arg0, arg1);

    case BUILT_IN_STRCSPN:
      return fold_builtin_strcspn (loc, arg0, arg1);

    case BUILT_IN_STRCHR:
    case BUILT_IN_INDEX:
      return fold_builtin_strchr (loc, arg0, arg1, type);

    case BUILT_IN_STRRCHR:
    case BUILT_IN_RINDEX:
      return fold_builtin_strrchr (loc, arg0, arg1, type);

    case BUILT_IN_STRCMP:
      return fold_builtin_strcmp (loc, arg0, arg1);

    case BUILT_IN_STRPBRK:
      return fold_builtin_strpbrk (loc, arg0, arg1, type);

    case BUILT_IN_EXPECT:
      return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);

    CASE_FLT_FN (BUILT_IN_POW):
      return fold_builtin_pow (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_POWI):
      return fold_builtin_powi (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_COPYSIGN):
      return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_FMIN):
      return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);

    CASE_FLT_FN (BUILT_IN_FMAX):
      return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);

    case BUILT_IN_ISGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNLE_EXPR, LE_EXPR);
    case BUILT_IN_ISGREATEREQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNLT_EXPR, LT_EXPR);
    case BUILT_IN_ISLESS:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNGE_EXPR, GE_EXPR);
    case BUILT_IN_ISLESSEQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNGT_EXPR, GT_EXPR);
    case BUILT_IN_ISLESSGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
    case BUILT_IN_ISUNORDERED:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNORDERED_EXPR,
					 NOP_EXPR);

    /* We do the folding for va_start in the expander.  */
    case BUILT_IN_VA_START:
      break;

    case BUILT_IN_OBJECT_SIZE:
      return fold_builtin_object_size (arg0, arg1);

    case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
      return fold_builtin_atomic_always_lock_free (arg0, arg1);

    case BUILT_IN_ATOMIC_IS_LOCK_FREE:
      return fold_builtin_atomic_is_lock_free (arg0, arg1);

    default:
      break;
    }
  return NULL_TREE;
}
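
/* Illustrative sketch: constant operands of atan2, fdim and friends are
   evaluated through MPFR, while the unordered comparisons are rewritten
   without a library call, roughly

     isgreater (x, y)  ->  !(x <= y)   [via the UNLE_EXPR/LE_EXPR pair]

   preserving the "no FP exception on unordered operands" semantics.  */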
/* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
   and ARG2.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_3 (location_t loc, tree fndecl,
		tree arg0, tree arg1, tree arg2)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_SINCOS):
      return fold_builtin_sincos (loc, arg0, arg1, arg2);

    CASE_FLT_FN (BUILT_IN_FMA):
      return fold_builtin_fma (loc, arg0, arg1, arg2, type);

    CASE_FLT_FN (BUILT_IN_REMQUO):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, REAL_TYPE)
	  && validate_arg (arg2, POINTER_TYPE))
	return do_mpfr_remquo (arg0, arg1, arg2);
    break;

    case BUILT_IN_STRNCMP:
      return fold_builtin_strncmp (loc, arg0, arg1, arg2);

    case BUILT_IN_MEMCHR:
      return fold_builtin_memchr (loc, arg0, arg1, arg2, type);

    case BUILT_IN_BCMP:
    case BUILT_IN_MEMCMP:
      return fold_builtin_memcmp (loc, arg0, arg1, arg2);

    case BUILT_IN_EXPECT:
      return fold_builtin_expect (loc, arg0, arg1, arg2);

    case BUILT_IN_ADD_OVERFLOW:
    case BUILT_IN_SUB_OVERFLOW:
    case BUILT_IN_MUL_OVERFLOW:
    case BUILT_IN_SADD_OVERFLOW:
    case BUILT_IN_SADDL_OVERFLOW:
    case BUILT_IN_SADDLL_OVERFLOW:
    case BUILT_IN_SSUB_OVERFLOW:
    case BUILT_IN_SSUBL_OVERFLOW:
    case BUILT_IN_SSUBLL_OVERFLOW:
    case BUILT_IN_SMUL_OVERFLOW:
    case BUILT_IN_SMULL_OVERFLOW:
    case BUILT_IN_SMULLL_OVERFLOW:
    case BUILT_IN_UADD_OVERFLOW:
    case BUILT_IN_UADDL_OVERFLOW:
    case BUILT_IN_UADDLL_OVERFLOW:
    case BUILT_IN_USUB_OVERFLOW:
    case BUILT_IN_USUBL_OVERFLOW:
    case BUILT_IN_USUBLL_OVERFLOW:
    case BUILT_IN_UMUL_OVERFLOW:
    case BUILT_IN_UMULL_OVERFLOW:
    case BUILT_IN_UMULLL_OVERFLOW:
      return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);

    default:
      break;
    }
  return NULL_TREE;
}
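
/* Sketch: all twenty-one generic and type-specific overflow builtins
   funnel into fold_builtin_arith_overflow above, which selects the
   matching IFN_{ADD,SUB,MUL}_OVERFLOW internal function, e.g.
   __builtin_sadd_overflow (a, b, &r) becomes an .ADD_OVERFLOW
   lowering.  */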
/* Fold a call to built-in function FNDECL.  ARGS is an array of NARGS
   arguments.  IGNORE is true if the result of the
   function call is ignored.  This function returns NULL_TREE if no
   simplification was possible.  */

static tree
fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
{
  tree ret = NULL_TREE;

  switch (nargs)
    {
    case 0:
      ret = fold_builtin_0 (loc, fndecl);
      break;
    case 1:
      ret = fold_builtin_1 (loc, fndecl, args[0]);
      break;
    case 2:
      ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
      break;
    case 3:
      ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
      break;
    default:
      ret = fold_builtin_varargs (loc, fndecl, args, nargs);
      break;
    }
  if (ret)
    {
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}
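
/* Usage sketch (hypothetical caller): to try folding a two-argument
   call,

     tree args[2] = { arg0, arg1 };
     tree folded = fold_builtin_n (loc, fndecl, args, 2, false);

   and emit the original call if NULL_TREE comes back.  The NOP_EXPR
   wrapper above carries TREE_NO_WARNING so the replacement does not
   trigger "statement without effect" diagnostics.  */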
/* Construct a new CALL_EXPR to FNDECL using the tail of the argument
   list ARGS along with N new arguments in NEWARGS.  SKIP is the number
   of arguments in ARGS to be omitted.  OLDNARGS is the number of
   elements in ARGS.  */

static tree
rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
			  int skip, tree fndecl, int n, va_list newargs)
{
  int nargs = oldnargs - skip + n;
  tree *buffer;

  if (n > 0)
    {
      int i, j;

      buffer = XALLOCAVEC (tree, nargs);
      for (i = 0; i < n; i++)
	buffer[i] = va_arg (newargs, tree);
      for (j = skip; j < oldnargs; j++, i++)
	buffer[i] = args[j];
    }
  else
    buffer = args + skip;

  return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
}
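
/* Sketch: with OLDNARGS == 3, SKIP == 1 and N == 2 the new argument
   vector becomes { new1, new2, args[1], args[2] }, i.e. the N fresh
   arguments are placed in front of the retained tail of ARGS.  */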
/* Return true if FNDECL shouldn't be folded right now.
   If a built-in function has an inline attribute always_inline
   wrapper, defer folding it after always_inline functions have
   been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
   might not be performed.  */

bool
avoid_folding_inline_builtin (tree fndecl)
{
  return (DECL_DECLARED_INLINE_P (fndecl)
	  && DECL_DISREGARD_INLINE_LIMITS (fndecl)
	  && cfun
	  && !cfun->always_inline_functions_inlined
	  && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
}
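
/* Rationale sketch: with -D_FORTIFY_SOURCE, glibc wraps e.g. memcpy in
   an always_inline wrapper that calls __builtin___memcpy_chk; folding
   the builtin before that wrapper is inlined would bypass the
   object-size check the wrapper exists to perform.  */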
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_expr (location_t loc, tree exp, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = get_callee_fndecl (exp);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
	 yet.  Defer folding until we see all the arguments
	 (after inlining).  */
      && !CALL_EXPR_VA_ARG_PACK (exp))
    {
      int nargs = call_expr_nargs (exp);

      /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
	 instead last argument is __builtin_va_arg_pack ().  Defer folding
	 even in that case, until arguments are finalized.  */
      if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
	{
	  tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
	  if (fndecl2
	      && TREE_CODE (fndecl2) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
	    return NULL_TREE;
	}

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;

      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
				     CALL_EXPR_ARGP (exp), ignore);
      else
	{
	  tree *args = CALL_EXPR_ARGP (exp);
	  ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	  if (ret)
	    return ret;
	}
    }
  return NULL_TREE;
}
/* Fold a CALL_EXPR with type TYPE with FN as the function expression.
   N arguments are passed in the array ARGARRAY.  Return a folded
   expression or NULL_TREE if no simplification was possible.  */

tree
fold_builtin_call_array (location_t loc, tree,
			 tree fn,
			 int n,
			 tree *argarray)
{
  if (TREE_CODE (fn) != ADDR_EXPR)
    return NULL_TREE;

  tree fndecl = TREE_OPERAND (fn, 0);
  if (TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl))
    {
      /* If last argument is __builtin_va_arg_pack (), arguments to this
	 function are not finalized yet.  Defer folding until they are.  */
      if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
	{
	  tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
	  if (fndecl2
	      && TREE_CODE (fndecl2) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
	    return NULL_TREE;
	}
      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	return targetm.fold_builtin (fndecl, n, argarray, false);
      else
	return fold_builtin_n (loc, fndecl, argarray, n, false);
    }

  return NULL_TREE;
}
/* Construct a new CALL_EXPR using the tail of the argument list of EXP
   along with N new arguments specified as the "..." parameters.  SKIP
   is the number of arguments in EXP to be omitted.  This function is used
   to do varargs-to-varargs transformations.  */

static tree
rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
{
  va_list ap;
  tree t;

  va_start (ap, n);
  t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
				CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
  va_end (ap);

  return t;
}
/* Validate a single argument ARG against a tree code CODE representing
   a type.  */

static bool
validate_arg (const_tree arg, enum tree_code code)
{
  if (!arg)
    return false;
  else if (code == POINTER_TYPE)
    return POINTER_TYPE_P (TREE_TYPE (arg));
  else if (code == INTEGER_TYPE)
    return INTEGRAL_TYPE_P (TREE_TYPE (arg));
  return code == TREE_CODE (TREE_TYPE (arg));
}
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipses, otherwise the last specifier must be a
   VOID_TYPE.

   This is the GIMPLE version of validate_arglist.  Eventually we want to
   completely convert builtins.c to work from GIMPLEs and the tree based
   validate_arglist will then be removed.  */

bool
validate_gimple_arglist (const gcall *call, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_tree arg;
  size_t i;

  va_start (ap, call);
  i = 0;

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
	{
	case 0:
	  /* This signifies an ellipses, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = (i == gimple_call_num_args (call));
	  goto end;
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = gimple_call_arg (call, i++);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}
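
/* Usage sketch: a memchr-like call would be validated with

     validate_gimple_arglist (call, POINTER_TYPE, INTEGER_TYPE,
			      INTEGER_TYPE, VOID_TYPE)

   while a printf-like vararg call would end its specifier list with 0
   instead of VOID_TYPE to accept any trailing arguments.  */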
/* Default target-specific builtin expander that does nothing.  */

rtx
default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
			rtx target ATTRIBUTE_UNUSED,
			rtx subtarget ATTRIBUTE_UNUSED,
			machine_mode mode ATTRIBUTE_UNUSED,
			int ignore ATTRIBUTE_UNUSED)
{
  return NULL_RTX;
}
/* Returns true if EXP represents data that would potentially reside
   in a readonly section.  */

bool
readonly_data_expr (tree exp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) != ADDR_EXPR)
    return false;

  exp = get_base_address (TREE_OPERAND (exp, 0));
  if (!exp)
    return false;

  /* Make sure we call decl_readonly_section only for trees it
     can handle (since it returns true for everything it doesn't
     understand).  */
  if (TREE_CODE (exp) == STRING_CST
      || TREE_CODE (exp) == CONSTRUCTOR
      || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
    return decl_readonly_section (exp, 0);
  else
    return false;
}
/* Simplify a call to the strstr builtin.  S1 and S2 are the arguments
   to the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1, *p2;

      p2 = c_getstr (s2);
      if (p2 == NULL)
	return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
	{
	  const char *r = strstr (p1, p2);
	  tree tem;

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (s1), 0);

	  /* Return an offset into the constant string argument.  */
	  tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
	  return fold_convert_loc (loc, type, tem);
	}

      /* The argument is const char *, and the result is char *, so we need
	 a type conversion here to avoid a warning.  */
      if (p2[0] == '\0')
	return fold_convert_loc (loc, type, s1);

      if (p2[1] != '\0')
	return NULL_TREE;

      fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (!fn)
	return NULL_TREE;

      /* New argument list transforming strstr(s1, s2) to
	 strchr(s1, s2[0]).  */
      return build_call_expr_loc (loc, fn, 2, s1,
				  build_int_cst (integer_type_node, p2[0]));
    }
}
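
/* Illustrative results of the folds above (sketch):

     strstr ("hello", "ll")  ->  "hello" + 2
     strstr (s, "")          ->  (char *) s
     strstr (s, "l")         ->  strchr (s, 'l')  */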
/* Simplify a call to the strchr builtin.  S1 and S2 are the arguments to
   the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1;

      if (TREE_CODE (s2) != INTEGER_CST)
	return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
	{
	  char c;
	  const char *r;
	  tree tem;

	  if (target_char_cast (s2, &c))
	    return NULL_TREE;

	  r = strchr (p1, c);

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (s1), 0);

	  /* Return an offset into the constant string argument.  */
	  tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
	  return fold_convert_loc (loc, type, tem);
	}
      return NULL_TREE;
    }
}
/* Simplify a call to the strrchr builtin.  S1 and S2 are the arguments to
   the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1;

      if (TREE_CODE (s2) != INTEGER_CST)
	return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
	{
	  char c;
	  const char *r;
	  tree tem;

	  if (target_char_cast (s2, &c))
	    return NULL_TREE;

	  r = strrchr (p1, c);

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (s1), 0);

	  /* Return an offset into the constant string argument.  */
	  tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
	  return fold_convert_loc (loc, type, tem);
	}

      if (! integer_zerop (s2))
	return NULL_TREE;

      fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (!fn)
	return NULL_TREE;

      /* Transform strrchr(s1, '\0') to strchr(s1, '\0').  */
      return build_call_expr_loc (loc, fn, 2, s1, s2);
    }
}
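
/* Sketch: besides full compile-time evaluation for constant strings,
   strrchr (s, '\0') is rewritten as strchr (s, '\0'), since both
   return a pointer to the terminating NUL.  */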
/* Simplify a call to the strpbrk builtin.  S1 and S2 are the arguments
   to the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1, *p2;

      p2 = c_getstr (s2);
      if (p2 == NULL)
	return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
	{
	  const char *r = strpbrk (p1, p2);
	  tree tem;

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (s1), 0);

	  /* Return an offset into the constant string argument.  */
	  tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
	  return fold_convert_loc (loc, type, tem);
	}

      if (p2[0] == '\0')
	/* strpbrk(x, "") == NULL.
	   Evaluate and ignore s1 in case it had side-effects.  */
	return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);

      if (p2[1] != '\0')
	return NULL_TREE;  /* Really call strpbrk.  */

      fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (!fn)
	return NULL_TREE;

      /* New argument list transforming strpbrk(s1, s2) to
	 strchr(s1, s2[0]).  */
      return build_call_expr_loc (loc, fn, 2, s1,
				  build_int_cst (integer_type_node, p2[0]));
    }
}
/* Simplify a call to the strspn builtin.  S1 and S2 are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strspn (location_t loc, tree s1, tree s2)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);

      /* If both arguments are constants, evaluate at compile-time.  */
      if (p1 && p2)
	{
	  const size_t r = strspn (p1, p2);
	  return build_int_cst (size_type_node, r);
	}

      /* If either argument is "", the result is 0.  */
      if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
	/* Evaluate and ignore both arguments in case either one has
	   side-effects.  */
	return omit_two_operands_loc (loc, size_type_node, size_zero_node,
				      s1, s2);
      return NULL_TREE;
    }
}
/* Simplify a call to the strcspn builtin.  S1 and S2 are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strcspn (location_t loc, tree s1, tree s2)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);

      /* If both arguments are constants, evaluate at compile-time.  */
      if (p1 && p2)
	{
	  const size_t r = strcspn (p1, p2);
	  return build_int_cst (size_type_node, r);
	}

      /* If the first argument is "", the result is 0.  */
      if (p1 && *p1 == '\0')
	{
	  /* Evaluate and ignore argument s2 in case it has
	     side-effects.  */
	  return omit_one_operand_loc (loc, size_type_node,
				       size_zero_node, s2);
	}

      /* If the second argument is "", return __builtin_strlen(s1).  */
      if (p2 && *p2 == '\0')
	{
	  tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);

	  /* If the replacement _DECL isn't initialized, don't do the
	     transformation.  */
	  if (!fn)
	    return NULL_TREE;

	  return build_call_expr_loc (loc, fn, 1, s1);
	}
      return NULL_TREE;
    }
}
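
/* Illustrative results (sketch):

     strspn ("aab", "a")  ->  2		(both constant: computed now)
     strcspn ("", s2)     ->  0		(s2 still evaluated for effects)
     strcspn (s1, "")     ->  strlen (s1)  */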
/* Fold the next_arg or va_start call EXP.  Returns true if there was an error
   produced.  False otherwise.  This is done so that we don't output the error
   or warning twice or three times.  */

bool
fold_builtin_next_arg (tree exp, bool va_start_p)
{
  tree fntype = TREE_TYPE (current_function_decl);
  int nargs = call_expr_nargs (exp);
  tree arg;
  /* There is good chance the current input_location points inside the
     definition of the va_start macro (perhaps on the token for
     builtin) in a system header, so warnings will not be emitted.
     Use the location in real source code.  */
  source_location current_location =
    linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
					      NULL);

  if (!stdarg_p (fntype))
    {
      error ("%<va_start%> used in function with fixed args");
      return true;
    }

  if (va_start_p)
    {
      if (va_start_p && (nargs != 2))
	{
	  error ("wrong number of arguments to function %<va_start%>");
	  return true;
	}
      arg = CALL_EXPR_ARG (exp, 1);
    }
  /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
     when we checked the arguments and if needed issued a warning.  */
  else if (nargs == 0)
    {
      /* Evidently an out of date version of <stdarg.h>; can't validate
	 va_start's second argument, but can still work as intended.  */
      warning_at (current_location,
		  OPT_Wvarargs,
		  "%<__builtin_next_arg%> called without an argument");
      return true;
    }
  else if (nargs > 1)
    {
      error ("wrong number of arguments to function %<__builtin_next_arg%>");
      return true;
    }
  else
    arg = CALL_EXPR_ARG (exp, 0);

  if (TREE_CODE (arg) == SSA_NAME)
    arg = SSA_NAME_VAR (arg);

  /* We destructively modify the call to be __builtin_va_start (ap, 0)
     or __builtin_next_arg (0) the first time we see it, after checking
     the arguments and if needed issuing a warning.  */
  if (!integer_zerop (arg))
    {
      tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));

      /* Strip off all nops for the sake of the comparison.  This
	 is not quite the same as STRIP_NOPS.  It does more.
	 We must also strip off INDIRECT_EXPR for C++ reference
	 parameters.  */
      while (CONVERT_EXPR_P (arg)
	     || TREE_CODE (arg) == INDIRECT_REF)
	arg = TREE_OPERAND (arg, 0);
      if (arg != last_parm)
	{
	  /* FIXME: Sometimes the tree optimizers hand us something other
	     than the last argument even though the user wrote the last
	     argument; we just warn here.  */
	  warning_at (current_location,
		      OPT_Wvarargs,
		      "second parameter of %<va_start%> not last named argument");
	}

      /* Undefined by C99 7.15.1.4p4 (va_start):
	 "If the parameter parmN is declared with the register storage
	 class, with a function or array type, or with a type that is
	 not compatible with the type that results after application of
	 the default argument promotions, the behavior is undefined."
	 */
      else if (DECL_REGISTER (arg))
	{
	  warning_at (current_location,
		      OPT_Wvarargs,
		      "undefined behaviour when second parameter of "
		      "%<va_start%> is declared with %<register%> storage");
	}

      /* We want to verify the second parameter just once before the tree
	 optimizers are run and then avoid keeping it in the tree,
	 as otherwise we could warn even for correct code like:
	 void foo (int i, ...)
	 { va_list ap; i++; va_start (ap, i); va_end (ap); }  */
      if (va_start_p)
	CALL_EXPR_ARG (exp, 1) = integer_zero_node;
      else
	CALL_EXPR_ARG (exp, 0) = integer_zero_node;
    }
  return false;
}
/* Expand a call EXP to __builtin_object_size.  */

static rtx
expand_builtin_object_size (tree exp)
{
  tree ost;
  int object_size_type;
  tree fndecl = get_callee_fndecl (exp);

  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      error ("%Kfirst argument of %D must be a pointer, second integer constant",
	     exp, fndecl);
      expand_builtin_trap ();
      return const0_rtx;
    }

  ost = CALL_EXPR_ARG (exp, 1);
  STRIP_NOPS (ost);

  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    {
      error ("%Klast argument of %D is not integer constant between 0 and 3",
	     exp, fndecl);
      expand_builtin_trap ();
      return const0_rtx;
    }

  object_size_type = tree_to_shwi (ost);

  return object_size_type < 2 ? constm1_rtx : const0_rtx;
}
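
/* Sketch: by expansion time nothing more can be learned, so the
   conservative answers are emitted:

     __builtin_object_size (p, 0 or 1)  ->  (size_t) -1
     __builtin_object_size (p, 2 or 3)  ->  (size_t) 0  */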
/* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   FCODE is the BUILT_IN_* to use.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
			   enum built_in_function fcode)
{
  tree dest, src, len, size;

  if (!validate_arglist (exp,
			 POINTER_TYPE,
			 fcode == BUILT_IN_MEMSET_CHK
			 ? INTEGER_TYPE : POINTER_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dest = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);
  len = CALL_EXPR_ARG (exp, 2);
  size = CALL_EXPR_ARG (exp, 3);

  if (! tree_fits_uhwi_p (size))
    return NULL_RTX;

  if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
    {
      tree fn;

      if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
	{
	  warning_at (tree_nonartificial_location (exp),
		      0, "%Kcall to %D will always overflow destination buffer",
		      exp, get_callee_fndecl (exp));
	  return NULL_RTX;
	}

      fn = NULL_TREE;
      /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
	 mem{cpy,pcpy,move,set} is available.  */
      switch (fcode)
	{
	case BUILT_IN_MEMCPY_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
	  break;
	case BUILT_IN_MEMPCPY_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
	  break;
	case BUILT_IN_MEMMOVE_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
	  break;
	case BUILT_IN_MEMSET_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMSET);
	  break;
	default:
	  break;
	}

      if (! fn)
	return NULL_RTX;

      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_expr (fn, target, mode, EXPAND_NORMAL);
    }
  else if (fcode == BUILT_IN_MEMSET_CHK)
    return NULL_RTX;
  else
    {
      unsigned int dest_align = get_pointer_alignment (dest);

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If SRC and DEST are the same (and not volatile), do nothing.  */
      if (operand_equal_p (src, dest, 0))
	{
	  tree expr;

	  if (fcode != BUILT_IN_MEMPCPY_CHK)
	    {
	      /* Evaluate and ignore LEN in case it has side-effects.  */
	      expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
	      return expand_expr (dest, target, mode, EXPAND_NORMAL);
	    }

	  expr = fold_build_pointer_plus (dest, len);
	  return expand_expr (expr, target, mode, EXPAND_NORMAL);
	}

      /* __memmove_chk special case.  */
      if (fcode == BUILT_IN_MEMMOVE_CHK)
	{
	  unsigned int src_align = get_pointer_alignment (src);

	  if (src_align == 0)
	    return NULL_RTX;

	  /* If src is categorized for a readonly section we can use
	     normal __memcpy_chk.  */
	  if (readonly_data_expr (src))
	    {
	      tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return NULL_RTX;
	      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
					  dest, src, len, size);
	      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
	      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
	      return expand_expr (fn, target, mode, EXPAND_NORMAL);
	    }
	}
      return NULL_RTX;
    }
}
/* Emit warning if a buffer overflow is detected at compile time.  */

static void
maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
{
  int is_strlen = 0;
  tree len, size;
  location_t loc = tree_nonartificial_location (exp);

  switch (fcode)
    {
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
    /* For __strcat_chk the warning will be emitted only if overflowing
       by at least strlen (dest) + 1 bytes.  */
    case BUILT_IN_STRCAT_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 2);
      is_strlen = 1;
      break;
    case BUILT_IN_STRNCAT_CHK:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      len = CALL_EXPR_ARG (exp, 2);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    default:
      gcc_unreachable ();
    }

  if (!len || !size)
    return;

  if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
    return;

  if (is_strlen)
    {
      len = c_strlen (len, 1);
      if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
	return;
    }
  else if (fcode == BUILT_IN_STRNCAT_CHK)
    {
      tree src = CALL_EXPR_ARG (exp, 1);
      if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
	return;
      src = c_strlen (src, 1);
      if (! src || ! tree_fits_uhwi_p (src))
	{
	  warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
		      exp, get_callee_fndecl (exp));
	  return;
	}
      else if (tree_int_cst_lt (src, size))
	return;
    }
  else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
    return;

  warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
	      exp, get_callee_fndecl (exp));
}
/* Emit warning if a buffer overflow is detected at compile time
   in __sprintf_chk/__vsprintf_chk calls.  */

static void
maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
{
  tree size, len, fmt;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call.  */

  if (nargs < 4)
    return;
  size = CALL_EXPR_ARG (exp, 2);
  fmt = CALL_EXPR_ARG (exp, 3);

  if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
    return;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return;

  if (!init_target_chars ())
    return;

  /* If the format doesn't contain % args or %%, we know its size.  */
  if (strchr (fmt_str, target_percent) == 0)
    len = build_int_cstu (size_type_node, strlen (fmt_str));
  /* If the format is "%s" and first ... argument is a string literal,
     we know it too.  */
  else if (fcode == BUILT_IN_SPRINTF_CHK
	   && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree arg;

      if (nargs < 5)
	return;
      arg = CALL_EXPR_ARG (exp, 4);
      if (! POINTER_TYPE_P (TREE_TYPE (arg)))
	return;

      len = c_strlen (arg, 1);
      if (!len || ! tree_fits_uhwi_p (len))
	return;
    }
  else
    return;

  if (! tree_int_cst_lt (len, size))
    warning_at (tree_nonartificial_location (exp),
		0, "%Kcall to %D will always overflow destination buffer",
		exp, get_callee_fndecl (exp));
}
/* Emit warning if a free is called with address of a variable.  */

static void
maybe_emit_free_warning (tree exp)
{
  tree arg = CALL_EXPR_ARG (exp, 0);

  STRIP_NOPS (arg);
  if (TREE_CODE (arg) != ADDR_EXPR)
    return;

  arg = get_base_address (TREE_OPERAND (arg, 0));
  if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
    return;

  if (SSA_VAR_P (arg))
    warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
		"%Kattempt to free a non-heap object %qD", exp, arg);
  else
    warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
		"%Kattempt to free a non-heap object", exp);
}
/* Fold a call to __builtin_object_size with arguments PTR and OST,
   if possible.  */

static tree
fold_builtin_object_size (tree ptr, tree ost)
{
  unsigned HOST_WIDE_INT bytes;
  int object_size_type;

  if (!validate_arg (ptr, POINTER_TYPE)
      || !validate_arg (ost, INTEGER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (ost);

  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    return NULL_TREE;

  object_size_type = tree_to_shwi (ost);

  /* __builtin_object_size doesn't evaluate side-effects in its arguments;
     if there are any side-effects, it returns (size_t) -1 for types 0 and 1
     and (size_t) 0 for types 2 and 3.  */
  if (TREE_SIDE_EFFECTS (ptr))
    return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      bytes = compute_builtin_object_size (ptr, object_size_type);
      if (wi::fits_to_tree_p (bytes, size_type_node))
	return build_int_cstu (size_type_node, bytes);
    }
  else if (TREE_CODE (ptr) == SSA_NAME)
    {
      /* If object size is not known yet, delay folding until
	 later.  Maybe subsequent passes will help determining
	 it.  */
      bytes = compute_builtin_object_size (ptr, object_size_type);
      if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
	  && wi::fits_to_tree_p (bytes, size_type_node))
	return build_int_cstu (size_type_node, bytes);
    }

  return NULL_TREE;
}
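
/* Example (sketch):

     char buf[64];
     __builtin_object_size (&buf[16], 0)   folds to 48,

   whereas an SSA_NAME pointer whose target is still unknown is left
   unfolded so that later passes can retry.  */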
/* Builtins with folding operations that operate on "..." arguments
   need special handling; we need to store the arguments in a convenient
   data structure before attempting any folding.  Fortunately there are
   only a few builtins that fall into this category.  FNDECL is the
   function, EXP is the CALL_EXPR for the call.  */

static tree
fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  tree ret = NULL_TREE;

  switch (fcode)
    {
    case BUILT_IN_FPCLASSIFY:
      ret = fold_builtin_fpclassify (loc, args, nargs);
      break;

    default:
      break;
    }
  if (ret)
    {
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}
/* Initialize format string characters in the target charset.  */

bool
init_target_chars (void)
{
  static bool init;
  if (!init)
    {
      target_newline = lang_hooks.to_target_charset ('\n');
      target_percent = lang_hooks.to_target_charset ('%');
      target_c = lang_hooks.to_target_charset ('c');
      target_s = lang_hooks.to_target_charset ('s');
      if (target_newline == 0 || target_percent == 0 || target_c == 0
	  || target_s == 0)
	return false;

      target_percent_c[0] = target_percent;
      target_percent_c[1] = target_c;
      target_percent_c[2] = '\0';

      target_percent_s[0] = target_percent;
      target_percent_s[1] = target_s;
      target_percent_s[2] = '\0';

      target_percent_s_newline[0] = target_percent;
      target_percent_s_newline[1] = target_s;
      target_percent_s_newline[2] = target_newline;
      target_percent_s_newline[3] = '\0';

      init = true;
    }
  return true;
}
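
/* Sketch: after initialization target_percent_s holds "%s" in the
   target character set, so host-side checks such as

     strcmp (fmt_str, target_percent_s) == 0

   stay correct even when host and target charsets differ (e.g. an
   EBCDIC target cross-compiled from an ASCII host).  */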
/* Helper function for do_mpfr_arg*().  Ensure M is a normal number
   and no overflow/underflow occurred.  INEXACT is true if M was not
   exactly calculated.  TYPE is the tree type for the result.  This
   function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail.  */

static tree
do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
      && (!flag_rounding_math || !inexact))
    {
      REAL_VALUE_TYPE rr;

      real_from_mpfr (&rr, m, type, GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpfr_t is not, then we underflowed in the
	 conversion.  */
      if (real_isfinite (&rr)
	  && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
	{
	  REAL_VALUE_TYPE rmode;

	  real_convert (&rmode, TYPE_MODE (type), &rr);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (real_identical (&rmode, &rr))
	    return build_real (type, rmode);
	}
    }
  return NULL_TREE;
}
/* Helper function for do_mpc_arg*().  Ensure M is a normal complex
   number and no overflow/underflow occurred.  INEXACT is true if M
   was not exactly calculated.  TYPE is the tree type for the result.
   This function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail, if
   FORCE_CONVERT is true, then bypass the checks.  */

static tree
do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (force_convert
      || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
	  && !mpfr_overflow_p () && !mpfr_underflow_p ()
	  && (!flag_rounding_math || !inexact)))
    {
      REAL_VALUE_TYPE re, im;

      real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
      real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpfr_t is not, then we underflowed in the
	 conversion.  */
      if (force_convert
	  || (real_isfinite (&re) && real_isfinite (&im)
	      && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
	      && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
	{
	  REAL_VALUE_TYPE re_mode, im_mode;

	  real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
	  real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (force_convert
	      || (real_identical (&re_mode, &re)
		  && real_identical (&im_mode, &im)))
	    return build_complex (type, build_real (TREE_TYPE (type), re_mode),
				  build_real (TREE_TYPE (type), im_mode));
	}
    }
  return NULL_TREE;
}
/* If argument ARG is a REAL_CST, call the one-argument mpfr function
   FUNC on it and return the resulting value as a tree with type TYPE.
   If MIN and/or MAX are not NULL, then the supplied ARG must be
   within those bounds.  If INCLUSIVE is true, then MIN/MAX are
   acceptable values, otherwise they are not.  The mpfr precision is
   set to the precision of TYPE.  We assume that function FUNC returns
   zero if the result could be calculated exactly within the requested
   precision.  */

static tree
do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
	      const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
	      bool inclusive)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      if (real_isfinite (ra)
	  && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min))
	  && (!max || real_compare (inclusive ? LE_EXPR : LT_EXPR, ra, max)))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  int inexact;
	  mpfr_t m;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = func (m, m, rnd);
	  result = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	}
    }

  return result;
}
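
/* Usage sketch: folding sin (1.0) for double ends up as

     do_mpfr_arg1 (arg, double_type_node, mpfr_sin, NULL, NULL, 0);

   which evaluates mpfr_sin at the 53-bit target precision and converts
   the result back through do_mpfr_ckconv, refusing to fold under
   -frounding-math when the value is inexact.  */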
/* If argument ARG is a REAL_CST, call the two-argument mpfr function
   FUNC on it and return the resulting value as a tree with type TYPE.
   The mpfr precision is set to the precision of TYPE.  We assume that
   function FUNC returns zero if the result could be calculated
   exactly within the requested precision.  */

static tree
do_mpfr_arg2 (tree arg1, tree arg2, tree type,
	      int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
    {
      const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
      const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);

      if (real_isfinite (ra1) && real_isfinite (ra2))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  int inexact;
	  mpfr_t m1, m2;

	  mpfr_inits2 (prec, m1, m2, NULL);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_from_real (m2, ra2, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = func (m1, m1, m2, rnd);
	  result = do_mpfr_ckconv (m1, type, inexact);
	  mpfr_clears (m1, m2, NULL);
	}
    }

  return result;
}
11857 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
11858 FUNC on it and return the resulting value as a tree with type TYPE.
11859 The mpfr precision is set to the precision of TYPE. We assume that
11860 function FUNC returns zero if the result could be calculated
11861 exactly within the requested precision. */
static tree
do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
	      int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr,
			  mp_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);
  STRIP_NOPS (arg3);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
      && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
    {
      const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
      const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
      const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);

      if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  int inexact;
	  mpfr_t m1, m2, m3;

	  mpfr_inits2 (prec, m1, m2, m3, NULL);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_from_real (m2, ra2, GMP_RNDN);
	  mpfr_from_real (m3, ra3, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = func (m1, m1, m2, m3, rnd);
	  result = do_mpfr_ckconv (m1, type, inexact);
	  mpfr_clears (m1, m2, m3, NULL);
	}
    }

  return result;
}
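/* Example (illustration): fma (2.0, 3.0, 1.0) arrives here with
   FUNC == mpfr_fma and folds to 7.0, computed as a single correctly
   rounded operation as the fma contract requires.  */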
/* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
   the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
   If ARG_SINP and ARG_COSP are NULL then the result is returned
   as a complex value.
   The type is taken from the type of ARG and is used for setting the
   precision of the calculation and results.  */
static tree
do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
{
  tree const type = TREE_TYPE (arg);
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST
      && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      if (real_isfinite (ra))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  tree result_s, result_c;
	  int inexact;
	  mpfr_t m, ms, mc;

	  mpfr_inits2 (prec, m, ms, mc, NULL);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = mpfr_sin_cos (ms, mc, m, rnd);
	  result_s = do_mpfr_ckconv (ms, type, inexact);
	  result_c = do_mpfr_ckconv (mc, type, inexact);
	  mpfr_clears (m, ms, mc, NULL);
	  if (result_s && result_c)
	    {
	      /* If we are to return in a complex value do so.  */
	      if (!arg_sinp && !arg_cosp)
		return build_complex (build_complex_type (type),
				      result_c, result_s);

	      /* Dereference the sin/cos pointer arguments.  */
	      arg_sinp = build_fold_indirect_ref (arg_sinp);
	      arg_cosp = build_fold_indirect_ref (arg_cosp);
	      /* Proceed if valid pointer types were passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp))
		    == TYPE_MAIN_VARIANT (type)
		  && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp))
		       == TYPE_MAIN_VARIANT (type))
		{
		  /* Set the values.  */
		  result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
					  result_s);
		  TREE_SIDE_EFFECTS (result_s) = 1;
		  result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
					  result_c);
		  TREE_SIDE_EFFECTS (result_c) = 1;
		  /* Combine the assignments into a compound expr.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_s, result_c));
		}
	    }
	}
    }

  return result;
}
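/* Example (illustration): for sincos (0.0, &s, &c) the folded form is
   the compound expression assigning 0.0 through the sin pointer and
   1.0 through the cos pointer; with NULL pointer arguments (the cexpi
   case) the same values come back as the COMPLEX_CST 1.0 + 0.0i.  */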
/* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
   two-argument mpfr order N Bessel function FUNC on them and return
   the resulting value as a tree with type TYPE.  The mpfr precision
   is set to the precision of TYPE.  We assume that function FUNC
   returns zero if the result could be calculated exactly within the
   requested precision.  */
static tree
do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
		  int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
		  const REAL_VALUE_TYPE *min, bool inclusive)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && tree_fits_shwi_p (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
    {
      const HOST_WIDE_INT n = tree_to_shwi (arg1);
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);

      if (n == (long) n
	  && real_isfinite (ra)
	  && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min)))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  int inexact;
	  mpfr_t m;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = func (m, n, m, rnd);
	  result = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	}
    }

  return result;
}
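/* Example (illustration): jn (2, 1.5) can arrive here with
   FUNC == mpfr_jn and N == 2; the yn folder additionally passes a MIN
   bound of zero so that a call such as yn (2, -1.0), which is
   undefined, stays unfolded.  */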
/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  */
static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  tree result_rem;
	  long integer_quo;
	  mpfr_t m0, m1;

	  mpfr_inits2 (prec, m0, m1, NULL);
	  mpfr_from_real (m0, ra0, GMP_RNDN);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_clear_flags ();
	  mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
	  /* Remquo is independent of the rounding mode, so pass
	     inexact=0 to do_mpfr_ckconv().  */
	  result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
	  mpfr_clears (m0, m1, NULL);
	  if (result_rem)
	    {
	      /* MPFR calculates quo in the host's long so it may
		 return more bits in quo than the target int can hold
		 if sizeof(host long) > sizeof(target int).  This can
		 happen even for native compilers in LP64 mode.  In
		 these cases, modulo the quo value with the largest
		 number that the target int can hold while leaving one
		 bit for the sign.  */
	      if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
		integer_quo %= (long) (1UL << (INT_TYPE_SIZE - 1));

	      /* Dereference the quo pointer argument.  */
	      arg_quo = build_fold_indirect_ref (arg_quo);
	      /* Proceed iff a valid pointer type was passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
		{
		  /* Set the value.  */
		  tree result_quo
		    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
				   build_int_cst (TREE_TYPE (arg_quo),
						  integer_quo));
		  TREE_SIDE_EFFECTS (result_quo) = 1;
		  /* Combine the quo assignment with the rem.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_quo, result_rem));
		}
	    }
	}
    }

  return result;
}
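/* Example (illustration): with constant operands,

     double r = remquo (5.0, 3.0, &quo);

   folds to the pair (quo = 2, r = -1.0): the quotient 5/3 rounds to
   the nearest integer 2, and the remainder is 5 - 2*3 = -1.  */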
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */
static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
	 negative integer.  */
      if (real_isfinite (ra)
	  && ra->cl != rvc_zero
	  && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  int inexact, sg;
	  mpfr_t m;
	  tree result_lg;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = mpfr_lgamma (m, &sg, m, rnd);
	  result_lg = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	  if (result_lg)
	    {
	      tree result_sg;

	      /* Dereference the arg_sg pointer argument.  */
	      arg_sg = build_fold_indirect_ref (arg_sg);
	      /* Assign the signgam value into *arg_sg.  */
	      result_sg = fold_build2 (MODIFY_EXPR,
				       TREE_TYPE (arg_sg), arg_sg,
				       build_int_cst (TREE_TYPE (arg_sg), sg));
	      TREE_SIDE_EFFECTS (result_sg) = 1;
	      /* Combine the signgam assignment with the lgamma result.  */
	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						result_sg, result_lg));
	    }
	}
    }

  return result;
}
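/* Example (illustration): lgamma_r (-2.5, &sg) folds to the compound
   expression storing -1 through SG (Gamma is negative at -2.5) followed
   by the REAL_CST log|Gamma(-2.5)|.  */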
/* If argument ARG is a COMPLEX_CST, call the one-argument mpc
   function FUNC on it and return the resulting value as a tree with
   type TYPE.  The mpfr precision is set to the precision of TYPE.  We
   assume that function FUNC returns zero if the result could be
   calculated exactly within the requested precision.  */
static tree
do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      if (real_isfinite (re) && real_isfinite (im))
	{
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m;

	  mpc_init2 (m, prec);
	  mpfr_from_real (mpc_realref (m), re, rnd);
	  mpfr_from_real (mpc_imagref (m), im, rnd);
	  mpfr_clear_flags ();
	  inexact = func (m, m, crnd);
	  result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
	  mpc_clear (m);
	}
    }

  return result;
}
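/* Example (illustration): a constant complex call such as
   csin (1.0 + 2.0i) can be routed here with FUNC == mpc_sin and folds
   to a COMPLEX_CST whose real and imaginary parts are rounded per
   CRND.  */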
/* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
   mpc function FUNC on them and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */
static tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
	     int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
	  || (real_isfinite (re0) && real_isfinite (im0)
	      && real_isfinite (re1) && real_isfinite (im1)))
	{
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m0, m1;

	  mpc_init2 (m0, prec);
	  mpc_init2 (m1, prec);
	  mpfr_from_real (mpc_realref (m0), re0, rnd);
	  mpfr_from_real (mpc_imagref (m0), im0, rnd);
	  mpfr_from_real (mpc_realref (m1), re1, rnd);
	  mpfr_from_real (mpc_imagref (m1), im1, rnd);
	  mpfr_clear_flags ();
	  inexact = func (m0, m0, m1, crnd);
	  result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
	  mpc_clear (m0);
	  mpc_clear (m1);
	}
    }

  return result;
}
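/* Example (illustration): cpow with constant complex operands can be
   routed here with FUNC == mpc_pow; that caller typically passes
   DO_NONFINITE only when folding static initializers, where Inf/NaN
   operands may still be folded.  */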
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */
tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
		    ? gimple_call_arg_ptr (stmt, 0)
		    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	return targetm.fold_builtin (fndecl, nargs, args, ignore);
      else
	{
	  ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	  if (ret)
	    {
	      /* Propagate location information from original call to
		 expansion of builtin.  Otherwise things like
		 maybe_emit_chk_warning, that operate on the expansion
		 of a builtin, will use the wrong location information.  */
	      if (gimple_has_location (stmt))
		{
		  tree realret = ret;
		  if (TREE_CODE (ret) == NOP_EXPR)
		    realret = TREE_OPERAND (ret, 0);
		  if (CAN_HAVE_LOCATION_P (realret)
		      && !EXPR_HAS_LOCATION (realret))
		    SET_EXPR_LOCATION (realret, loc);
		  return realret;
		}
	      return ret;
	    }
	}
    }
  return NULL_TREE;
}
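/* Example (illustration): for a GIMPLE call d = sqrt (4.0) this
   wrapper returns the REAL_CST 2.0, and when the folded tree can carry
   a location it inherits the original call's location so later
   diagnostics point at the call site.  */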
/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */
void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  tree builtin;
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
	      && asmspec != 0);

  builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);
  switch (DECL_FUNCTION_CODE (decl))
    {
    case BUILT_IN_MEMCPY:
      init_block_move_fn (asmspec);
      memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
      break;
    case BUILT_IN_MEMSET:
      init_block_clear_fn (asmspec);
      memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
      break;
    case BUILT_IN_MEMMOVE:
      memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
      break;
    case BUILT_IN_MEMCMP:
      memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
      break;
    case BUILT_IN_ABORT:
      abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
      break;
    case BUILT_IN_FFS:
      if (INT_TYPE_SIZE < BITS_PER_WORD)
	{
	  set_user_assembler_libfunc ("ffs", asmspec);
	  set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
						       MODE_INT, 0), "ffs");
	}
      break;
    default:
      break;
    }
}
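/* Example (illustration): the renaming above is triggered by user code
   such as

     extern void *memcpy (void *, const void *, __SIZE_TYPE__)
       __asm__ ("my_memcpy");

   after which library calls emitted for block moves also target
   my_memcpy.  */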
/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
	/* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
	/* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
	/* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
	return true;

      default:
	return false;
      }

  return false;
}
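/* Note (illustration): the cases above let size heuristics treat calls
   like __builtin_expect (x, 1) or __builtin_stack_save () as nearly
   free, since they expand to a constant or a couple of register moves
   rather than a real call.  */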
/* Return true if DECL is a builtin that is not expensive, i.e., it is
   most probably expanded inline into reasonably simple code.  This is a
   superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
	return true;

      default:
	return is_simple_builtin (decl);
      }

  return false;
}
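/* Note (illustration): size estimators (e.g. for inlining and loop
   unrolling) can use is_inexpensive_builtin to treat calls such as
   __builtin_popcount (x) as cheap; anything not listed above falls
   back to the stricter is_simple_builtin test.  */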