/* Expand builtin functions.
   Copyright (C) 1988-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "tree-object-size.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "typeclass.h"
#include "langhooks.h"
#include "tree-ssanames.h"
#include "value-prof.h"
#include "diagnostic-core.h"
static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
struct target_builtins default_target_builtins;
struct target_builtins *this_target_builtins = &default_target_builtins;
/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN
/* Setup an array of _DECL trees, make sure each element is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info;
/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;
static rtx c_readstr (const char *, enum machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
static rtx result_vector (int, rtx);
#endif
static void expand_builtin_update_setjmp_buf (rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static void expand_errno_check (tree, rtx);
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
                                        enum machine_mode, int);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx,
                                       enum machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
static rtx expand_builtin_alloca (tree, bool);
static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree fold_builtin_nan (tree, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static bool integer_valued_real_p (tree);
static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_sqrt (location_t, tree, tree);
static tree fold_builtin_cbrt (location_t, tree, tree);
static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
static tree fold_builtin_cos (location_t, tree, tree, tree);
static tree fold_builtin_cosh (location_t, tree, tree, tree);
static tree fold_builtin_tan (tree, tree);
static tree fold_builtin_trunc (location_t, tree, tree);
static tree fold_builtin_floor (location_t, tree, tree);
static tree fold_builtin_ceil (location_t, tree, tree);
static tree fold_builtin_round (location_t, tree, tree);
static tree fold_builtin_int_roundingfn (location_t, tree, tree);
static tree fold_builtin_bitop (tree, tree);
static tree fold_builtin_strchr (location_t, tree, tree, tree);
static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_strcmp (location_t, tree, tree);
static tree fold_builtin_strncmp (location_t, tree, tree, tree);
static tree fold_builtin_signbit (location_t, tree, tree);
static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree,
                                        enum tree_code, enum tree_code);
static tree fold_builtin_n (location_t, tree, tree *, int, bool);
static tree fold_builtin_0 (location_t, tree, bool);
static tree fold_builtin_1 (location_t, tree, tree, bool);
static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
static tree fold_builtin_varargs (location_t, tree, tree, bool);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strstr (location_t, tree, tree, tree);
static tree fold_builtin_strrchr (location_t, tree, tree, tree);
static tree fold_builtin_strncat (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);
static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
static tree fold_builtin_snprintf (location_t, tree, tree, tree, tree, int);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
                                      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);
static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
static tree fold_builtin_printf (location_t, tree, tree, tree, bool,
                                 enum built_in_function);
static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
                                  enum built_in_function);

static unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
static char target_percent_c[3];
char target_percent_s[3];
static char target_percent_s_newline[4];
static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
                          const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_arg2 (tree, tree, tree,
                          int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_arg3 (tree, tree, tree, tree,
                          int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_sincos (tree, tree, tree);
static tree do_mpfr_bessel_n (tree, tree, tree,
                              int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
                              const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);
/* Return true if NAME starts with __builtin_ or __sync_.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  if (flag_cilkplus
      && (!strcmp (name, "__cilkrts_detach")
          || !strcmp (name, "__cilkrts_pop_frame")))
    return true;
  return false;
}
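/* As an illustration of the checks above:

     is_builtin_name ("__builtin_memcpy")   -> true
     is_builtin_name ("__sync_synchronize") -> true
     is_builtin_name ("__atomic_load_n")    -> true
     is_builtin_name ("memcpy")             -> false  */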
/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}
/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

static bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}
/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and N
   in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
                        unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode;
  int unsignedp, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                             &mode, &unsignedp, &volatilep, true);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
         alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
         allows the low bit to be used as a virtual bit, we know
         that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
        align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
        align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    {
      align = TYPE_ALIGN (TREE_TYPE (exp));
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
           || TREE_CODE (exp) == MEM_REF
           || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;

      if (TREE_CODE (addr) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
        {
          align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
                   & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
          align *= BITS_PER_UNIT;
          addr = TREE_OPERAND (addr, 0);
        }

      known_alignment
        = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* The alignment of the pointer operand in a TARGET_MEM_REF
         has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
        {
          if (TMR_INDEX (exp))
            {
              unsigned HOST_WIDE_INT step = 1;
              if (TMR_STEP (exp))
                step = TREE_INT_CST_LOW (TMR_STEP (exp));
              align = MIN (align, (step & -step) * BITS_PER_UNIT);
            }
          if (TMR_INDEX2 (exp))
            align = BITS_PER_UNIT;
          known_alignment = false;
        }

      /* When EXP is an actual memory reference then we can use
         TYPE_ALIGN of a pointer indirection to derive alignment.
         Do so only if get_pointer_alignment_1 did not reveal absolute
         alignment knowledge and if using that alignment would
         improve the situation.  */
      if (!addr_p && !known_alignment
          && TYPE_ALIGN (TREE_TYPE (exp)) > align)
        align = TYPE_ALIGN (TREE_TYPE (exp));
      else
        {
          /* Else adjust bitpos accordingly.  */
          bitpos += ptr_bitpos;
          if (TREE_CODE (exp) == MEM_REF
              || TREE_CODE (exp) == TARGET_MEM_REF)
            bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
        }
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
         wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
        align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
        {
          unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
          if (inner)
            align = MIN (align, inner);
        }
    }

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}
/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
                        unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}
/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);
  return align;
}
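/* Worked example for the adjustment above: if the analysis yields
   ALIGN == 64 and BITPOS == 16, then ptr & 63 == 16, so the strongest
   unconditional guarantee is divisibility by 16; bitpos & -bitpos isolates
   that lowest set bit and becomes the returned alignment.  */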
/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Return false if the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
                         unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
                                   alignp, bitposp, true);
  else if (TREE_CODE (exp) == SSA_NAME
           && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
        {
          *bitposp = ptr_misalign * BITS_PER_UNIT;
          *alignp = ptr_align * BITS_PER_UNIT;
          /* We cannot really tell whether this result is an approximation.  */
          return true;
        }
      else
        {
          *bitposp = 0;
          *alignp = BITS_PER_UNIT;
          return false;
        }
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
                  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}
/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);

  return align;
}
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
        return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_LOC (src, input_location);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
         compute the offset to the following null if we don't know where to
         start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
        if (ptr[i] == 0)
          return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
         has no internal zero bytes.  We can assume that the offset falls
         within the bounds of the string; otherwise, the programmer deserves
         what he gets.  Subtract the offset from the length of the string,
         and return that.  This would perhaps not be valid if we were dealing
         with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! tree_fits_shwi_p (offset_node))
    offset = -1;
  else
    offset = tree_to_shwi (offset_node);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
          && !TREE_NO_WARNING (src))
        {
          warning_at (loc, 0, "offset outside bounds of constant string");
          TREE_NO_WARNING (src) = 1;
        }
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
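/* For example, applied to the string constant "foo\0bar" with a known
   constant offset of 4, the code above returns ssize_int (3): the scan
   starts at the 'b' and stops at the terminating null that build_string
   guarantees.  */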
/* Return a char pointer for a C string if it is a string constant
   or sum of string constant and integer constant.  */

static const char *
c_getstr (tree src)
{
  tree offset_node;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  if (offset_node == 0)
    return TREE_STRING_POINTER (src);
  else if (!tree_fits_uhwi_p (offset_node)
           || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
    return 0;

  return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
}
/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, enum machine_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
        j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
          && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
        j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
        ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
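/* Illustration (assuming a target where neither BYTES_BIG_ENDIAN nor
   WORDS_BIG_ENDIAN is set): c_readstr ("abcd", SImode) puts 'a' (0x61) in
   the least significant byte and yields the constant 0x64636261, the same
   value a 4-byte little-endian load from the string would produce.  */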
/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to
   by P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
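/* For instance, with an 8-bit target char, an INTEGER_CST of value 65
   survives both masks unchanged, so 65 ('A' on an ASCII host) is stored
   through P and 0 is returned; a non-INTEGER_CST argument fails the first
   test and the function returns 1.  */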
/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
          && (TREE_CODE (exp) == PARM_DECL
              || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
         frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
                        plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;
/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
                                           GET_MODE_SIZE (Pmode))),
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
                  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
                            plus_constant (Pmode, buf_addr,
                                           2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
#ifdef HAVE_builtin_setjmp_setup
  if (HAVE_builtin_setjmp_setup)
    emit_insn (gen_builtin_setjmp_setup (buf_addr));
#endif

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
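/* To summarize the layout established above: word 0 of the buffer receives
   the frame pointer value, word 1 the address of RECEIVER_LABEL, and the
   area starting at offset 2 * GET_MODE_SIZE (Pmode) holds the
   machine-dependent stack save area.  expand_builtin_longjmp below reads
   back exactly these three slots.  */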
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    {
      /* First adjust our frame pointer to its actual value.  It was
         previously set to the start of the virtual area corresponding to
         the stacked variables when we branched here and now needs to be
         adjusted to the actual hardware fp value.

         Assignments to virtual registers are converted by
         instantiate_virtual_regs into the corresponding assignment
         to the underlying register (fp in this case) that makes
         the original assignment true.
         So the following insn will actually be decrementing fp by
         STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
         Mark it used (so that the previous assignment remains live once
         the frame pointer is eliminated) and clobbered (to represent the
         implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

#if !HARD_FRAME_POINTER_IS_ARG_POINTER
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
         frame pointer, we don't need to restore it.  We assume here
         that if such an elimination is present, it can always be used.
         This is the case on all known machines; if we don't make this
         assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[]
        = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
        if (elim_regs[i].from == ARG_POINTER_REGNUM
            && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
          break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
        {
          /* Now restore our arg pointer from the address at which it
             was saved in our stack frame.  */
          emit_move_insn (crtl->args.internal_arg_pointer,
                          copy_to_reg (get_arg_pointer_save_area ()));
        }
    }
#endif

#ifdef HAVE_builtin_setjmp_receiver
  if (receiver_label != NULL && HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack, insn, last;
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
                                               GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
                                                   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
         from expand_goto in stmt.c; see there for detailed comments.  */
#ifdef HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
        /* We have to pass a value to the nonlocal_goto pattern that will
           get copied into the static_chain pointer, but it does not matter
           what that value is, because builtin_setjmp does not use it.  */
        emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
        {
          lab = copy_to_reg (lab);

          emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
          emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

          emit_move_insn (hard_frame_pointer_rtx, fp);
          emit_stack_restore (SAVE_NONLOCAL, stack);

          emit_use (hard_frame_pointer_rtx);
          emit_use (stack_pointer_rtx);
          emit_indirect_jump (lab);
        }
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
        {
          add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
          break;
        }
      else if (CALL_P (insn))
        break;
    }
}

static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
        {
        case 0:
          /* This signifies an ellipsis, any further arguments are all ok.  */
          res = true;
          goto end;
        case VOID_TYPE:
          /* This signifies an endlink, if no arguments remain, return
             true, otherwise return false.  */
          res = !more_const_call_expr_args_p (&iter);
          goto end;
        default:
          /* If no parameters remain or the parameter's code does not
             match the specified code, return false.  Otherwise continue
             checking any remaining arguments.  */
          arg = next_const_call_expr_arg (&iter);
          if (!validate_arg (arg, code))
            goto end;
          break;
        }
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}
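/* For example, the call in expand_builtin_nonlocal_goto below,

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)

   accepts exactly two pointer arguments, whereas ending the specifier list
   with 0 instead of VOID_TYPE would allow arbitrary extra arguments.  */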
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp, insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
                      plus_constant (Pmode, r_save_area,
                                     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

#ifdef HAVE_nonlocal_goto
  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (HAVE_nonlocal_goto)
    emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
#endif
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
         not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
         conservatively assume that the target function makes use of it.
         The prologue of functions with nonlocal gotos must therefore
         initialize the GP register to the appropriate value, and we
         must then make sure that this value is live at the point
         of the jump.  (Note that this doesn't necessarily apply
         to targets with a nonlocal_goto pattern; they are free
         to implement it in their own way.  Note also that this is
         a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
          && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
        emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
        {
          add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
          break;
        }
      else if (CALL_P (insn))
        break;
    }

  return const0_rtx;
}
1190 (not all will be used on all machines) that was passed to __builtin_setjmp.
1191 It updates the stack pointer in that block to correspond to the current
1195 expand_builtin_update_setjmp_buf (rtx buf_addr
)
1197 enum machine_mode sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
1199 = gen_rtx_MEM (sa_mode
,
1202 plus_constant (Pmode
, buf_addr
,
1203 2 * GET_MODE_SIZE (Pmode
))));
1205 emit_stack_save (SAVE_NONLOCAL
, &stack_save
);
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
               " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

#ifdef HAVE_prefetch
  if (HAVE_prefetch)
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
        return;
    }
#endif

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
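/* Thus a source-level call such as __builtin_prefetch (p, 1, 3) becomes a
   prefetch-for-write with maximum temporal locality on targets that provide
   a prefetch pattern, and on other targets only P's side effects (if any)
   are evaluated.  */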
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
         && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
                     build_array_type (char_type_node,
                                       build_range_type (sizetype,
                                                         size_one_node, len)),
                     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
           && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
                                                     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
                         build_array_type (char_type_node,
                                           build_range_type (sizetype,
                                                             size_zero_node,
                                                             NULL)),
                         exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}
/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)
/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
         passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
        size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (FUNCTION_ARG_REGNO_P (regno))
          {
            mode = targetm.calls.get_raw_arg_mode (regno);

            gcc_assert (mode != VOIDmode);

            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            size += GET_MODE_SIZE (mode);
            apply_args_mode[regno] = mode;
          }
        else
          apply_args_mode[regno] = VOIDmode;
    }
  return size;
}
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (targetm.calls.function_value_regno_p (regno))
          {
            mode = targetm.calls.get_raw_result_mode (regno);

            gcc_assert (mode != VOIDmode);

            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            size += GET_MODE_SIZE (mode);
            apply_result_mode[regno] = mode;
          }
        else
          apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
         the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
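/* A sketch of the block layouts computed by the two functions above
   (assuming a target where Pmode is 8 bytes and the structure value is
   passed invisibly): the apply-args block holds the incoming arg pointer
   in bytes 0-7 followed by each argument register in register-number
   order, each aligned to its natural boundary; the apply-result block
   simply concatenates the naturally-aligned return-value registers.  */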
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
        mem = adjust_address (result, mode, size);
        savevec[nelts++] = (savep
                            ? gen_rtx_SET (VOIDmode, mem, reg)
                            : gen_rtx_SET (VOIDmode, reg, mem));
        size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  enum machine_mode mode;
  rtx struct_incoming_value
    = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;

        tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

        emit_move_insn (adjust_address (registers, mode, size), tem);
        size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
#ifdef STACK_GROWS_DOWNWARD
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  tem
    = force_operand (plus_constant (Pmode, tem, crtl->args.pretend_args_size),
                     NULL_RTX);
#endif
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
                      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;
    rtx seq;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
        && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx incoming_args, result, reg, dest, src, call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value
    = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
                                       incoming_args, 0, OPTAB_LIB_WIDEN);
#endif

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
  else
#endif
    emit_stack_save (SAVE_BLOCK, &old_stack_level);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
#ifndef STACK_GROWS_DOWNWARD
  if (CONST_INT_P (argsize))
    dest = plus_constant (Pmode, dest, -INTVAL (argsize));
  else
    dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
#endif
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, regno);
        emit_move_insn (reg, adjust_address (arguments, mode, size));
        use_reg (&call_fusage, reg);
        size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
        use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
                                      result, result_vector (1, result)));
  else
#endif
#ifdef HAVE_call_value
  if (HAVE_call_value)
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
         express a call that sets more than one return register using
         call_value; use untyped_call for that.  In fact, untyped_call
         only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if ((mode = apply_result_mode[regno]) != VOIDmode)
          {
            gcc_assert (!valreg); /* HAVE_untyped_call required.  */

            valreg = gen_rtx_REG (mode, regno);
          }

      emit_call_insn (GEN_CALL_VALUE (valreg,
                                      gen_rtx_MEM (FUNCTION_MODE, function),
                                      const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
#endif
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
  else
#endif
    emit_stack_restore (SAVE_BLOCK, old_stack_level);
  fixup_args_size_notes (call_insn, get_last_insn (), 0);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
/* Perform an untyped return.  */

static void
expand_builtin_return (rtx result)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx reg;
  rtx call_fusage = 0;

  result = convert_memory_address (Pmode, result);

  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

#ifdef HAVE_untyped_return
  if (HAVE_untyped_return)
    {
      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
      emit_barrier ();
      return;
    }
#endif

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
        emit_move_insn (reg, adjust_address (result, mode, size));

        push_to_sequence (call_fusage);
        emit_use (reg);
        call_fusage = get_insns ();
        end_sequence ();
        size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insn (call_fusage);

  /* Return whatever values was restored by jumping directly to the end
     of the function.  */
  expand_naked_return ();
}
/* Used by expand_builtin_classify_type and fold_builtin_classify_type.  */

static enum type_class
type_to_class (tree type)
{
  switch (TREE_CODE (type))
    {
    case VOID_TYPE:	   return void_type_class;
    case INTEGER_TYPE:	   return integer_type_class;
    case ENUMERAL_TYPE:	   return enumeral_type_class;
    case BOOLEAN_TYPE:	   return boolean_type_class;
    case POINTER_TYPE:	   return pointer_type_class;
    case REFERENCE_TYPE:   return reference_type_class;
    case OFFSET_TYPE:	   return offset_type_class;
    case REAL_TYPE:	   return real_type_class;
    case COMPLEX_TYPE:	   return complex_type_class;
    case FUNCTION_TYPE:	   return function_type_class;
    case METHOD_TYPE:	   return method_type_class;
    case RECORD_TYPE:	   return record_type_class;
    case UNION_TYPE:
    case QUAL_UNION_TYPE:  return union_type_class;
    case ARRAY_TYPE:	   return (TYPE_STRING_FLAG (type)
				   ? string_type_class : array_type_class);
    case LANG_TYPE:	   return lang_type_class;
    default:		   return no_type_class;
    }
}
/* Expand a call EXP to __builtin_classify_type.  */

static rtx
expand_builtin_classify_type (tree exp)
{
  if (call_expr_nargs (exp))
    return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
  return GEN_INT (no_type_class);
}
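
/* For example (illustrative), __builtin_classify_type (0) evaluates to
   integer_type_class and __builtin_classify_type (0.0) to
   real_type_class, per the mapping in type_to_class above.  */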
/* This helper macro, meant to be used in mathfn_built_in below,
   determines which among a set of three builtin math functions is
   appropriate for a given type mode.  The `F' and `L' cases are
   automatically generated from the `double' case.  */
#define CASE_MATHFN(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
  fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
  fcodel = BUILT_IN_MATHFN##L ; break;
/* Similar to above, but appends _R after any F/L suffix.  */
#define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
  fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
  fcodel = BUILT_IN_MATHFN##L_R ; break;
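
/* For instance, CASE_MATHFN (BUILT_IN_SQRT) expands to

     case BUILT_IN_SQRT: case BUILT_IN_SQRTF: case BUILT_IN_SQRTL:
       fcode = BUILT_IN_SQRT; fcodef = BUILT_IN_SQRTF;
       fcodel = BUILT_IN_SQRTL; break;

   so one macro invocation covers the double, float and long double
   variants of a function.  */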
/* Return a mathematical function equivalent to FN but operating directly on
   TYPE, if available.  If IMPLICIT_P is true use the implicit builtin
   declaration, otherwise use the explicit declaration.  If we can't do the
   conversion, return zero.  */

static tree
mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
{
  enum built_in_function fcode, fcodef, fcodel, fcode2;

  switch (fn)
    {
      CASE_MATHFN (BUILT_IN_ACOS)
      CASE_MATHFN (BUILT_IN_ACOSH)
      CASE_MATHFN (BUILT_IN_ASIN)
      CASE_MATHFN (BUILT_IN_ASINH)
      CASE_MATHFN (BUILT_IN_ATAN)
      CASE_MATHFN (BUILT_IN_ATAN2)
      CASE_MATHFN (BUILT_IN_ATANH)
      CASE_MATHFN (BUILT_IN_CBRT)
      CASE_MATHFN (BUILT_IN_CEIL)
      CASE_MATHFN (BUILT_IN_CEXPI)
      CASE_MATHFN (BUILT_IN_COPYSIGN)
      CASE_MATHFN (BUILT_IN_COS)
      CASE_MATHFN (BUILT_IN_COSH)
      CASE_MATHFN (BUILT_IN_DREM)
      CASE_MATHFN (BUILT_IN_ERF)
      CASE_MATHFN (BUILT_IN_ERFC)
      CASE_MATHFN (BUILT_IN_EXP)
      CASE_MATHFN (BUILT_IN_EXP10)
      CASE_MATHFN (BUILT_IN_EXP2)
      CASE_MATHFN (BUILT_IN_EXPM1)
      CASE_MATHFN (BUILT_IN_FABS)
      CASE_MATHFN (BUILT_IN_FDIM)
      CASE_MATHFN (BUILT_IN_FLOOR)
      CASE_MATHFN (BUILT_IN_FMA)
      CASE_MATHFN (BUILT_IN_FMAX)
      CASE_MATHFN (BUILT_IN_FMIN)
      CASE_MATHFN (BUILT_IN_FMOD)
      CASE_MATHFN (BUILT_IN_FREXP)
      CASE_MATHFN (BUILT_IN_GAMMA)
      CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
      CASE_MATHFN (BUILT_IN_HUGE_VAL)
      CASE_MATHFN (BUILT_IN_HYPOT)
      CASE_MATHFN (BUILT_IN_ILOGB)
      CASE_MATHFN (BUILT_IN_ICEIL)
      CASE_MATHFN (BUILT_IN_IFLOOR)
      CASE_MATHFN (BUILT_IN_INF)
      CASE_MATHFN (BUILT_IN_IRINT)
      CASE_MATHFN (BUILT_IN_IROUND)
      CASE_MATHFN (BUILT_IN_ISINF)
      CASE_MATHFN (BUILT_IN_J0)
      CASE_MATHFN (BUILT_IN_J1)
      CASE_MATHFN (BUILT_IN_JN)
      CASE_MATHFN (BUILT_IN_LCEIL)
      CASE_MATHFN (BUILT_IN_LDEXP)
      CASE_MATHFN (BUILT_IN_LFLOOR)
      CASE_MATHFN (BUILT_IN_LGAMMA)
      CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
      CASE_MATHFN (BUILT_IN_LLCEIL)
      CASE_MATHFN (BUILT_IN_LLFLOOR)
      CASE_MATHFN (BUILT_IN_LLRINT)
      CASE_MATHFN (BUILT_IN_LLROUND)
      CASE_MATHFN (BUILT_IN_LOG)
      CASE_MATHFN (BUILT_IN_LOG10)
      CASE_MATHFN (BUILT_IN_LOG1P)
      CASE_MATHFN (BUILT_IN_LOG2)
      CASE_MATHFN (BUILT_IN_LOGB)
      CASE_MATHFN (BUILT_IN_LRINT)
      CASE_MATHFN (BUILT_IN_LROUND)
      CASE_MATHFN (BUILT_IN_MODF)
      CASE_MATHFN (BUILT_IN_NAN)
      CASE_MATHFN (BUILT_IN_NANS)
      CASE_MATHFN (BUILT_IN_NEARBYINT)
      CASE_MATHFN (BUILT_IN_NEXTAFTER)
      CASE_MATHFN (BUILT_IN_NEXTTOWARD)
      CASE_MATHFN (BUILT_IN_POW)
      CASE_MATHFN (BUILT_IN_POWI)
      CASE_MATHFN (BUILT_IN_POW10)
      CASE_MATHFN (BUILT_IN_REMAINDER)
      CASE_MATHFN (BUILT_IN_REMQUO)
      CASE_MATHFN (BUILT_IN_RINT)
      CASE_MATHFN (BUILT_IN_ROUND)
      CASE_MATHFN (BUILT_IN_SCALB)
      CASE_MATHFN (BUILT_IN_SCALBLN)
      CASE_MATHFN (BUILT_IN_SCALBN)
      CASE_MATHFN (BUILT_IN_SIGNBIT)
      CASE_MATHFN (BUILT_IN_SIGNIFICAND)
      CASE_MATHFN (BUILT_IN_SIN)
      CASE_MATHFN (BUILT_IN_SINCOS)
      CASE_MATHFN (BUILT_IN_SINH)
      CASE_MATHFN (BUILT_IN_SQRT)
      CASE_MATHFN (BUILT_IN_TAN)
      CASE_MATHFN (BUILT_IN_TANH)
      CASE_MATHFN (BUILT_IN_TGAMMA)
      CASE_MATHFN (BUILT_IN_TRUNC)
      CASE_MATHFN (BUILT_IN_Y0)
      CASE_MATHFN (BUILT_IN_Y1)
      CASE_MATHFN (BUILT_IN_YN)

      default:
	return NULL_TREE;
    }

  if (TYPE_MAIN_VARIANT (type) == double_type_node)
    fcode2 = fcode;
  else if (TYPE_MAIN_VARIANT (type) == float_type_node)
    fcode2 = fcodef;
  else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
    fcode2 = fcodel;
  else
    return NULL_TREE;

  if (implicit_p && !builtin_decl_implicit_p (fcode2))
    return NULL_TREE;

  return builtin_decl_explicit (fcode2);
}
/* Like mathfn_built_in_1(), but always use the implicit array.  */

tree
mathfn_built_in (tree type, enum built_in_function fn)
{
  return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
}
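
/* For example, mathfn_built_in (float_type_node, BUILT_IN_SIN) yields
   the declaration of sinf, or NULL_TREE when no implicit declaration
   of that variant is available.  */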
/* If errno must be maintained, expand the RTL to check if the result,
   TARGET, of a built-in function call, EXP, is NaN, and if so set
   errno to EDOM.  */

static void
expand_errno_check (tree exp, rtx target)
{
  rtx lab = gen_label_rtx ();

  /* Test the result; if it is NaN, set errno=EDOM because
     the argument was not in the domain.  */
  do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
			   NULL_RTX, NULL_RTX, lab,
			   /* The jump is very likely.  */
			   REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));

#ifdef TARGET_EDOM
  /* If this built-in doesn't throw an exception, set errno directly.  */
  if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
    {
#ifdef GEN_ERRNO_RTX
      rtx errno_rtx = GEN_ERRNO_RTX;
#else
      rtx errno_rtx
	  = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
#endif
      emit_move_insn (errno_rtx,
		      gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
      emit_label (lab);
      return;
    }
#endif

  /* Make sure the library call isn't expanded as a tail call.  */
  CALL_EXPR_TAILCALL (exp) = 0;

  /* We can't set errno=EDOM directly; let the library call do it.
     Pop the arguments right away in case the call gets deleted.  */
  NO_DEFER_POP;
  expand_call (exp, target, 0);
  OK_DEFER_POP;
  emit_label (lab);
}
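
/* Note on the comparison in expand_errno_check: every value compares
   EQ to itself except a NaN, so the branch to LAB is taken on the
   common in-domain path and only a NaN result falls through to the
   errno handling.  */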
/* Expand a call to one of the builtin math functions (sqrt, exp, or log).
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  bool errno_set = false;
  bool try_widening = false;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SQRT):
      errno_set = ! tree_expr_nonnegative_p (arg);
      try_widening = true;
      builtin_optab = sqrt_optab;
      break;
    CASE_FLT_FN (BUILT_IN_EXP):
      errno_set = true; builtin_optab = exp_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
      errno_set = true; builtin_optab = exp10_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP2):
      errno_set = true; builtin_optab = exp2_optab; break;
    CASE_FLT_FN (BUILT_IN_EXPM1):
      errno_set = true; builtin_optab = expm1_optab; break;
    CASE_FLT_FN (BUILT_IN_LOGB):
      errno_set = true; builtin_optab = logb_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG):
      errno_set = true; builtin_optab = log_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG10):
      errno_set = true; builtin_optab = log10_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG2):
      errno_set = true; builtin_optab = log2_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG1P):
      errno_set = true; builtin_optab = log1p_optab; break;
    CASE_FLT_FN (BUILT_IN_ASIN):
      builtin_optab = asin_optab; break;
    CASE_FLT_FN (BUILT_IN_ACOS):
      builtin_optab = acos_optab; break;
    CASE_FLT_FN (BUILT_IN_TAN):
      builtin_optab = tan_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN):
      builtin_optab = atan_optab; break;
    CASE_FLT_FN (BUILT_IN_FLOOR):
      builtin_optab = floor_optab; break;
    CASE_FLT_FN (BUILT_IN_CEIL):
      builtin_optab = ceil_optab; break;
    CASE_FLT_FN (BUILT_IN_TRUNC):
      builtin_optab = btrunc_optab; break;
    CASE_FLT_FN (BUILT_IN_ROUND):
      builtin_optab = round_optab; break;
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
      builtin_optab = nearbyint_optab;
      if (flag_trapping_math)
	break;
      /* Else fallthrough and expand as rint.  */
    CASE_FLT_FN (BUILT_IN_RINT):
      builtin_optab = rint_optab; break;
    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
      builtin_optab = significand_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  /* Before working hard, check whether the instruction is available, but try
     to widen the mode for specific operations.  */
  if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
       || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
      && (!errno_set || !optimize_insn_for_size_p ()))
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into RESULT.
	 Set RESULT to wherever the result comes back.  */
      result = expand_unop (mode, builtin_optab, op0, result, 0);

      if (result != 0)
	{
	  if (errno_set)
	    expand_errno_check (exp, result);

	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return result;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  return expand_call (exp, target, target == const0_rtx);
}
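
/* For example, sqrt () is expanded through sqrt_optab into the
   target's square-root pattern (e.g. "sqrtdf2" for DFmode) when one
   exists; if errno matters and the argument may be negative, the
   inline expansion is followed by expand_errno_check, and with no
   pattern available we fall back to the library call.  */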
/* Expand a call to the builtin binary math functions (pow and atan2).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, insns, result;
  int op1_type = REAL_TYPE;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1;
  enum machine_mode mode;
  bool errno_set = true;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
    CASE_FLT_FN (BUILT_IN_LDEXP):
      op1_type = INTEGER_TYPE;
      break;
    default:
      break;
    }

  if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_POW):
      builtin_optab = pow_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN2):
      builtin_optab = atan2_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALB):
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
	return NULL_RTX;
      builtin_optab = scalb_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
	return NULL_RTX;
      /* Fall through...  */
    CASE_FLT_FN (BUILT_IN_LDEXP):
      builtin_optab = ldexp_optab; break;
    CASE_FLT_FN (BUILT_IN_FMOD):
      builtin_optab = fmod_optab; break;
    CASE_FLT_FN (BUILT_IN_REMAINDER):
    CASE_FLT_FN (BUILT_IN_DREM):
      builtin_optab = remainder_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  result = gen_reg_rtx (mode);

  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  if (errno_set && optimize_insn_for_size_p ())
    return 0;

  /* Always stabilize the argument list.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);

  start_sequence ();

  /* Compute into RESULT.
     Set RESULT to wherever the result comes back.  */
  result = expand_binop (mode, builtin_optab, op0, op1,
			 result, 0, OPTAB_DIRECT);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (result == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  if (errno_set)
    expand_errno_check (exp, result);

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return result;
}
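
/* For example, pow () is expanded inline through pow_optab only when
   the target defines a matching pattern; otherwise optab_handler
   returns CODE_FOR_nothing above, NULL_RTX is returned, and the caller
   emits a normal libm call.  */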
/* Expand a call to the builtin ternary math functions (fma).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, op2, insns, result;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1, arg2;
  enum machine_mode mode;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  arg2 = CALL_EXPR_ARG (exp, 2);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_FMA):
      builtin_optab = fma_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  result = gen_reg_rtx (mode);

  /* Always stabilize the argument list.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
  CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);
  op2 = expand_normal (arg2);

  start_sequence ();

  /* Compute into RESULT.
     Set RESULT to wherever the result comes back.  */
  result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
			      result, 0);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (result == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return result;
}
/* Expand a call to the builtin sin and cos math functions.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      builtin_optab = sincos_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Check if sincos insn is available, otherwise fallback
     to sin or cos insn.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_FLT_FN (BUILT_IN_SIN):
	builtin_optab = sin_optab; break;
      CASE_FLT_FN (BUILT_IN_COS):
	builtin_optab = cos_optab; break;
      default:
	gcc_unreachable ();
      }

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into RESULT.
	 Set RESULT to wherever the result comes back.  */
      if (builtin_optab == sincos_optab)
	{
	  int ok;

	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_SIN):
	      ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_COS):
	      ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  gcc_assert (ok);
	}
      else
	result = expand_unop (mode, builtin_optab, op0, result, 0);

      if (result != 0)
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return result;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  return expand_call (exp, target, target == const0_rtx);
}
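
/* The twoval expansions above compute both outputs of the target's
   sincos pattern at once: for plain sin the cos output slot is passed
   as 0 and simply dropped, and symmetrically for cos.  */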
/* Given an interclass math builtin decl FNDECL and its argument ARG
   return an RTL instruction code that implements the functionality.
   If that isn't possible or available return CODE_FOR_nothing.  */

static enum insn_code
interclass_mathfn_icode (tree arg, tree fndecl)
{
  bool errno_set = false;
  optab builtin_optab = unknown_optab;
  enum machine_mode mode;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ILOGB):
      errno_set = true; builtin_optab = ilogb_optab; break;
    CASE_FLT_FN (BUILT_IN_ISINF):
      builtin_optab = isinf_optab; break;
    case BUILT_IN_ISNORMAL:
    case BUILT_IN_ISFINITE:
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      /* These builtins have no optabs (yet).  */
      break;
    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && errno_set)
    return CODE_FOR_nothing;

  /* Optab mode depends on the mode of the input argument.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (builtin_optab)
    return optab_handler (builtin_optab, mode);
  return CODE_FOR_nothing;
}
/* Expand a call to one of the builtin math functions that operate on
   floating point argument and output an integer result (ilogb, isinf,
   isnan, etc).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_interclass_mathfn (tree exp, rtx target)
{
  enum insn_code icode = CODE_FOR_nothing;
  rtx op0;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  icode = interclass_mathfn_icode (arg, fndecl);
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[1];
      rtx last = get_last_insn ();
      tree orig_arg = arg;

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      if (mode != GET_MODE (op0))
	op0 = convert_to_mode (mode, op0, 0);

      create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_legitimize_operands (icode, 0, 1, ops)
	  && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
	return ops[0].value;

      delete_insns_since (last);
      CALL_EXPR_ARG (exp, 0) = orig_arg;
    }

  return NULL_RTX;
}
/* Expand a call to the builtin sincos math function.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function.  */

static rtx
expand_builtin_sincos (tree exp)
{
  rtx op0, op1, op2, target1, target2;
  enum machine_mode mode;
  tree arg, sinp, cosp;
  int result;
  location_t loc = EXPR_LOCATION (exp);
  tree alias_type, alias_off;

  if (!validate_arglist (exp, REAL_TYPE,
			 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  sinp = CALL_EXPR_ARG (exp, 1);
  cosp = CALL_EXPR_ARG (exp, 2);

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Check if sincos insn is available, otherwise emit the call.  */
  if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  target1 = gen_reg_rtx (mode);
  target2 = gen_reg_rtx (mode);

  op0 = expand_normal (arg);
  alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
  alias_off = build_int_cst (alias_type, 0);
  op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
					sinp, alias_off));
  op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
					cosp, alias_off));

  /* Compute into target1 and target2.
     Set TARGET to wherever the result comes back.  */
  result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
  gcc_assert (result);

  /* Move target1 and target2 to the memory locations indicated
     by op1 and op2.  */
  emit_move_insn (op1, target1);
  emit_move_insn (op2, target2);

  return const0_rtx;
}
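
/* Illustrative source-level view of the expansion above: for

     double s, c;
     sincos (x, &s, &c);

   one twoval sincos operation computes both results, which are then
   stored through the user-supplied pointers.  */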
/* Expand a call to the internal cexpi builtin to the sincos math function.
   EXP is the expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  */

static rtx
expand_builtin_cexpi (tree exp, rtx target)
{
  tree fndecl = get_callee_fndecl (exp);
  tree arg, type;
  enum machine_mode mode;
  rtx op0, op1, op2;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Try expanding via a sincos optab, fall back to emitting a libcall
     to sincos or cexp.  We are sure we have sincos or cexp because cexpi
     is only generated from sincos, cexp or if we have either of them.  */
  if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
    {
      op1 = gen_reg_rtx (mode);
      op2 = gen_reg_rtx (mode);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      /* Compute into op1 and op2.  */
      expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
    }
  else if (targetm.libc_has_function (function_sincos))
    {
      tree call, fn = NULL_TREE;
      tree top1, top2;
      rtx op1a, op2a;

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = builtin_decl_explicit (BUILT_IN_SINCOS);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
      else
	gcc_unreachable ();

      op1 = assign_temp (TREE_TYPE (arg), 1, 1);
      op2 = assign_temp (TREE_TYPE (arg), 1, 1);
      op1a = copy_addr_to_reg (XEXP (op1, 0));
      op2a = copy_addr_to_reg (XEXP (op2, 0));
      top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
      top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);

      /* Make sure not to fold the sincos call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
				      call, 3, arg, top1, top2));
    }
  else
    {
      tree call, fn = NULL_TREE, narg;
      tree ctype = build_complex_type (type);

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = builtin_decl_explicit (BUILT_IN_CEXPF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = builtin_decl_explicit (BUILT_IN_CEXP);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = builtin_decl_explicit (BUILT_IN_CEXPL);
      else
	gcc_unreachable ();

      /* If we don't have a decl for cexp create one.  This is the
	 friendliest fallback if the user calls __builtin_cexpi
	 without full target C99 function support.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;
	  const char *name = NULL;

	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	    name = "cexpf";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	    name = "cexp";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	    name = "cexpl";

	  fntype = build_function_type_list (ctype, ctype, NULL_TREE);
	  fn = build_fn_decl (name, fntype);
	}

      narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
			      build_real (type, dconst0), arg);

      /* Make sure not to fold the cexp call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      return expand_expr (build_call_nary (ctype, call, 1, narg),
			  target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now build the proper return type.  */
  return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
			      make_tree (TREE_TYPE (arg), op2),
			      make_tree (TREE_TYPE (arg), op1)),
		      target, VOIDmode, EXPAND_NORMAL);
}
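
/* The paths above implement the identity

     cexpi (x) == cexp (x*I) == cos (x) + sin (x)*I

   either directly via the sincos expansion (op1 = sin, op2 = cos) or
   by forwarding to cexp with the constructed pure-imaginary argument
   COMPLEX_EXPR (0.0, x).  */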
/* Conveniently construct a function call expression.  FNDECL names the
   function to be called, N is the number of arguments, and the "..."
   parameters are the argument expressions.  Unlike build_call_expr
   this doesn't fold the call, hence it will always return a CALL_EXPR.  */

static tree
build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
{
  va_list ap;
  tree fntype = TREE_TYPE (fndecl);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);

  va_start (ap, n);
  fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
  va_end (ap);
  SET_EXPR_LOCATION (fn, loc);
  return fn;
}
/* Expand a call to one of the builtin rounding functions gcc defines
   as an extension (lfloor and lceil).  As these are gcc extensions we
   do not need to worry about setting errno to EDOM.
   If expanding via optab fails, lower expression to (int)(floor(x)).
   EXP is the expression that is a call to the builtin function;
   if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, insns, tmp;
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fallback_fn;
  tree fallback_fndecl;
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
      builtin_optab = lceil_optab;
      fallback_fn = BUILT_IN_CEIL;
      break;

    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      builtin_optab = lfloor_optab;
      fallback_fn = BUILT_IN_FLOOR;
      break;

    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  /* Compute into TARGET.  */
  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns).  */
  end_sequence ();

  /* Fall back to floating point rounding optab.  */
  fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);

  /* For non-C99 targets we may end up without a fallback fndecl here
     if the user called __builtin_lfloor directly.  In this case emit
     a call to the floor/ceil variants nevertheless.  This should result
     in the best user experience for targets without full C99 support.  */
  if (fallback_fndecl == NULL_TREE)
    {
      tree fntype;
      const char *name = NULL;

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_ICEIL:
	case BUILT_IN_LCEIL:
	case BUILT_IN_LLCEIL:
	  name = "ceil";
	  break;
	case BUILT_IN_ICEILF:
	case BUILT_IN_LCEILF:
	case BUILT_IN_LLCEILF:
	  name = "ceilf";
	  break;
	case BUILT_IN_ICEILL:
	case BUILT_IN_LCEILL:
	case BUILT_IN_LLCEILL:
	  name = "ceill";
	  break;
	case BUILT_IN_IFLOOR:
	case BUILT_IN_LFLOOR:
	case BUILT_IN_LLFLOOR:
	  name = "floor";
	  break;
	case BUILT_IN_IFLOORF:
	case BUILT_IN_LFLOORF:
	case BUILT_IN_LLFLOORF:
	  name = "floorf";
	  break;
	case BUILT_IN_IFLOORL:
	case BUILT_IN_LFLOORL:
	case BUILT_IN_LLFLOORL:
	  name = "floorl";
	  break;
	default:
	  gcc_unreachable ();
	}

      fntype = build_function_type_list (TREE_TYPE (arg),
					 TREE_TYPE (arg), NULL_TREE);
      fallback_fndecl = build_fn_decl (name, fntype);
    }

  exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);

  tmp = expand_normal (exp);
  tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));

  /* Truncate the result of floating point optab to integer
     via expand_fix ().  */
  target = gen_reg_rtx (mode);
  expand_fix (target, tmp, 0);

  return target;
}
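
/* In effect, when the target provides no lfloor/lceil pattern, a call
   such as lfloor (x) is lowered to roughly

     long l = (long) floor (x);

   i.e. the floating point rounding function followed by an ordinary
   float-to-integer conversion via expand_fix ().  */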
/* Expand a call to one of the builtin math functions doing integer
   conversion (lrint).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn_2 (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg;
  enum machine_mode mode;
  enum built_in_function fallback_fn = BUILT_IN_NONE;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_IRINT):
      fallback_fn = BUILT_IN_LRINT;
      /* FALLTHRU */
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      builtin_optab = lrint_optab;
      break;

    CASE_FLT_FN (BUILT_IN_IROUND):
      fallback_fn = BUILT_IN_LROUND;
      /* FALLTHRU */
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      builtin_optab = lround_optab;
      break;

    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
    return NULL_RTX;

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (!flag_errno_math)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      if (expand_sfix_optab (result, op0, builtin_optab))
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return result;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  if (fallback_fn != BUILT_IN_NONE)
    {
      /* Fall back to rounding to long int.  Use implicit_p 0 - for non-C99
	 targets, (int) round (x) should never be transformed into
	 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
	 a call to lround in the hope that the target provides at least some
	 C99 functions.  This should result in the best user experience for
	 targets without full C99 support.  */
      tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
						fallback_fn, 0);

      exp = build_call_nofold_loc (EXPR_LOCATION (exp),
				   fallback_fndecl, 1, arg);

      target = expand_call (exp, NULL_RTX, target == const0_rtx);
      target = maybe_emit_group_store (target, TREE_TYPE (exp));
      return convert_to_mode (mode, target, 0);
    }

  return expand_call (exp, target, target == const0_rtx);
}
/* Expand a call to the powi built-in mathematical function.  Return NULL_RTX if
   a normal call should be emitted rather than expanding the function
   in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_powi (tree exp, rtx target)
{
  tree arg0, arg1;
  rtx op0, op1;
  enum machine_mode mode;
  enum machine_mode mode2;

  if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Emit a libcall to libgcc.  */

  /* Mode of the 2nd argument must match that of an int.  */
  mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);

  if (target == NULL_RTX)
    target = gen_reg_rtx (mode);

  op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != mode)
    op0 = convert_to_mode (mode, op0, 0);
  op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
  if (GET_MODE (op1) != mode2)
    op1 = convert_to_mode (mode2, op1, 0);

  target = emit_library_call_value (optab_libfunc (powi_optab, mode),
				    target, LCT_CONST, mode, 2,
				    op0, mode, op1, mode2);

  return target;
}
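
/* The libcall emitted above resolves to the powi helpers in libgcc;
   e.g. for DFmode optab_libfunc is expected to yield __powidf2, which
   raises a double to an integer power by binary exponentiation.  */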
/* Expand expression EXP which is a call to the strlen builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strlen (tree exp, rtx target,
		       enum machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      struct expand_operand ops[4];
      rtx pat;
      tree len;
      tree src = CALL_EXPR_ARG (exp, 0);
      rtx src_reg, before_strlen;
      enum machine_mode insn_mode = target_mode;
      enum insn_code icode = CODE_FOR_nothing;
      unsigned int align;

      /* If the length can be computed at compile-time, return it.  */
      len = c_strlen (src, 0);
      if (len)
	return expand_expr (len, target, target_mode, EXPAND_NORMAL);

      /* If the length can be computed at compile-time and is constant
	 integer, but there are side-effects in src, evaluate
	 src for side-effects, then return len.
	 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
	 can be optimized into: i++; x = 3;  */
      len = c_strlen (src, 1);
      if (len && TREE_CODE (len) == INTEGER_CST)
	{
	  expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return expand_expr (len, target, target_mode, EXPAND_NORMAL);
	}

      align = get_pointer_alignment (src) / BITS_PER_UNIT;

      /* If SRC is not a pointer type, don't do this operation inline.  */
      if (align == 0)
	return NULL_RTX;

      /* Bail out if we can't compute strlen in the right mode.  */
      while (insn_mode != VOIDmode)
	{
	  icode = optab_handler (strlen_optab, insn_mode);
	  if (icode != CODE_FOR_nothing)
	    break;

	  insn_mode = GET_MODE_WIDER_MODE (insn_mode);
	}
      if (insn_mode == VOIDmode)
	return NULL_RTX;

      /* Make a place to hold the source address.  We will not expand
	 the actual source until we are sure that the expansion will
	 not fail -- there are trees that cannot be expanded twice.  */
      src_reg = gen_reg_rtx (Pmode);

      /* Mark the beginning of the strlen sequence so we can emit the
	 source operand later.  */
      before_strlen = get_last_insn ();

      create_output_operand (&ops[0], target, insn_mode);
      create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
      create_integer_operand (&ops[2], 0);
      create_integer_operand (&ops[3], align);
      if (!maybe_expand_insn (icode, 4, ops))
	return NULL_RTX;

      /* Now that we are assured of success, expand the source.  */
      start_sequence ();
      pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
      if (pat != src_reg)
	{
#ifdef POINTERS_EXTEND_UNSIGNED
	  if (GET_MODE (pat) != Pmode)
	    pat = convert_to_mode (Pmode, pat,
				   POINTERS_EXTEND_UNSIGNED);
#endif
	  emit_move_insn (src_reg, pat);
	}
      pat = get_insns ();
      end_sequence ();

      if (before_strlen)
	emit_insn_after (pat, before_strlen);
      else
	emit_insn_before (pat, get_insns ());

      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (ops[0].value) == target_mode)
	target = ops[0].value;
      else if (target != 0)
	convert_move (target, ops[0].value, 0);
      else
	target = convert_to_mode (target_mode, ops[0].value, 0);

      return target;
    }
}
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

static rtx
builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
			 enum machine_mode mode)
{
  const char *str = (const char *) data;

  gcc_assert (offset >= 0
	      && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
		  <= strlen (str) + 1));

  return c_readstr (str + offset, mode);
}
/* LEN specifies the length of the block of the memcpy/memset operation.
   Figure out its range and put it into MIN_SIZE/MAX_SIZE.
   In some cases we can make a very likely guess at the maximal size,
   which we then set into PROBABLE_MAX_SIZE.  */

static void
determine_block_size (tree len, rtx len_rtx,
		      unsigned HOST_WIDE_INT *min_size,
		      unsigned HOST_WIDE_INT *max_size,
		      unsigned HOST_WIDE_INT *probable_max_size)
{
  if (CONST_INT_P (len_rtx))
    {
      *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
      return;
    }
  else
    {
      wide_int min, max;
      enum value_range_type range_type = VR_UNDEFINED;

      /* Determine bounds from the type.  */
      if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
	*min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
      else
	*min_size = 0;
      if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
	*probable_max_size = *max_size
	  = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
      else
	*probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));

      if (TREE_CODE (len) == SSA_NAME)
	range_type = get_range_info (len, &min, &max);
      if (range_type == VR_RANGE)
	{
	  if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
	    *min_size = min.to_uhwi ();
	  if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
	    *probable_max_size = *max_size = max.to_uhwi ();
	}
      else if (range_type == VR_ANTI_RANGE)
	{
	  /* An anti-range 0...N lets us set the minimal size to N+1.  */
	  if (min == 0)
	    {
	      if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
		*min_size = max.to_uhwi () + 1;
	    }
	  /* Code like

	     int n;
	     if (n < 100)
	       memcpy (a, b, n)

	     produces an anti-range allowing negative values of N.  We can
	     still use the information and guess that N is not negative.  */
	  else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
	    *probable_max_size = min.to_uhwi () - 1;
	}
    }
  gcc_checking_assert (*max_size <=
		       (unsigned HOST_WIDE_INT)
		       GET_MODE_MASK (GET_MODE (len_rtx)));
}
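
/* For example, if value range propagation proved that an SSA_NAME
   length lies in [4, 16], the expanders below receive MIN_SIZE = 4 and
   MAX_SIZE = PROBABLE_MAX_SIZE = 16 and can tailor the copy or clear
   sequence to that range instead of assuming an arbitrary length.  */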
/* Expand a call EXP to the memcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memcpy (tree exp, rtx target)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src);
      unsigned int dest_align = get_pointer_alignment (dest);
      rtx dest_mem, src_mem, dest_addr, len_rtx;
      HOST_WIDE_INT expected_size = -1;
      unsigned int expected_align = 0;
      unsigned HOST_WIDE_INT min_size;
      unsigned HOST_WIDE_INT max_size;
      unsigned HOST_WIDE_INT probable_max_size;

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If SRC is not a pointer type, don't do this
	 operation in-line.  */
      if (src_align == 0)
	return NULL_RTX;

      if (currently_expanding_gimple_stmt)
	stringop_block_profile (currently_expanding_gimple_stmt,
				&expected_align, &expected_size);

      if (expected_align < dest_align)
	expected_align = dest_align;
      dest_mem = get_memory_rtx (dest, len);
      set_mem_align (dest_mem, dest_align);
      len_rtx = expand_normal (len);
      determine_block_size (len, len_rtx, &min_size, &max_size,
			    &probable_max_size);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
	 by pieces, we can avoid loading the string from memory
	 and only store the computed constants.  */
      if (src_str
	  && CONST_INT_P (len_rtx)
	  && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
	  && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
				  CONST_CAST (char *, src_str),
				  dest_align, false))
	{
	  dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				      builtin_memcpy_read_str,
				      CONST_CAST (char *, src_str),
				      dest_align, false, 0);
	  dest_mem = force_operand (XEXP (dest_mem, 0), target);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      src_mem = get_memory_rtx (src, len);
      set_mem_align (src_mem, src_align);

      /* Copy word part most expediently.  */
      dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
					 CALL_EXPR_TAILCALL (exp)
					 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
					 expected_align, expected_size,
					 min_size, max_size, probable_max_size);

      if (dest_addr == 0)
	{
	  dest_addr = force_operand (XEXP (dest_mem, 0), target);
	  dest_addr = convert_memory_address (ptr_mode, dest_addr);
	}
      return dest_addr;
    }
}
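
/* For instance (illustrative), memcpy (buf, "abc", 4) with a
   sufficiently aligned BUF can be emitted by the string-constant path
   above as a single 32-bit constant store of the bytes
   'a' 'b' 'c' '\0', without ever loading the string from memory.  */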
/* Expand a call EXP to the mempcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  If ENDP is 0 return the
   destination pointer, if ENDP is 1 return the end pointer ala
   mempcpy, and if ENDP is 2 return the end pointer minus one ala
   stpcpy.  */

static rtx
expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      return expand_builtin_mempcpy_args (dest, src, len,
					  target, mode, /*endp=*/ 1);
    }
}
/* Helper function to do the actual work for expand_builtin_mempcpy.  The
   arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_mempcpy.  */

static rtx
expand_builtin_mempcpy_args (tree dest, tree src, tree len,
			     rtx target, enum machine_mode mode, int endp)
{
  /* If return value is ignored, transform mempcpy into memcpy.  */
  if (target == const0_rtx && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
      tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
					   dest, src, len);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src);
      unsigned int dest_align = get_pointer_alignment (dest);
      rtx dest_mem, src_mem, len_rtx;

      /* If either SRC or DEST is not a pointer type, don't do this
	 operation in-line.  */
      if (dest_align == 0 || src_align == 0)
	return NULL_RTX;

      /* If LEN is not constant, call the normal function.  */
      if (! tree_fits_uhwi_p (len))
	return NULL_RTX;

      len_rtx = expand_normal (len);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
	 by pieces, we can avoid loading the string from memory
	 and only store the computed constants.  */
      if (src_str
	  && CONST_INT_P (len_rtx)
	  && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
	  && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
				  CONST_CAST (char *, src_str),
				  dest_align, false))
	{
	  dest_mem = get_memory_rtx (dest, len);
	  set_mem_align (dest_mem, dest_align);
	  dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				      builtin_memcpy_read_str,
				      CONST_CAST (char *, src_str),
				      dest_align, false, endp);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      if (CONST_INT_P (len_rtx)
	  && can_move_by_pieces (INTVAL (len_rtx),
				 MIN (dest_align, src_align)))
	{
	  dest_mem = get_memory_rtx (dest, len);
	  set_mem_align (dest_mem, dest_align);
	  src_mem = get_memory_rtx (src, len);
	  set_mem_align (src_mem, src_align);
	  dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
				     MIN (dest_align, src_align), endp);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      return NULL_RTX;
    }
}
#ifndef HAVE_movstr
# define HAVE_movstr 0
# define CODE_FOR_movstr CODE_FOR_nothing
#endif

/* Expand into a movstr instruction, if one is available.  Return NULL_RTX if
   we failed; the caller should emit a normal call, otherwise try to
   get the result in TARGET, if convenient.  If ENDP is 0 return the
   destination pointer, if ENDP is 1 return the end pointer ala
   mempcpy, and if ENDP is 2 return the end pointer minus one ala
   stpcpy.  */

static rtx
expand_movstr (tree dest, tree src, rtx target, int endp)
{
  struct expand_operand ops[3];
  rtx dest_mem;
  rtx src_mem;

  if (!HAVE_movstr)
    return NULL_RTX;

  dest_mem = get_memory_rtx (dest, NULL);
  src_mem = get_memory_rtx (src, NULL);
  if (!endp)
    {
      target = force_reg (Pmode, XEXP (dest_mem, 0));
      dest_mem = replace_equiv_address (dest_mem, target);
    }

  create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
  create_fixed_operand (&ops[1], dest_mem);
  create_fixed_operand (&ops[2], src_mem);
  if (!maybe_expand_insn (CODE_FOR_movstr, 3, ops))
    return NULL_RTX;

  if (endp && target != const0_rtx)
    {
      target = ops[0].value;
      /* movstr is supposed to set end to the address of the NUL
	 terminator.  If the caller requested a mempcpy-like return value,
	 adjust it.  */
      if (endp == 1)
	{
	  rtx tem = plus_constant (GET_MODE (target),
				   gen_lowpart (GET_MODE (target), target), 1);
	  emit_move_insn (target, force_operand (tem, NULL_RTX));
	}
    }
  return target;
}
/* Expand expression EXP, which is a call to the strcpy builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient (and in mode MODE if that's
   convenient).  */

static rtx
expand_builtin_strcpy (tree exp, rtx target)
{
  if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      return expand_builtin_strcpy_args (dest, src, target);
    }
  return NULL_RTX;
}
/* Helper function to do the actual work for expand_builtin_strcpy.  The
   arguments to the builtin_strcpy call DEST and SRC are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_strcpy.  */

static rtx
expand_builtin_strcpy_args (tree dest, tree src, rtx target)
{
  return expand_movstr (dest, src, target, /*endp=*/0);
}
/* Expand a call EXP to the stpcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
{
  tree dst, src;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* If return value is ignored, transform stpcpy into strcpy.  */
  if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      tree len, lenp1;
      rtx ret;

      /* Ensure we get an actual string whose length can be evaluated at
	 compile-time, not an expression containing a string.  This is
	 because the latter will potentially produce pessimized code
	 when used to produce the return value.  */
      if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
	return expand_movstr (dst, src, target, /*endp=*/2);

      lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
      ret = expand_builtin_mempcpy_args (dst, src, lenp1,
					 target, mode, /*endp=*/2);

      if (ret)
	return ret;

      if (TREE_CODE (len) == INTEGER_CST)
	{
	  rtx len_rtx = expand_normal (len);

	  if (CONST_INT_P (len_rtx))
	    {
	      ret = expand_builtin_strcpy_args (dst, src, target);

	      if (ret)
		{
		  if (! target)
		    {
		      if (mode != VOIDmode)
			target = gen_reg_rtx (mode);
		      else
			target = gen_reg_rtx (GET_MODE (ret));
		    }
		  if (GET_MODE (target) != GET_MODE (ret))
		    ret = gen_lowpart (GET_MODE (target), ret);

		  ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
		  ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
		  gcc_assert (ret);

		  return target;
		}
	    }
	}

      return expand_movstr (dst, src, target, /*endp=*/2);
    }
}
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

static rtx
builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
			  enum machine_mode mode)
{
  const char *str = (const char *) data;

  if ((unsigned HOST_WIDE_INT) offset > strlen (str))
    return const0_rtx;

  return c_readstr (str + offset, mode);
}
/* Expand expression EXP, which is a call to the strncpy builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  */

static rtx
expand_builtin_strncpy (tree exp, rtx target)
{
  location_t loc = EXPR_LOCATION (exp);

  if (validate_arglist (exp,
			POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      tree slen = c_strlen (src, 1);

      /* We must be passed a constant len and src parameter.  */
      if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
	return NULL_RTX;

      slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

      /* We're required to pad with trailing zeros if the requested
	 len is greater than strlen(s2)+1.  In that case try to
	 use store_by_pieces, if it fails, punt.  */
      if (tree_int_cst_lt (slen, len))
	{
	  unsigned int dest_align = get_pointer_alignment (dest);
	  const char *p = c_getstr (src);
	  rtx dest_mem;

	  if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
	      || !can_store_by_pieces (tree_to_uhwi (len),
				       builtin_strncpy_read_str,
				       CONST_CAST (char *, p),
				       dest_align, false))
	    return NULL_RTX;

	  dest_mem = get_memory_rtx (dest, len);
	  store_by_pieces (dest_mem, tree_to_uhwi (len),
			   builtin_strncpy_read_str,
			   CONST_CAST (char *, p), dest_align, false, 0);
	  dest_mem = force_operand (XEXP (dest_mem, 0), target);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}
    }
  return NULL_RTX;
}
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

static rtx
builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
			 enum machine_mode mode)
{
  const char *c = (const char *) data;
  char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));

  memset (p, *c, GET_MODE_SIZE (mode));

  return c_readstr (p, mode);
}
/* Callback routine for store_by_pieces.  Return the RTL of a register
   containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
   char value given in the RTL register data.  For example, if mode is
   4 bytes wide, return the RTL for 0x01010101*data.  */

static rtx
builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
			enum machine_mode mode)
{
  rtx target, coeff;
  size_t size;
  char *p;

  size = GET_MODE_SIZE (mode);
  if (size == 1)
    return (rtx) data;

  p = XALLOCAVEC (char, size);
  memset (p, 1, size);
  coeff = c_readstr (p, mode);

  target = convert_to_mode (mode, (rtx) data, 1);
  target = expand_mult (mode, target, coeff, NULL_RTX, 1);
  return force_reg (mode, target);
}
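
/* Worked example: for an SImode piece and the byte value 0xAB, the
   multiplication above yields 0xAB * 0x01010101 == 0xABABABAB, the
   byte replicated across the whole word.  */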
/* Expand expression EXP, which is a call to the memset builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient (and in mode MODE if that's
   convenient).  */

static rtx
expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree val = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      return expand_builtin_memset_args (dest, val, len, target, mode, exp);
    }
}
/* Helper function to do the actual work for expand_builtin_memset.  The
   arguments to the builtin_memset call DEST, VAL, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_memset.  */

static rtx
expand_builtin_memset_args (tree dest, tree val, tree len,
			    rtx target, enum machine_mode mode, tree orig_exp)
{
  tree fndecl, fn;
  enum built_in_function fcode;
  enum machine_mode val_mode;
  char c;
  unsigned int dest_align;
  rtx dest_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;
  unsigned HOST_WIDE_INT min_size;
  unsigned HOST_WIDE_INT max_size;
  unsigned HOST_WIDE_INT probable_max_size;

  dest_align = get_pointer_alignment (dest);

  /* If DEST is not a pointer type, don't do this operation in-line.  */
  if (dest_align == 0)
    return NULL_RTX;

  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
			    &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Evaluate and ignore VAL in case it has side-effects.  */
      expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (dest, target, mode, EXPAND_NORMAL);
    }

  /* Stabilize the arguments in case we fail.  */
  dest = builtin_save_expr (dest);
  val = builtin_save_expr (val);
  len = builtin_save_expr (len);

  len_rtx = expand_normal (len);
  determine_block_size (len, len_rtx, &min_size, &max_size,
			&probable_max_size);
  dest_mem = get_memory_rtx (dest, len);
  val_mode = TYPE_MODE (unsigned_char_type_node);

  if (TREE_CODE (val) != INTEGER_CST)
    {
      rtx val_rtx;

      val_rtx = expand_normal (val);
      val_rtx = convert_to_mode (val_mode, val_rtx, 0);

      /* Assume that we can memset by pieces if we can store
       * the coefficients by pieces (in the required modes).
       * We can't pass builtin_memset_gen_str as that emits RTL.  */
      c = 1;
      if (tree_fits_uhwi_p (len)
	  && can_store_by_pieces (tree_to_uhwi (len),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	{
	  val_rtx = force_reg (val_mode, val_rtx);
	  store_by_pieces (dest_mem, tree_to_uhwi (len),
			   builtin_memset_gen_str, val_rtx, dest_align,
			   true, 0);
	}
      else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
					dest_align, expected_align,
					expected_size, min_size, max_size,
					probable_max_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  if (target_char_cast (val, &c))
    goto do_libcall;

  if (c)
    {
      if (tree_fits_uhwi_p (len)
	  && can_store_by_pieces (tree_to_uhwi (len),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	store_by_pieces (dest_mem, tree_to_uhwi (len),
			 builtin_memset_read_str, &c, dest_align, true, 0);
      else if (!set_storage_via_setmem (dest_mem, len_rtx,
					gen_int_mode (c, val_mode),
					dest_align, expected_align,
					expected_size, min_size, max_size,
					probable_max_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  set_mem_align (dest_mem, dest_align);
  dest_addr = clear_storage_hints (dest_mem, len_rtx,
				   CALL_EXPR_TAILCALL (orig_exp)
				   ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
				   expected_align, expected_size,
				   min_size, max_size,
				   probable_max_size);

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  return dest_addr;

 do_libcall:
  fndecl = get_callee_fndecl (orig_exp);
  fcode = DECL_FUNCTION_CODE (fndecl);
  if (fcode == BUILT_IN_MEMSET)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
				dest, val, len);
  else if (fcode == BUILT_IN_BZERO)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
				dest, len);
  else
    gcc_unreachable ();
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
  return expand_call (fn, target, target == const0_rtx);
}

/* Expand expression EXP, which is a call to the bzero builtin.  Return
   NULL_RTX if we failed and the caller should emit a normal call.  */

static rtx
expand_builtin_bzero (tree exp)
{
  tree dest, size;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dest = CALL_EXPR_ARG (exp, 0);
  size = CALL_EXPR_ARG (exp, 1);

  /* New argument list transforming bzero(ptr x, int y) to
     memset(ptr x, int 0, size_t y).  This is done this way
     so that if it isn't expanded inline, we fall back to
     calling bzero instead of memset.  */

  return expand_builtin_memset_args (dest, integer_zero_node,
				     fold_convert_loc (loc,
						       size_type_node, size),
				     const0_rtx, VOIDmode, exp);
}
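
/* For example (illustrative only), a call

     bzero (buf, n);

   is expanded here exactly as

     memset (buf, 0, (size_t) n);

   while ORIG_EXP still names bzero, so a failed inline expansion falls
   back to a library call to bzero, not memset.  */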

/* Expand expression EXP, which is a call to the memcmp built-in function.
   Return NULL_RTX if we failed and the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in mode
   MODE, if that's convenient).  */

static rtx
expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
		       ATTRIBUTE_UNUSED enum machine_mode mode)
{
  location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
     implementing memcmp because it will stop if it encounters two
     zero bytes.  */
#if defined HAVE_cmpmemsi
  {
    rtx arg1_rtx, arg2_rtx, arg3_rtx;
    rtx result;
    rtx insn;
    tree arg1 = CALL_EXPR_ARG (exp, 0);
    tree arg2 = CALL_EXPR_ARG (exp, 1);
    tree len = CALL_EXPR_ARG (exp, 2);

    unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
    unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
    enum machine_mode insn_mode;

    if (HAVE_cmpmemsi)
      insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
    else
      return NULL_RTX;

    /* If we don't have POINTER_TYPE, call the function.  */
    if (arg1_align == 0 || arg2_align == 0)
      return NULL_RTX;

    /* Make a place to write the result of the instruction.  */
    result = target;
    if (! (result != 0
	   && REG_P (result) && GET_MODE (result) == insn_mode
	   && REGNO (result) >= FIRST_PSEUDO_REGISTER))
      result = gen_reg_rtx (insn_mode);

    arg1_rtx = get_memory_rtx (arg1, len);
    arg2_rtx = get_memory_rtx (arg2, len);
    arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));

    /* Set MEM_SIZE as appropriate.  */
    if (CONST_INT_P (arg3_rtx))
      {
	set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
	set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
      }

    if (HAVE_cmpmemsi)
      insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			   GEN_INT (MIN (arg1_align, arg2_align)));
    else
      gcc_unreachable ();

    if (insn)
      emit_insn (insn);
    else
      emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
			       TYPE_MODE (integer_type_node), 3,
			       XEXP (arg1_rtx, 0), Pmode,
			       XEXP (arg2_rtx, 0), Pmode,
			       convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
						TYPE_UNSIGNED (sizetype)),
			       TYPE_MODE (sizetype));

    /* Return the value in the proper mode for this function.  */
    mode = TYPE_MODE (TREE_TYPE (exp));
    if (GET_MODE (result) == mode)
      return result;
    else if (target != 0)
      {
	convert_move (target, result, 0);
	return target;
      }
    else
      return convert_to_mode (mode, result, 0);
  }
#endif /* HAVE_cmpmemsi.  */

  return NULL_RTX;
}

/* Expand expression EXP, which is a call to the strcmp builtin.  Return
   NULL_RTX if we failed and the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

#if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
  if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
      || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
    {
      rtx arg1_rtx, arg2_rtx;
      rtx result, insn = NULL_RTX;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);

      unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
      unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
	return NULL_RTX;

      /* Stabilize the arguments in case gen_cmpstr(n)si fail.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);

      arg1_rtx = get_memory_rtx (arg1, NULL);
      arg2_rtx = get_memory_rtx (arg2, NULL);

#ifdef HAVE_cmpstrsi
      /* Try to call cmpstrsi.  */
      if (HAVE_cmpstrsi)
	{
	  enum machine_mode insn_mode
	    = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;

	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && REG_P (result) && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
			       GEN_INT (MIN (arg1_align, arg2_align)));
	}
#endif
#ifdef HAVE_cmpstrnsi
      /* Try to determine at least one length and call cmpstrnsi.  */
      if (!insn && HAVE_cmpstrnsi)
	{
	  tree len;
	  rtx arg3_rtx;

	  enum machine_mode insn_mode
	    = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
	  tree len1 = c_strlen (arg1, 1);
	  tree len2 = c_strlen (arg2, 1);

	  if (len1)
	    len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
	  if (len2)
	    len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

	  /* If we don't have a constant length for the first, use the length
	     of the second, if we know it.  We don't require a constant for
	     this case; some cost analysis could be done if both are available
	     but neither is constant.  For now, assume they're equally cheap,
	     unless one has side effects.  If both strings have constant
	     lengths, use the smaller.  */

	  if (!len1)
	    len = len2;
	  else if (!len2)
	    len = len1;
	  else if (TREE_SIDE_EFFECTS (len1))
	    len = len2;
	  else if (TREE_SIDE_EFFECTS (len2))
	    len = len1;
	  else if (TREE_CODE (len1) != INTEGER_CST)
	    len = len2;
	  else if (TREE_CODE (len2) != INTEGER_CST)
	    len = len1;
	  else if (tree_int_cst_lt (len1, len2))
	    len = len1;
	  else
	    len = len2;

	  /* If both arguments have side effects, we cannot optimize.  */
	  if (!len || TREE_SIDE_EFFECTS (len))
	    goto do_libcall;

	  arg3_rtx = expand_normal (len);

	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && REG_P (result) && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
				GEN_INT (MIN (arg1_align, arg2_align)));
	}
#endif

      if (insn)
	{
	  enum machine_mode mode;
	  emit_insn (insn);

	  /* Return the value in the proper mode for this function.  */
	  mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* Expand the library call ourselves using a stabilized argument
	 list to avoid re-evaluating the function's arguments twice.  */
#ifdef HAVE_cmpstrnsi
    do_libcall:
#endif
      fndecl = get_callee_fndecl (exp);
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
#endif
  return NULL_RTX;
}

/* Expand expression EXP, which is a call to the strncmp builtin.  Return
   NULL_RTX if we failed and the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
			ATTRIBUTE_UNUSED enum machine_mode mode)
{
  location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* If c_strlen can determine an expression for one of the string
     lengths, and it doesn't have side effects, then emit cmpstrnsi
     using length MIN(strlen(string)+1, arg3).  */
#ifdef HAVE_cmpstrnsi
  if (HAVE_cmpstrnsi)
    {
      tree len, len1, len2;
      rtx arg1_rtx, arg2_rtx, arg3_rtx;
      rtx result, insn;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);
      tree arg3 = CALL_EXPR_ARG (exp, 2);

      unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
      unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
      enum machine_mode insn_mode
	= insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;

      len1 = c_strlen (arg1, 1);
      len2 = c_strlen (arg2, 1);

      if (len1)
	len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
      if (len2)
	len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);

      /* If we don't have a constant length for the first, use the length
	 of the second, if we know it.  We don't require a constant for
	 this case; some cost analysis could be done if both are available
	 but neither is constant.  For now, assume they're equally cheap,
	 unless one has side effects.  If both strings have constant
	 lengths, use the smaller.  */

      if (!len1)
	len = len2;
      else if (!len2)
	len = len1;
      else if (TREE_SIDE_EFFECTS (len1))
	len = len2;
      else if (TREE_SIDE_EFFECTS (len2))
	len = len1;
      else if (TREE_CODE (len1) != INTEGER_CST)
	len = len2;
      else if (TREE_CODE (len2) != INTEGER_CST)
	len = len1;
      else if (tree_int_cst_lt (len1, len2))
	len = len1;
      else
	len = len2;

      /* If both arguments have side effects, we cannot optimize.  */
      if (!len || TREE_SIDE_EFFECTS (len))
	return NULL_RTX;

      /* The actual new length parameter is MIN(len,arg3).  */
      len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
			     fold_convert_loc (loc, TREE_TYPE (len), arg3));

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
	return NULL_RTX;

      /* Make a place to write the result of the instruction.  */
      result = target;
      if (! (result != 0
	     && REG_P (result) && GET_MODE (result) == insn_mode
	     && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	result = gen_reg_rtx (insn_mode);

      /* Stabilize the arguments in case gen_cmpstrnsi fails.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);
      len = builtin_save_expr (len);

      arg1_rtx = get_memory_rtx (arg1, len);
      arg2_rtx = get_memory_rtx (arg2, len);
      arg3_rtx = expand_normal (len);
      insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			    GEN_INT (MIN (arg1_align, arg2_align)));
      if (insn)
	{
	  emit_insn (insn);

	  /* Return the value in the proper mode for this function.  */
	  mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* Expand the library call ourselves using a stabilized argument
	 list to avoid re-evaluating the function's arguments twice.  */
      fndecl = get_callee_fndecl (exp);
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
				  arg1, arg2, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
#endif
  return NULL_RTX;
}
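
/* A worked example of the length computation above (example values only):
   for

     strncmp (s, "ab", 100)

   c_strlen determines the second string's length, so LEN2 = 2 + 1 = 3
   (including the terminating NUL), LEN1 is unknown, and the cmpstrnsi
   length becomes MIN (3, 100) = 3; the comparison never has to touch
   more than three bytes of S.  */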

/* Expand a call to __builtin_saveregs, generating the result in TARGET,
   if that's convenient.  */

rtx
expand_builtin_saveregs (void)
{
  rtx val, seq;

  /* Don't do __builtin_saveregs more than once in a function.
     Save the result of the first call and reuse it.  */
  if (saveregs_value != 0)
    return saveregs_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the call to the
     first insn of this function.  */

  start_sequence ();

  /* Do whatever the machine needs done in this case.  */
  val = targetm.calls.expand_builtin_saveregs ();

  seq = get_insns ();
  end_sequence ();

  saveregs_value = val;

  /* Put the insns after the NOTE that starts the function.  If this
     is inside a start_sequence, make the outer-level insn chain current, so
     the code is placed at the start of the function.  */
  push_topmost_sequence ();
  emit_insn_after (seq, entry_of_function ());
  pop_topmost_sequence ();

  return val;
}

/* Expand a call to __builtin_next_arg.  */

static rtx
expand_builtin_next_arg (void)
{
  /* Checking arguments is already done in fold_builtin_next_arg
     that must be called before this function.  */
  return expand_binop (ptr_mode, add_optab,
		       crtl->args.internal_arg_pointer,
		       crtl->args.arg_offset_rtx,
		       NULL_RTX, 0, OPTAB_LIB_WIDEN);
}

/* Make it easier for the backends by protecting the valist argument
   from multiple evaluations.  */

static tree
stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
{
  tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));

  /* The current way of determining the type of valist is completely
     bogus.  We should have the information on the va builtin instead.  */
  if (!vatype)
    vatype = targetm.fn_abi_va_list (cfun->decl);

  if (TREE_CODE (vatype) == ARRAY_TYPE)
    {
      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);

      /* For this case, the backends will be expecting a pointer to
	 vatype, but it's possible we've actually been given an array
	 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
	 So fix it.  */
      if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	{
	  tree p1 = build_pointer_type (TREE_TYPE (vatype));
	  valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
	}
    }
  else
    {
      tree pt = build_pointer_type (vatype);

      if (! needs_lvalue)
	{
	  if (! TREE_SIDE_EFFECTS (valist))
	    return valist;

	  valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
	  TREE_SIDE_EFFECTS (valist) = 1;
	}

      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);
      valist = fold_build2_loc (loc, MEM_REF,
				vatype, valist, build_int_cst (pt, 0));
    }

  return valist;
}

/* The "standard" definition of va_list is void*.  */

tree
std_build_builtin_va_list (void)
{
  return ptr_type_node;
}

/* The "standard" abi va_list is va_list_type_node.  */

tree
std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
{
  return va_list_type_node;
}

/* The "standard" type of va_list is va_list_type_node.  */

tree
std_canonical_va_list_type (tree type)
{
  tree wtype, htype;

  if (INDIRECT_REF_P (type))
    type = TREE_TYPE (type);
  else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
    type = TREE_TYPE (type);
  wtype = va_list_type_node;
  htype = type;
  /* Treat structure va_list types.  */
  if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
    htype = TREE_TYPE (htype);
  else if (TREE_CODE (wtype) == ARRAY_TYPE)
    {
      /* If va_list is an array type, the argument may have decayed
	 to a pointer type, e.g. by being passed to another function.
	 In that case, unwrap both types so that we can compare the
	 underlying records.  */
      if (TREE_CODE (htype) == ARRAY_TYPE
	  || POINTER_TYPE_P (htype))
	{
	  wtype = TREE_TYPE (wtype);
	  htype = TREE_TYPE (htype);
	}
    }
  if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
    return va_list_type_node;

  return NULL_TREE;
}

/* The "standard" implementation of va_start: just assign `nextarg' to
   the variable.  */

void
std_expand_builtin_va_start (tree valist, rtx nextarg)
{
  rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
  convert_move (va_r, nextarg, 0);
}

/* Expand EXP, a call to __builtin_va_start.  */

static rtx
expand_builtin_va_start (tree exp)
{
  rtx nextarg;
  tree valist;
  location_t loc = EXPR_LOCATION (exp);

  if (call_expr_nargs (exp) < 2)
    {
      error_at (loc, "too few arguments to function %<va_start%>");
      return const0_rtx;
    }

  if (fold_builtin_next_arg (exp, true))
    return const0_rtx;

  nextarg = expand_builtin_next_arg ();
  valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);

  if (targetm.expand_builtin_va_start)
    targetm.expand_builtin_va_start (valist, nextarg);
  else
    std_expand_builtin_va_start (valist, nextarg);

  return const0_rtx;
}

/* Expand EXP, a call to __builtin_va_end.  */

static rtx
expand_builtin_va_end (tree exp)
{
  tree valist = CALL_EXPR_ARG (exp, 0);

  /* Evaluate for side effects, if needed.  I hate macros that don't
     do that.  */
  if (TREE_SIDE_EFFECTS (valist))
    expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);

  return const0_rtx;
}

/* Expand EXP, a call to __builtin_va_copy.  We do this as a
   builtin rather than just as an assignment in stdarg.h because of the
   nastiness of array-type va_list types.  */

static rtx
expand_builtin_va_copy (tree exp)
{
  tree dst, src, t;
  location_t loc = EXPR_LOCATION (exp);

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  dst = stabilize_va_list_loc (loc, dst, 1);
  src = stabilize_va_list_loc (loc, src, 0);

  gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);

  if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
    {
      t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
  else
    {
      rtx dstb, srcb, size;

      /* Evaluate to pointers.  */
      dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
      srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
      size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
			  NULL_RTX, VOIDmode, EXPAND_NORMAL);

      dstb = convert_memory_address (Pmode, dstb);
      srcb = convert_memory_address (Pmode, srcb);

      /* "Dereference" to BLKmode memories.  */
      dstb = gen_rtx_MEM (BLKmode, dstb);
      set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
      set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
      srcb = gen_rtx_MEM (BLKmode, srcb);
      set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
      set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));

      /* Copy.  */
      emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
    }

  return const0_rtx;
}
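
/* The array-type branch above matters on targets such as x86-64, where
   va_list is declared along the lines of

     typedef struct __va_list_tag __builtin_va_list[1];

   so DST and SRC are addresses of one-element arrays and the copy must be
   a block move of the whole structure rather than a pointer assignment.  */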

/* Expand a call to one of the builtin functions __builtin_frame_address or
   __builtin_return_address.  */

static rtx
expand_builtin_frame_address (tree fndecl, tree exp)
{
  /* The argument must be a nonnegative integer constant.
     It counts the number of frames to scan up the stack.
     The value is the return address saved in that frame.  */
  if (call_expr_nargs (exp) == 0)
    /* Warning about missing arg was already issued.  */
    return const0_rtx;
  else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
    {
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	error ("invalid argument to %<__builtin_frame_address%>");
      else
	error ("invalid argument to %<__builtin_return_address%>");
      return const0_rtx;
    }
  else
    {
      rtx tem
	= expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
				      tree_to_uhwi (CALL_EXPR_ARG (exp, 0)));

      /* Some ports cannot access arbitrary stack frames.  */
      if (tem == NULL)
	{
	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	    warning (0, "unsupported argument to %<__builtin_frame_address%>");
	  else
	    warning (0, "unsupported argument to %<__builtin_return_address%>");
	  return const0_rtx;
	}

      /* For __builtin_frame_address, return what we've got.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	return tem;

      if (!REG_P (tem)
	  && ! CONSTANT_P (tem))
	tem = copy_addr_to_reg (tem);
      return tem;
    }
}

/* Expand EXP, a call to the alloca builtin.  Return NULL_RTX if we
   failed and the caller should emit a normal call.  CANNOT_ACCUMULATE
   is the same as for allocate_dynamic_stack_space.  */

static rtx
expand_builtin_alloca (tree exp, bool cannot_accumulate)
{
  rtx op0;
  rtx result;
  bool valid_arglist;
  unsigned int align;
  bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
			    == BUILT_IN_ALLOCA_WITH_ALIGN);

  valid_arglist
    = (alloca_with_align
       ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
       : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));

  if (!valid_arglist)
    return NULL_RTX;

  /* Compute the argument.  */
  op0 = expand_normal (CALL_EXPR_ARG (exp, 0));

  /* Compute the alignment.  */
  align = (alloca_with_align
	   ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
	   : BIGGEST_ALIGNMENT);

  /* Allocate the desired space.  */
  result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
  result = convert_memory_address (ptr_mode, result);

  return result;
}

/* Expand a call to bswap builtin in EXP.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_bswap (enum machine_mode target_mode, tree exp, rtx target,
		      rtx subtarget)
{
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  op0 = expand_expr (arg,
		     subtarget && GET_MODE (subtarget) == target_mode
		     ? subtarget : NULL_RTX,
		     target_mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != target_mode)
    op0 = convert_to_mode (target_mode, op0, 1);

  target = expand_unop (target_mode, bswap_optab, op0, target, 1);

  gcc_assert (target);

  return convert_to_mode (target_mode, target, 1);
}
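
/* For example, __builtin_bswap32 (0x12345678) expands through bswap_optab
   to the value 0x78563412; the trailing convert_to_mode is a no-op when
   the optab already produced its result in TARGET_MODE.  */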

/* Expand a call to a unary builtin in EXP.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
		     rtx subtarget, optab op_optab)
{
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Compute the argument.  */
  op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
		     (subtarget
		      && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
			  == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
		     VOIDmode, EXPAND_NORMAL);
  /* Compute op, into TARGET if possible.
     Set TARGET to wherever the result comes back.  */
  target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
			op_optab, op0, target, op_optab != clrsb_optab);
  gcc_assert (target);

  return convert_to_mode (target_mode, target, 0);
}

/* Expand a call to __builtin_expect.  We just return our argument
   as the builtin_expect semantic should've been already executed by
   tree branch prediction pass.  */

static rtx
expand_builtin_expect (tree exp, rtx target)
{
  tree arg;

  if (call_expr_nargs (exp) < 2)
    return const0_rtx;
  arg = CALL_EXPR_ARG (exp, 0);

  target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
  /* When guessing was done, the hints should be already stripped away.  */
  gcc_assert (!flag_guess_branch_prob
	      || optimize == 0 || seen_error ());
  return target;
}
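
/* Typical source-level usage that reaches this expander (illustration):

     if (__builtin_expect (ptr == NULL, 0))
       handle_rare_error ();

   By this point the branch-prediction pass has already consumed the hint,
   so only the first argument's value is materialized here.  */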

/* Expand a call to __builtin_assume_aligned.  We just return our first
   argument as the builtin_assume_aligned semantic should've been already
   executed by CCP.  */

static rtx
expand_builtin_assume_aligned (tree exp, rtx target)
{
  if (call_expr_nargs (exp) < 2)
    return const0_rtx;
  target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
			EXPAND_NORMAL);
  gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
	      && (call_expr_nargs (exp) < 3
		  || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
  return target;
}
void
expand_builtin_trap (void)
{
#ifdef HAVE_trap
  if (HAVE_trap)
    {
      rtx insn = emit_insn (gen_trap ());
      /* For trap insns when not accumulating outgoing args force
	 REG_ARGS_SIZE note to prevent crossjumping of calls with
	 different args sizes.  */
      if (!ACCUMULATE_OUTGOING_ARGS)
	add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
    }
  else
#endif
    emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
  emit_barrier ();
}

/* Expand a call to __builtin_unreachable.  We do nothing except emit
   a barrier saying that control flow will not pass here.

   It is the responsibility of the program being compiled to ensure
   that control flow never reaches __builtin_unreachable.  */

static void
expand_builtin_unreachable (void)
{
  emit_barrier ();
}

/* Expand EXP, a call to fabs, fabsf or fabsl.
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function inline.  If convenient, the result should be placed
   in TARGET.  SUBTARGET may be used as the target for computing
   the operand.  */

static rtx
expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
{
  enum machine_mode mode;
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
  return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
}

/* Expand EXP, a call to copysign, copysignf, or copysignl.
   Return NULL if a normal call should be emitted rather than expanding the
   function inline.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing the operand.  */

static rtx
expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
{
  rtx op0, op1;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

  arg = CALL_EXPR_ARG (exp, 1);
  op1 = expand_normal (arg);

  return expand_copysign (op0, op1, target);
}

/* Expand a call to __builtin___clear_cache.  */

static rtx
expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
{
#ifndef HAVE_clear_cache
#ifdef CLEAR_INSN_CACHE
  /* There is no "clear_cache" insn, and __clear_cache() in libgcc
     does something.  Just do the default expansion to a call to
     __clear_cache().  */
  return NULL_RTX;
#else
  /* There is no "clear_cache" insn, and __clear_cache() in libgcc
     does nothing.  There is no need to call it.  Do nothing.  */
  return const0_rtx;
#endif /* CLEAR_INSN_CACHE */
#else
  /* We have a "clear_cache" insn, and it will handle everything.  */
  tree begin, end;
  rtx begin_rtx, end_rtx;

  /* We must not expand to a library call.  If we did, any
     fallback library function in libgcc that might contain a call to
     __builtin___clear_cache() would recurse infinitely.  */
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    {
      error ("both arguments to %<__builtin___clear_cache%> must be pointers");
      return const0_rtx;
    }

  if (HAVE_clear_cache)
    {
      struct expand_operand ops[2];

      begin = CALL_EXPR_ARG (exp, 0);
      begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);

      end = CALL_EXPR_ARG (exp, 1);
      end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);

      create_address_operand (&ops[0], begin_rtx);
      create_address_operand (&ops[1], end_rtx);
      if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
	return const0_rtx;
    }
  return const0_rtx;
#endif /* HAVE_clear_cache */
}

/* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT.  */

static rtx
round_trampoline_addr (rtx tramp)
{
  rtx temp, addend, mask;

  /* If we don't need too much alignment, we'll have been guaranteed
     proper alignment by get_trampoline_type.  */
  if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
    return tramp;

  /* Round address up to desired boundary.  */
  temp = gen_reg_rtx (Pmode);
  addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
  mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);

  temp  = expand_simple_binop (Pmode, PLUS, tramp, addend,
			       temp, 0, OPTAB_LIB_WIDEN);
  tramp = expand_simple_binop (Pmode, AND, temp, mask,
			       temp, 0, OPTAB_LIB_WIDEN);

  return tramp;
}
static rtx
expand_builtin_init_trampoline (tree exp, bool onstack)
{
  tree t_tramp, t_func, t_chain;
  rtx m_tramp, r_tramp, r_chain, tmp;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
			 POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_tramp = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  r_tramp = expand_normal (t_tramp);
  m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
  MEM_NOTRAP_P (m_tramp) = 1;

  /* If ONSTACK, the TRAMP argument should be the address of a field
     within the local function's FRAME decl.  Either way, let's see if
     we can fill in the MEM_ATTRs for this memory.  */
  if (TREE_CODE (t_tramp) == ADDR_EXPR)
    set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);

  /* Creator of a heap trampoline is responsible for making sure the
     address is aligned to at least STACK_BOUNDARY.  Normally malloc
     will ensure this anyhow.  */
  tmp = round_trampoline_addr (r_tramp);
  if (tmp != r_tramp)
    {
      m_tramp = change_address (m_tramp, BLKmode, tmp);
      set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
      set_mem_size (m_tramp, TRAMPOLINE_SIZE);
    }

  /* The FUNC argument should be the address of the nested function.
     Extract the actual function decl to pass to the hook.  */
  gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
  t_func = TREE_OPERAND (t_func, 0);
  gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);

  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the trampoline.  */
  targetm.calls.trampoline_init (m_tramp, t_func, r_chain);

  if (onstack)
    {
      trampolines_created = 1;

      warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
		  "trampoline generated for nested function %qD", t_func);
    }

  return const0_rtx;
}
static rtx
expand_builtin_adjust_trampoline (tree exp)
{
  rtx tramp;

  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
  tramp = round_trampoline_addr (tramp);
  if (targetm.calls.trampoline_adjust_address)
    tramp = targetm.calls.trampoline_adjust_address (tramp);

  return tramp;
}

/* Expand the call EXP to the built-in signbit, signbitf or signbitl
   function.  The function first checks whether the back end provides
   an insn to implement signbit for the respective mode.  If not, it
   checks whether the floating point format of the value is such that
   the sign bit can be extracted.  If that is not the case, the
   function returns NULL_RTX to indicate that a normal call should be
   emitted rather than expanding the function in-line.  EXP is the
   expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  */

static rtx
expand_builtin_signbit (tree exp, rtx target)
{
  const struct real_format *fmt;
  enum machine_mode fmode, imode, rmode;
  tree arg;
  int word, bitpos;
  enum insn_code icode;
  rtx temp;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  fmode = TYPE_MODE (TREE_TYPE (arg));
  rmode = TYPE_MODE (TREE_TYPE (exp));
  fmt = REAL_MODE_FORMAT (fmode);

  arg = builtin_save_expr (arg);

  /* Expand the argument yielding a RTX expression.  */
  temp = expand_normal (arg);

  /* Check if the back end provides an insn that handles signbit for the
     argument's mode.  */
  icode = optab_handler (signbit_optab, fmode);
  if (icode != CODE_FOR_nothing)
    {
      rtx last = get_last_insn ();
      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
	return target;
      delete_insns_since (last);
    }

  /* For floating point formats without a sign bit, implement signbit
     as "ARG < 0.0".  */
  bitpos = fmt->signbit_ro;
  if (bitpos < 0)
    {
      /* But we can't do this if the format supports signed zero.  */
      if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
	return NULL_RTX;

      arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
			     build_real (TREE_TYPE (arg), dconst0));
      return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
    }

  if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
    {
      imode = int_mode_for_mode (fmode);
      if (imode == BLKmode)
	return NULL_RTX;
      temp = gen_lowpart (imode, temp);
    }
  else
    {
      imode = word_mode;
      /* Handle targets with different FP word orders.  */
      if (FLOAT_WORDS_BIG_ENDIAN)
	word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
      else
	word = bitpos / BITS_PER_WORD;
      temp = operand_subword_force (temp, word, fmode);
      bitpos = bitpos % BITS_PER_WORD;
    }

  /* Force the intermediate word_mode (or narrower) result into a
     register.  This avoids attempting to create paradoxical SUBREGs
     of floating point modes below.  */
  temp = force_reg (imode, temp);

  /* If the bitpos is within the "result mode" lowpart, the operation
     can be implemented with a single bitwise AND.  Otherwise, we need
     a right shift and an AND.  */

  if (bitpos < GET_MODE_BITSIZE (rmode))
    {
      wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));

      if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
	temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp,
			   immed_wide_int_const (mask, rmode),
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }
  else
    {
      /* Perform a logical right shift to place the signbit in the least
	 significant bit, then truncate the result to the desired mode
	 and mask just this bit.  */
      temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
      temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp, const1_rtx,
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }

  return temp;
}

/* Expand fork or exec calls.  TARGET is the desired target of the
   call.  EXP is the call.  FN is the
   identifier of the actual function.  IGNORE is nonzero if the
   value is to be ignored.  */

static rtx
expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
{
  tree id, decl;
  tree call;

  /* If we are not profiling, just call the function.  */
  if (!profile_arc_flag)
    return NULL_RTX;

  /* Otherwise call the wrapper.  This should be equivalent for the rest of
     the compiler, so the code does not diverge, and the wrapper may run the
     code necessary for keeping the profiling sane.  */

  switch (DECL_FUNCTION_CODE (fn))
    {
    case BUILT_IN_FORK:
      id = get_identifier ("__gcov_fork");
      break;

    case BUILT_IN_EXECL:
      id = get_identifier ("__gcov_execl");
      break;

    case BUILT_IN_EXECV:
      id = get_identifier ("__gcov_execv");
      break;

    case BUILT_IN_EXECLP:
      id = get_identifier ("__gcov_execlp");
      break;

    case BUILT_IN_EXECLE:
      id = get_identifier ("__gcov_execle");
      break;

    case BUILT_IN_EXECVP:
      id = get_identifier ("__gcov_execvp");
      break;

    case BUILT_IN_EXECVE:
      id = get_identifier ("__gcov_execve");
      break;

    default:
      gcc_unreachable ();
    }

  decl = build_decl (DECL_SOURCE_LOCATION (fn),
		     FUNCTION_DECL, id, TREE_TYPE (fn));
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  TREE_NOTHROW (decl) = 1;
  DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
  DECL_VISIBILITY_SPECIFIED (decl) = 1;
  call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
  return expand_call (call, target, ignore);
}

/* Reconstitute a mode for a __sync intrinsic operation.  Since the type of
   the pointer in these functions is void*, the tree optimizers may remove
   casts.  The mode computed in expand_builtin isn't reliable either, due
   to __sync_bool_compare_and_swap.

   FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
   group of builtins.  This gives us log2 of the mode size.  */

static inline enum machine_mode
get_builtin_sync_mode (int fcode_diff)
{
  /* The size is not negotiable, so ask not to get BLKmode in return
     if the target indicates that a smaller size would be better.  */
  return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
}
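
/* On typical targets the mapping works out as (illustration):

     fcode_diff 0 -> 1-byte mode  (1 << 0)
     fcode_diff 1 -> 2-byte mode  (1 << 1)
     fcode_diff 2 -> 4-byte mode  (1 << 2)
     fcode_diff 3 -> 8-byte mode  (1 << 3)
     fcode_diff 4 -> 16-byte mode (1 << 4)

   e.g. BUILT_IN_SYNC_FETCH_AND_ADD_4 - BUILT_IN_SYNC_FETCH_AND_ADD_1 == 2,
   which selects the 4-byte integer mode.  */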

/* Expand the memory expression LOC and return the appropriate memory operand
   for the builtin_sync operations.  */

static rtx
get_builtin_sync_mem (tree loc, enum machine_mode mode)
{
  rtx addr, mem;

  addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
  addr = convert_memory_address (Pmode, addr);

  /* Note that we explicitly do not want any alias information for this
     memory, so that we kill all other live memories.  Otherwise we don't
     satisfy the full barrier semantics of the intrinsic.  */
  mem = validize_mem (gen_rtx_MEM (mode, addr));

  /* The alignment needs to be at least according to that of the mode.  */
  set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
			   get_pointer_alignment (loc)));
  set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
  MEM_VOLATILE_P (mem) = 1;

  return mem;
}

/* Make sure an argument is in the right mode.
   EXP is the tree argument.
   MODE is the mode it should be in.  */

static rtx
expand_expr_force_mode (tree exp, enum machine_mode mode)
{
  rtx val;
  enum machine_mode old_mode;

  val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
  /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
     of CONST_INTs, where we know the old_mode only from the call argument.  */

  old_mode = GET_MODE (val);
  if (old_mode == VOIDmode)
    old_mode = TYPE_MODE (TREE_TYPE (exp));
  val = convert_modes (mode, old_mode, val, 1);
  return val;
}

/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
   EXP is the CALL_EXPR.  CODE is the rtx code
   that corresponds to the arithmetic or logical operation from the name;
   an exception here is that NOT actually means NAND.  TARGET is an optional
   place for us to store the results; AFTER is true if this is the
   fetch_and_xxx form.  */

static rtx
expand_builtin_sync_operation (enum machine_mode mode, tree exp,
			       enum rtx_code code, bool after,
			       rtx target)
{
  rtx val, mem;
  location_t loc = EXPR_LOCATION (exp);

  if (code == NOT && warn_sync_nand)
    {
      tree fndecl = get_callee_fndecl (exp);
      enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

      static bool warned_f_a_n, warned_n_a_f;

      switch (fcode)
	{
	case BUILT_IN_SYNC_FETCH_AND_NAND_1:
	case BUILT_IN_SYNC_FETCH_AND_NAND_2:
	case BUILT_IN_SYNC_FETCH_AND_NAND_4:
	case BUILT_IN_SYNC_FETCH_AND_NAND_8:
	case BUILT_IN_SYNC_FETCH_AND_NAND_16:
	  if (warned_f_a_n)
	    break;

	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_f_a_n = true;
	  break;

	case BUILT_IN_SYNC_NAND_AND_FETCH_1:
	case BUILT_IN_SYNC_NAND_AND_FETCH_2:
	case BUILT_IN_SYNC_NAND_AND_FETCH_4:
	case BUILT_IN_SYNC_NAND_AND_FETCH_8:
	case BUILT_IN_SYNC_NAND_AND_FETCH_16:
	  if (warned_n_a_f)
	    break;

	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_n_a_f = true;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SEQ_CST,
				 after);
}

/* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
   intrinsics.  EXP is the CALL_EXPR.  IS_BOOL is
   true if this is the boolean form.  TARGET is a place for us to store the
   results; this is NOT optional if IS_BOOL is true.  */

static rtx
expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
				 bool is_bool, rtx target)
{
  rtx old_val, new_val, mem;
  rtx *pbool, *poval;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
  new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);

  pbool = poval = NULL;
  if (target != const0_rtx)
    {
      if (is_bool)
	pbool = &target;
      else
	poval = &target;
    }
  if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
				       false, MEMMODEL_SEQ_CST,
				       MEMMODEL_SEQ_CST))
    return NULL_RTX;

  return target;
}

/* Expand the __sync_lock_test_and_set intrinsic.  Note that the most
   general form is actually an atomic exchange, and some targets only
   support a reduced form with the second argument being a constant 1.
   EXP is the CALL_EXPR; TARGET is an optional place for us to store
   the results.  */

static rtx
expand_builtin_sync_lock_test_and_set (enum machine_mode mode, tree exp,
				       rtx target)
{
  rtx val, mem;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_sync_lock_test_and_set (target, mem, val);
}

/* Expand the __sync_lock_release intrinsic.  EXP is the CALL_EXPR.  */

static void
expand_builtin_sync_lock_release (enum machine_mode mode, tree exp)
{
  rtx mem;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  expand_atomic_store (mem, const0_rtx, MEMMODEL_RELEASE, true);
}

/* Given an integer representing an ``enum memmodel'', verify its
   correctness and return the memory model enum.  */

static enum memmodel
get_memmodel (tree exp)
{
  rtx op;
  unsigned HOST_WIDE_INT val;

  /* If the parameter is not a constant, it's a run time value so we'll just
     convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking.  */
  if (TREE_CODE (exp) != INTEGER_CST)
    return MEMMODEL_SEQ_CST;

  op = expand_normal (exp);

  val = INTVAL (op);
  if (targetm.memmodel_check)
    val = targetm.memmodel_check (val);
  else if (val & ~MEMMODEL_MASK)
    {
      warning (OPT_Winvalid_memory_model,
	       "Unknown architecture specifier in memory model to builtin.");
      return MEMMODEL_SEQ_CST;
    }

  if ((INTVAL (op) & MEMMODEL_MASK) >= MEMMODEL_LAST)
    {
      warning (OPT_Winvalid_memory_model,
	       "invalid memory model argument to builtin");
      return MEMMODEL_SEQ_CST;
    }

  return (enum memmodel) val;
}

/* Expand the __atomic_exchange intrinsic:
   	TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_exchange (enum machine_mode mode, tree exp, rtx target)
{
  rtx val, mem;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));
  if ((model & MEMMODEL_MASK) == MEMMODEL_CONSUME)
    {
      error ("invalid memory model for %<__atomic_exchange%>");
      return NULL_RTX;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_exchange (target, mem, val, model);
}

/* Expand the __atomic_compare_exchange intrinsic:
   	bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
					TYPE desired, BOOL weak,
					enum memmodel success,
					enum memmodel failure)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_compare_exchange (enum machine_mode mode, tree exp,
					rtx target)
{
  rtx expect, desired, mem, oldval, label;
  enum memmodel success, failure;
  tree weak;
  bool is_weak;

  success = get_memmodel (CALL_EXPR_ARG (exp, 4));
  failure = get_memmodel (CALL_EXPR_ARG (exp, 5));

  if ((failure & MEMMODEL_MASK) == MEMMODEL_RELEASE
      || (failure & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
    {
      error ("invalid failure memory model for %<__atomic_compare_exchange%>");
      return NULL_RTX;
    }

  if (failure > success)
    {
      error ("failure memory model cannot be stronger than success "
	     "memory model for %<__atomic_compare_exchange%>");
      return NULL_RTX;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  expect = expand_normal (CALL_EXPR_ARG (exp, 1));
  expect = convert_memory_address (Pmode, expect);
  expect = gen_rtx_MEM (mode, expect);
  desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);

  weak = CALL_EXPR_ARG (exp, 3);
  is_weak = false;
  if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
    is_weak = true;

  if (target == const0_rtx)
    target = NULL;

  /* Lest the rtl backend create a race condition with an improper store
     to memory, always create a new pseudo for OLDVAL.  */
  oldval = NULL;

  if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
				       is_weak, success, failure))
    return NULL_RTX;

  /* Conditionally store back to EXPECT, lest we create a race condition
     with an improper store to memory.  */
  /* ??? With a rearrangement of atomics at the gimple level, we can handle
     the normal case where EXPECT is totally private, i.e. a register.  At
     which point the store can be unconditional.  */
  label = gen_label_rtx ();
  emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL, VOIDmode, 1, label);
  emit_move_insn (expect, oldval);
  emit_label (label);

  return target;
}

/* Expand the __atomic_load intrinsic:
   	TYPE __atomic_load (TYPE *object, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_load (enum machine_mode mode, tree exp, rtx target)
{
  rtx mem;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 1));
  if ((model & MEMMODEL_MASK) == MEMMODEL_RELEASE
      || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
    {
      error ("invalid memory model for %<__atomic_load%>");
      return NULL_RTX;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operand.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  return expand_atomic_load (target, mem, model);
}

/* Expand the __atomic_store intrinsic:
   	void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_store (enum machine_mode mode, tree exp)
{
  rtx mem, val;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));
  if ((model & MEMMODEL_MASK) != MEMMODEL_RELAXED
      && (model & MEMMODEL_MASK) != MEMMODEL_SEQ_CST
      && (model & MEMMODEL_MASK) != MEMMODEL_RELEASE)
    {
      error ("invalid memory model for %<__atomic_store%>");
      return NULL_RTX;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_store (mem, val, model, false);
}

/* Expand the __atomic_fetch_XXX intrinsic:
   	TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.
   CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
   FETCH_AFTER is true if returning the result of the operation.
   FETCH_AFTER is false if returning the value before the operation.
   IGNORE is true if the result is not used.
   EXT_CALL is the correct builtin for an external call if this cannot be
   resolved to an instruction sequence.  */

static rtx
expand_builtin_atomic_fetch_op (enum machine_mode mode, tree exp, rtx target,
				enum rtx_code code, bool fetch_after,
				bool ignore, enum built_in_function ext_call)
{
  rtx val, mem, ret;
  enum memmodel model;
  tree fndecl;
  tree addr;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  /* Only try generating instructions if inlining is turned on.  */
  if (flag_inline_atomics)
    {
      ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
      if (ret)
	return ret;
    }

  /* Return if a different routine isn't needed for the library call.  */
  if (ext_call == BUILT_IN_NONE)
    return NULL_RTX;

  /* Change the call to the specified function.  */
  fndecl = get_callee_fndecl (exp);
  addr = CALL_EXPR_FN (exp);
  STRIP_NOPS (addr);

  gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
  TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);

  /* Expand the call here so we can emit trailing code.  */
  ret = expand_call (exp, target, ignore);

  /* Replace the original function just in case it matters.  */
  TREE_OPERAND (addr, 0) = fndecl;

  /* Then issue the arithmetic correction to return the right result.  */
  if (!ignore)
    {
      if (code == NOT)
	{
	  ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
				     OPTAB_LIB_WIDEN);
	  ret = expand_simple_unop (mode, NOT, ret, target, true);
	}
      else
	ret = expand_simple_binop (mode, code, ret, val, target, true,
				   OPTAB_LIB_WIDEN);
    }
  return ret;
}
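
/* Example of the NAND correction above (example values only): if the
   library routine named by EXT_CALL returns the pre-operation value RET
   for a nand_fetch builtin, the post-operation value the caller expects
   is recomputed as

     ret = ~(ret & val);

   which is exactly the AND followed by NOT emitted above.  */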
#ifndef HAVE_atomic_clear
# define HAVE_atomic_clear 0
# define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
#endif

/* Expand an atomic clear operation.
	void _atomic_clear (BOOL *obj, enum memmodel)
   EXP is the call expression.  */

static rtx
expand_builtin_atomic_clear (tree exp)
{
  enum machine_mode mode;
  rtx mem, ret;
  enum memmodel model;

  mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  model = get_memmodel (CALL_EXPR_ARG (exp, 1));

  if ((model & MEMMODEL_MASK) == MEMMODEL_ACQUIRE
      || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
    {
      error ("invalid memory model for %<__atomic_store%>");
      return const0_rtx;
    }

  if (HAVE_atomic_clear)
    {
      emit_insn (gen_atomic_clear (mem, model));
      return const0_rtx;
    }

  /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
     Failing that, a store is issued by __atomic_store.  The only way this can
     fail is if the bool type is larger than a word size.  Unlikely, but
     handle it anyway for completeness.  Assume a single threaded model since
     there is no atomic support in this case, and no barriers are required.  */
  ret = expand_atomic_store (mem, const0_rtx, model, true);
  if (!ret)
    emit_move_insn (mem, const0_rtx);
  return const0_rtx;
}

/* Expand an atomic test_and_set operation.
	bool _atomic_test_and_set (BOOL *obj, enum memmodel)
   EXP is the call expression.  */

static rtx
expand_builtin_atomic_test_and_set (tree exp, rtx target)
{
  rtx mem;
  enum memmodel model;
  enum machine_mode mode;

  mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  model = get_memmodel (CALL_EXPR_ARG (exp, 1));

  return expand_atomic_test_and_set (target, mem, model);
}

/* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
   this architecture.  If ARG1 is NULL, use typical alignment for size ARG0.  */

static tree
fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
{
  int size;
  enum machine_mode mode;
  unsigned int mode_align, type_align;

  if (TREE_CODE (arg0) != INTEGER_CST)
    return NULL_TREE;

  size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
  mode = mode_for_size (size, MODE_INT, 0);
  mode_align = GET_MODE_ALIGNMENT (mode);

  if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
    type_align = mode_align;
  else
    {
      tree ttype = TREE_TYPE (arg1);

      /* This function is usually invoked and folded immediately by the front
	 end before anything else has a chance to look at it.  The pointer
	 parameter at this point is usually cast to a void *, so check for that
	 and look past the cast.  */
      if (TREE_CODE (arg1) == NOP_EXPR && POINTER_TYPE_P (ttype)
	  && VOID_TYPE_P (TREE_TYPE (ttype)))
	arg1 = TREE_OPERAND (arg1, 0);

      ttype = TREE_TYPE (arg1);
      gcc_assert (POINTER_TYPE_P (ttype));

      /* Get the underlying type of the object.  */
      ttype = TREE_TYPE (ttype);
      type_align = TYPE_ALIGN (ttype);
    }

  /* If the object has smaller alignment, the lock free routines cannot
     be used.  */
  if (type_align < mode_align)
    return boolean_false_node;

  /* Check if a compare_and_swap pattern exists for the mode which represents
     the required size.  The pattern is not allowed to fail, so the existence
     of the pattern indicates support is present.  */
  if (can_compare_and_swap_p (mode, true))
    return boolean_true_node;
  else
    return boolean_false_node;
}

/* Return true if the parameters to call EXP represent an object which will
   always generate lock free instructions.  The first argument represents the
   size of the object, and the second parameter is a pointer to the object
   itself.  If NULL is passed for the object, then the result is based on
   typical alignment for an object of the specified size.  Otherwise return
   false.  */

static rtx
expand_builtin_atomic_always_lock_free (tree exp)
{
  tree size;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);

  if (TREE_CODE (arg0) != INTEGER_CST)
    {
      error ("non-constant argument 1 to __atomic_always_lock_free");
      return const0_rtx;
    }

  size = fold_builtin_atomic_always_lock_free (arg0, arg1);
  if (size == boolean_true_node)
    return const1_rtx;
  return const0_rtx;
}

/* Return a one or zero if it can be determined that object ARG1 of size ARG0
   is lock free on this architecture.  */

static tree
fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
{
  if (!flag_inline_atomics)
    return NULL_TREE;

  /* If it isn't always lock free, don't generate a result.  */
  if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
    return boolean_true_node;

  return NULL_TREE;
}

/* Return true if the parameters to call EXP represent an object which will
   always generate lock free instructions.  The first argument represents the
   size of the object, and the second parameter is a pointer to the object
   itself.  If NULL is passed for the object, then the result is based on
   typical alignment for an object of the specified size.  Otherwise return
   NULL.  */

static rtx
expand_builtin_atomic_is_lock_free (tree exp)
{
  tree size;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);

  if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
    {
      error ("non-integer argument 1 to __atomic_is_lock_free");
      return NULL_RTX;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* If the value is known at compile time, return the RTX for it.  */
  size = fold_builtin_atomic_is_lock_free (arg0, arg1);
  if (size == boolean_true_node)
    return const1_rtx;

  return NULL_RTX;
}

/* Expand the __atomic_thread_fence intrinsic:
   	void __atomic_thread_fence (enum memmodel)
   EXP is the CALL_EXPR.  */

static void
expand_builtin_atomic_thread_fence (tree exp)
{
  enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
  expand_mem_thread_fence (model);
}

/* Expand the __atomic_signal_fence intrinsic:
   	void __atomic_signal_fence (enum memmodel)
   EXP is the CALL_EXPR.  */

static void
expand_builtin_atomic_signal_fence (tree exp)
{
  enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
  expand_mem_signal_fence (model);
}

/* Expand the __sync_synchronize intrinsic.  */

static void
expand_builtin_sync_synchronize (void)
{
  expand_mem_thread_fence (MEMMODEL_SEQ_CST);
}
static rtx
expand_builtin_thread_pointer (tree exp, rtx target)
{
  enum insn_code icode;
  if (!validate_arglist (exp, VOID_TYPE))
    return const0_rtx;
  icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand op;
      /* If the target is not suitable then create a new target.  */
      if (target == NULL_RTX
	  || !REG_P (target)
	  || GET_MODE (target) != Pmode)
	target = gen_reg_rtx (Pmode);
      create_output_operand (&op, target, Pmode);
      expand_insn (icode, 1, &op);
      return target;
    }
  error ("__builtin_thread_pointer is not supported on this target");
  return const0_rtx;
}

static void
expand_builtin_set_thread_pointer (tree exp)
{
  enum insn_code icode;
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return;
  icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand op;
      rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
			     Pmode, EXPAND_NORMAL);
      create_input_operand (&op, val, Pmode);
      expand_insn (icode, 1, &op);
      return;
    }
  error ("__builtin_set_thread_pointer is not supported on this target");
}

/* Emit code to restore the current value of stack.  */

static void
expand_stack_restore (tree var)
{
  rtx prev, sa = expand_normal (var);

  sa = convert_memory_address (Pmode, sa);

  prev = get_last_insn ();
  emit_stack_restore (SAVE_BLOCK, sa);
  fixup_args_size_notes (prev, get_last_insn (), 0);
}

/* Emit code to save the current value of stack.  */

static rtx
expand_stack_save (void)
{
  rtx ret = NULL_RTX;

  do_pending_stack_adjust ();
  emit_stack_save (SAVE_BLOCK, &ret);
  return ret;
}
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

rtx
expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
		int ignore)
{
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
  int flags;

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return targetm.expand_builtin (exp, target, subtarget, mode, ignore);

  /* When not optimizing, generate calls to library functions for a certain
     set of builtins.  */
  if (!optimize
      && !called_as_built_in (fndecl)
      && fcode != BUILT_IN_FORK
      && fcode != BUILT_IN_EXECL
      && fcode != BUILT_IN_EXECV
      && fcode != BUILT_IN_EXECLP
      && fcode != BUILT_IN_EXECLE
      && fcode != BUILT_IN_EXECVP
      && fcode != BUILT_IN_EXECVE
      && fcode != BUILT_IN_ALLOCA
      && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
      && fcode != BUILT_IN_FREE)
    return expand_call (exp, target, ignore);

  /* The built-in function expanders test for target == const0_rtx
     to determine whether the function's result will be ignored.  */
  if (ignore)
    target = const0_rtx;

  /* If the result of a pure or const built-in function is ignored, and
     none of its arguments are volatile, we can avoid expanding the
     built-in call and just evaluate the arguments for side-effects.  */
  if (target == const0_rtx
      && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
      && !(flags & ECF_LOOPING_CONST_OR_PURE))
    {
      bool volatilep = false;
      tree arg;
      call_expr_arg_iterator iter;

      FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
	if (TREE_THIS_VOLATILE (arg))
	  {
	    volatilep = true;
	    break;
	  }

      if (!volatilep)
	{
	  FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
	    expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return const0_rtx;
	}
    }

  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_FABS):
    case BUILT_IN_FABSD32:
    case BUILT_IN_FABSD64:
    case BUILT_IN_FABSD128:
      target = expand_builtin_fabs (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_COPYSIGN):
      target = expand_builtin_copysign (exp, target, subtarget);
      if (target)
	return target;
      break;

      /* Just do a normal library call if we were unable to fold
	 the values.  */
    CASE_FLT_FN (BUILT_IN_CABS):
      break;

    CASE_FLT_FN (BUILT_IN_EXP):
    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
    CASE_FLT_FN (BUILT_IN_EXP2):
    CASE_FLT_FN (BUILT_IN_EXPM1):
    CASE_FLT_FN (BUILT_IN_LOGB):
    CASE_FLT_FN (BUILT_IN_LOG):
    CASE_FLT_FN (BUILT_IN_LOG10):
    CASE_FLT_FN (BUILT_IN_LOG2):
    CASE_FLT_FN (BUILT_IN_LOG1P):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ACOS):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
      /* Treat these like sqrt only if unsafe math optimizations are allowed,
	 because of possible accuracy problems.  */
      if (! flag_unsafe_math_optimizations)
	break;

    CASE_FLT_FN (BUILT_IN_SQRT):
    CASE_FLT_FN (BUILT_IN_FLOOR):
    CASE_FLT_FN (BUILT_IN_CEIL):
    CASE_FLT_FN (BUILT_IN_TRUNC):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      target = expand_builtin_mathfn (exp, target, subtarget);
      if (target)
	return target;
      break;
    CASE_FLT_FN (BUILT_IN_FMA):
      target = expand_builtin_mathfn_ternary (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_ILOGB):
      if (! flag_unsafe_math_optimizations)
	break;
    CASE_FLT_FN (BUILT_IN_ISINF):
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_ISFINITE:
    case BUILT_IN_ISNORMAL:
      target = expand_builtin_interclass_mathfn (exp, target);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      target = expand_builtin_int_roundingfn (exp, target);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_IRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_IROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      target = expand_builtin_int_roundingfn_2 (exp, target);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_POWI):
      target = expand_builtin_powi (exp, target);
      if (target)
	return target;
      break;
    CASE_FLT_FN (BUILT_IN_ATAN2):
    CASE_FLT_FN (BUILT_IN_LDEXP):
    CASE_FLT_FN (BUILT_IN_SCALB):
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      if (! flag_unsafe_math_optimizations)
	break;

    CASE_FLT_FN (BUILT_IN_FMOD):
    CASE_FLT_FN (BUILT_IN_REMAINDER):
    CASE_FLT_FN (BUILT_IN_DREM):
    CASE_FLT_FN (BUILT_IN_POW):
      target = expand_builtin_mathfn_2 (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_CEXPI):
      target = expand_builtin_cexpi (exp, target);
      gcc_assert (target);
      return target;

    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      if (! flag_unsafe_math_optimizations)
	break;
      target = expand_builtin_mathfn_3 (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_SINCOS):
      if (! flag_unsafe_math_optimizations)
	break;
      target = expand_builtin_sincos (exp);
      if (target)
	return target;
      break;
    case BUILT_IN_APPLY_ARGS:
      return expand_builtin_apply_args ();
      /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
	 FUNCTION with a copy of the parameters described by
	 ARGUMENTS, and ARGSIZE.  It returns a block of memory
	 allocated on the stack into which is stored all the registers
	 that might possibly be used for returning the result of a
	 function.  ARGUMENTS is the value returned by
	 __builtin_apply_args.  ARGSIZE is the number of bytes of
	 arguments that must be copied.  ??? How should this value be
	 computed?  We'll also need a safe worst case value for varargs
	 functions.  */
    case BUILT_IN_APPLY:
      if (!validate_arglist (exp, POINTER_TYPE,
			     POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
	  && !validate_arglist (exp, REFERENCE_TYPE,
				POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
	return const0_rtx;
      else
	{
	  rtx ops[3];

	  ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
	  ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
	  ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));

	  return expand_builtin_apply (ops[0], ops[1], ops[2]);
	}
      /* __builtin_return (RESULT) causes the function to return the
	 value described by RESULT.  RESULT is address of the block of
	 memory returned by __builtin_apply.  */
    case BUILT_IN_RETURN:
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
	expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
      return const0_rtx;

    case BUILT_IN_SAVEREGS:
      return expand_builtin_saveregs ();

    case BUILT_IN_VA_ARG_PACK:
      /* All valid uses of __builtin_va_arg_pack () are removed during
	 inlining.  */
      error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
      return const0_rtx;

    case BUILT_IN_VA_ARG_PACK_LEN:
      /* All valid uses of __builtin_va_arg_pack_len () are removed during
	 inlining.  */
      error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
      return const0_rtx;

      /* Return the address of the first anonymous stack arg.  */
    case BUILT_IN_NEXT_ARG:
      if (fold_builtin_next_arg (exp, false))
	return const0_rtx;
      return expand_builtin_next_arg ();
    case BUILT_IN_CLEAR_CACHE:
      target = expand_builtin___clear_cache (exp);
      if (target)
	return target;
      break;

    case BUILT_IN_CLASSIFY_TYPE:
      return expand_builtin_classify_type (exp);

    case BUILT_IN_CONSTANT_P:
      return const0_rtx;

    case BUILT_IN_FRAME_ADDRESS:
    case BUILT_IN_RETURN_ADDRESS:
      return expand_builtin_frame_address (fndecl, exp);
      /* Returns the address of the area where the structure is returned.
	 0 otherwise.  */
    case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      if (call_expr_nargs (exp) != 0
	  || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
	  || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
	return const0_rtx;
      else
	return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);

    case BUILT_IN_ALLOCA:
    case BUILT_IN_ALLOCA_WITH_ALIGN:
      /* If the allocation stems from the declaration of a variable-sized
	 object, it cannot accumulate.  */
      target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
      if (target)
	return target;
      break;

    case BUILT_IN_STACK_SAVE:
      return expand_stack_save ();

    case BUILT_IN_STACK_RESTORE:
      expand_stack_restore (CALL_EXPR_ARG (exp, 0));
      return const0_rtx;
    case BUILT_IN_BSWAP16:
    case BUILT_IN_BSWAP32:
    case BUILT_IN_BSWAP64:
      target = expand_builtin_bswap (target_mode, exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_FFS):
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, ffs_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_CLZ):
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, clz_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_CTZ):
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, ctz_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_CLRSB):
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, clrsb_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_POPCOUNT):
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, popcount_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_PARITY):
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, parity_optab);
      if (target)
	return target;
      break;
    case BUILT_IN_STRLEN:
      target = expand_builtin_strlen (exp, target, target_mode);
      if (target)
	return target;
      break;

    case BUILT_IN_STRCPY:
      target = expand_builtin_strcpy (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_STRNCPY:
      target = expand_builtin_strncpy (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_STPCPY:
      target = expand_builtin_stpcpy (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMCPY:
      target = expand_builtin_memcpy (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMPCPY:
      target = expand_builtin_mempcpy (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMSET:
      target = expand_builtin_memset (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_BZERO:
      target = expand_builtin_bzero (exp);
      if (target)
	return target;
      break;

    case BUILT_IN_STRCMP:
      target = expand_builtin_strcmp (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_STRNCMP:
      target = expand_builtin_strncmp (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMCMP:
      target = expand_builtin_memcmp (exp, target, mode);
      if (target)
	return target;
      break;
    case BUILT_IN_SETJMP:
      /* This should have been lowered to the builtins below.  */
      gcc_unreachable ();

    case BUILT_IN_SETJMP_SETUP:
      /* __builtin_setjmp_setup is passed a pointer to an array of five words
	 and the receiver label.  */
      if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
	{
	  rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
				      VOIDmode, EXPAND_NORMAL);
	  tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
	  rtx label_r = label_rtx (label);

	  /* This is copied from the handling of non-local gotos.  */
	  expand_builtin_setjmp_setup (buf_addr, label_r);
	  nonlocal_goto_handler_labels
	    = gen_rtx_EXPR_LIST (VOIDmode, label_r,
				 nonlocal_goto_handler_labels);
	  /* ??? Do not let expand_label treat us as such since we would
	     not want to be both on the list of non-local labels and on
	     the list of forced labels.  */
	  FORCED_LABEL (label) = 0;
	  return const0_rtx;
	}
      break;

    case BUILT_IN_SETJMP_RECEIVER:
      /* __builtin_setjmp_receiver is passed the receiver label.  */
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
	{
	  tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
	  rtx label_r = label_rtx (label);

	  expand_builtin_setjmp_receiver (label_r);
	  return const0_rtx;
	}
      break;
      /* __builtin_longjmp is passed a pointer to an array of five words.
	 It's similar to the C library longjmp function but works with
	 __builtin_setjmp above.  */
    case BUILT_IN_LONGJMP:
      if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
	{
	  rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
				      VOIDmode, EXPAND_NORMAL);
	  rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));

	  if (value != const1_rtx)
	    {
	      error ("%<__builtin_longjmp%> second argument must be 1");
	      return const0_rtx;
	    }

	  expand_builtin_longjmp (buf_addr, value);
	  return const0_rtx;
	}
      break;
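      /* Illustrative sketch (not part of GCC): unlike C setjmp/longjmp,
	 this builtin pair uses a five-word buffer and a fixed return
	 value:

	   intptr_t buf[5];
	   if (__builtin_setjmp (buf) == 0)
	     __builtin_longjmp (buf, 1);   -- second argument must be 1

	 __builtin_setjmp itself is lowered to the _SETUP/_RECEIVER forms
	 handled above before expansion reaches this switch.  */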
    case BUILT_IN_NONLOCAL_GOTO:
      target = expand_builtin_nonlocal_goto (exp);
      if (target)
	return target;
      break;

      /* This updates the setjmp buffer that is its argument with the value
	 of the current stack pointer.  */
    case BUILT_IN_UPDATE_SETJMP_BUF:
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
	{
	  rtx buf_addr
	    = expand_normal (CALL_EXPR_ARG (exp, 0));

	  expand_builtin_update_setjmp_buf (buf_addr);
	  return const0_rtx;
	}
      break;

    case BUILT_IN_TRAP:
      expand_builtin_trap ();
      return const0_rtx;

    case BUILT_IN_UNREACHABLE:
      expand_builtin_unreachable ();
      return const0_rtx;
    CASE_FLT_FN (BUILT_IN_SIGNBIT):
    case BUILT_IN_SIGNBITD32:
    case BUILT_IN_SIGNBITD64:
    case BUILT_IN_SIGNBITD128:
      target = expand_builtin_signbit (exp, target);
      if (target)
	return target;
      break;
      /* Various hooks for the DWARF 2 __throw routine.  */
    case BUILT_IN_UNWIND_INIT:
      expand_builtin_unwind_init ();
      return const0_rtx;
    case BUILT_IN_DWARF_CFA:
      return virtual_cfa_rtx;
#ifdef DWARF2_UNWIND_INFO
    case BUILT_IN_DWARF_SP_COLUMN:
      return expand_builtin_dwarf_sp_column ();
    case BUILT_IN_INIT_DWARF_REG_SIZES:
      expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
      return const0_rtx;
#endif
    case BUILT_IN_FROB_RETURN_ADDR:
      return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
    case BUILT_IN_EXTRACT_RETURN_ADDR:
      return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
    case BUILT_IN_EH_RETURN:
      expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
				CALL_EXPR_ARG (exp, 1));
      return const0_rtx;
#ifdef EH_RETURN_DATA_REGNO
    case BUILT_IN_EH_RETURN_DATA_REGNO:
      return expand_builtin_eh_return_data_regno (exp);
#endif
    case BUILT_IN_EXTEND_POINTER:
      return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
    case BUILT_IN_EH_POINTER:
      return expand_builtin_eh_pointer (exp);
    case BUILT_IN_EH_FILTER:
      return expand_builtin_eh_filter (exp);
    case BUILT_IN_EH_COPY_VALUES:
      return expand_builtin_eh_copy_values (exp);
    case BUILT_IN_VA_START:
      return expand_builtin_va_start (exp);
    case BUILT_IN_VA_END:
      return expand_builtin_va_end (exp);
    case BUILT_IN_VA_COPY:
      return expand_builtin_va_copy (exp);
    case BUILT_IN_EXPECT:
      return expand_builtin_expect (exp, target);
    case BUILT_IN_ASSUME_ALIGNED:
      return expand_builtin_assume_aligned (exp, target);
    case BUILT_IN_PREFETCH:
      expand_builtin_prefetch (exp);
      return const0_rtx;

    case BUILT_IN_INIT_TRAMPOLINE:
      return expand_builtin_init_trampoline (exp, true);
    case BUILT_IN_INIT_HEAP_TRAMPOLINE:
      return expand_builtin_init_trampoline (exp, false);
    case BUILT_IN_ADJUST_TRAMPOLINE:
      return expand_builtin_adjust_trampoline (exp);
    case BUILT_IN_FORK:
    case BUILT_IN_EXECL:
    case BUILT_IN_EXECV:
    case BUILT_IN_EXECLP:
    case BUILT_IN_EXECLE:
    case BUILT_IN_EXECVP:
    case BUILT_IN_EXECVE:
      target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
      if (target)
	return target;
      break;
    case BUILT_IN_SYNC_FETCH_AND_ADD_1:
    case BUILT_IN_SYNC_FETCH_AND_ADD_2:
    case BUILT_IN_SYNC_FETCH_AND_ADD_4:
    case BUILT_IN_SYNC_FETCH_AND_ADD_8:
    case BUILT_IN_SYNC_FETCH_AND_ADD_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
      target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_SUB_1:
    case BUILT_IN_SYNC_FETCH_AND_SUB_2:
    case BUILT_IN_SYNC_FETCH_AND_SUB_4:
    case BUILT_IN_SYNC_FETCH_AND_SUB_8:
    case BUILT_IN_SYNC_FETCH_AND_SUB_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
      target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_OR_1:
    case BUILT_IN_SYNC_FETCH_AND_OR_2:
    case BUILT_IN_SYNC_FETCH_AND_OR_4:
    case BUILT_IN_SYNC_FETCH_AND_OR_8:
    case BUILT_IN_SYNC_FETCH_AND_OR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
      target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_AND_1:
    case BUILT_IN_SYNC_FETCH_AND_AND_2:
    case BUILT_IN_SYNC_FETCH_AND_AND_4:
    case BUILT_IN_SYNC_FETCH_AND_AND_8:
    case BUILT_IN_SYNC_FETCH_AND_AND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
      target = expand_builtin_sync_operation (mode, exp, AND, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_XOR_1:
    case BUILT_IN_SYNC_FETCH_AND_XOR_2:
    case BUILT_IN_SYNC_FETCH_AND_XOR_4:
    case BUILT_IN_SYNC_FETCH_AND_XOR_8:
    case BUILT_IN_SYNC_FETCH_AND_XOR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
      target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_NAND_1:
    case BUILT_IN_SYNC_FETCH_AND_NAND_2:
    case BUILT_IN_SYNC_FETCH_AND_NAND_4:
    case BUILT_IN_SYNC_FETCH_AND_NAND_8:
    case BUILT_IN_SYNC_FETCH_AND_NAND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
      target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_ADD_AND_FETCH_1:
    case BUILT_IN_SYNC_ADD_AND_FETCH_2:
    case BUILT_IN_SYNC_ADD_AND_FETCH_4:
    case BUILT_IN_SYNC_ADD_AND_FETCH_8:
    case BUILT_IN_SYNC_ADD_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_SUB_AND_FETCH_1:
    case BUILT_IN_SYNC_SUB_AND_FETCH_2:
    case BUILT_IN_SYNC_SUB_AND_FETCH_4:
    case BUILT_IN_SYNC_SUB_AND_FETCH_8:
    case BUILT_IN_SYNC_SUB_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_OR_AND_FETCH_1:
    case BUILT_IN_SYNC_OR_AND_FETCH_2:
    case BUILT_IN_SYNC_OR_AND_FETCH_4:
    case BUILT_IN_SYNC_OR_AND_FETCH_8:
    case BUILT_IN_SYNC_OR_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_AND_AND_FETCH_1:
    case BUILT_IN_SYNC_AND_AND_FETCH_2:
    case BUILT_IN_SYNC_AND_AND_FETCH_4:
    case BUILT_IN_SYNC_AND_AND_FETCH_8:
    case BUILT_IN_SYNC_AND_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, AND, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_XOR_AND_FETCH_1:
    case BUILT_IN_SYNC_XOR_AND_FETCH_2:
    case BUILT_IN_SYNC_XOR_AND_FETCH_4:
    case BUILT_IN_SYNC_XOR_AND_FETCH_8:
    case BUILT_IN_SYNC_XOR_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_NAND_AND_FETCH_1:
    case BUILT_IN_SYNC_NAND_AND_FETCH_2:
    case BUILT_IN_SYNC_NAND_AND_FETCH_4:
    case BUILT_IN_SYNC_NAND_AND_FETCH_8:
    case BUILT_IN_SYNC_NAND_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
      if (target)
	return target;
      break;
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
      if (mode == VOIDmode)
	mode = TYPE_MODE (boolean_type_node);
      if (!target || !register_operand (target, mode))
	target = gen_reg_rtx (mode);

      mode = get_builtin_sync_mode
	       (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
      target = expand_builtin_compare_and_swap (mode, exp, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
      mode = get_builtin_sync_mode
	       (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
      target = expand_builtin_compare_and_swap (mode, exp, false, target);
      if (target)
	return target;
      break;
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
      target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_LOCK_RELEASE_1:
    case BUILT_IN_SYNC_LOCK_RELEASE_2:
    case BUILT_IN_SYNC_LOCK_RELEASE_4:
    case BUILT_IN_SYNC_LOCK_RELEASE_8:
    case BUILT_IN_SYNC_LOCK_RELEASE_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
      expand_builtin_sync_lock_release (mode, exp);
      return const0_rtx;

    case BUILT_IN_SYNC_SYNCHRONIZE:
      expand_builtin_sync_synchronize ();
      return const0_rtx;
    case BUILT_IN_ATOMIC_EXCHANGE_1:
    case BUILT_IN_ATOMIC_EXCHANGE_2:
    case BUILT_IN_ATOMIC_EXCHANGE_4:
    case BUILT_IN_ATOMIC_EXCHANGE_8:
    case BUILT_IN_ATOMIC_EXCHANGE_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
      target = expand_builtin_atomic_exchange (mode, exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
      {
	unsigned int nargs, z;
	vec<tree, va_gc> *vec;

	mode =
	  get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
	target = expand_builtin_atomic_compare_exchange (mode, exp, target);
	if (target)
	  return target;

	/* If this is turned into an external library call, the weak parameter
	   must be dropped to match the expected parameter list.  */
	nargs = call_expr_nargs (exp);
	vec_alloc (vec, nargs - 1);
	for (z = 0; z < 3; z++)
	  vec->quick_push (CALL_EXPR_ARG (exp, z));
	/* Skip the boolean weak parameter.  */
	for (z = 4; z < 6; z++)
	  vec->quick_push (CALL_EXPR_ARG (exp, z));
	exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
	break;
      }
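      /* Illustrative sketch (not part of GCC): the source-level form

	   __atomic_compare_exchange_n (p, &expected, desired, weak, s, f)

	 carries a WEAK flag, but the external library routine takes only
	 (p, &expected, desired, s, f), so the rebuilt call above pushes
	 arguments 0-2 and 4-5 and drops the boolean at index 3.  */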
    case BUILT_IN_ATOMIC_LOAD_1:
    case BUILT_IN_ATOMIC_LOAD_2:
    case BUILT_IN_ATOMIC_LOAD_4:
    case BUILT_IN_ATOMIC_LOAD_8:
    case BUILT_IN_ATOMIC_LOAD_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
      target = expand_builtin_atomic_load (mode, exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_STORE_1:
    case BUILT_IN_ATOMIC_STORE_2:
    case BUILT_IN_ATOMIC_STORE_4:
    case BUILT_IN_ATOMIC_STORE_8:
    case BUILT_IN_ATOMIC_STORE_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
      target = expand_builtin_atomic_store (mode, exp);
      if (target)
	return const0_rtx;
      break;
    case BUILT_IN_ATOMIC_ADD_FETCH_1:
    case BUILT_IN_ATOMIC_ADD_FETCH_2:
    case BUILT_IN_ATOMIC_ADD_FETCH_4:
    case BUILT_IN_ATOMIC_ADD_FETCH_8:
    case BUILT_IN_ATOMIC_ADD_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
				       (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }

    case BUILT_IN_ATOMIC_SUB_FETCH_1:
    case BUILT_IN_ATOMIC_SUB_FETCH_2:
    case BUILT_IN_ATOMIC_SUB_FETCH_4:
    case BUILT_IN_ATOMIC_SUB_FETCH_8:
    case BUILT_IN_ATOMIC_SUB_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
				       (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }

    case BUILT_IN_ATOMIC_AND_FETCH_1:
    case BUILT_IN_ATOMIC_AND_FETCH_2:
    case BUILT_IN_ATOMIC_AND_FETCH_4:
    case BUILT_IN_ATOMIC_AND_FETCH_8:
    case BUILT_IN_ATOMIC_AND_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
				       (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }

    case BUILT_IN_ATOMIC_NAND_FETCH_1:
    case BUILT_IN_ATOMIC_NAND_FETCH_2:
    case BUILT_IN_ATOMIC_NAND_FETCH_4:
    case BUILT_IN_ATOMIC_NAND_FETCH_8:
    case BUILT_IN_ATOMIC_NAND_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
				       (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }

    case BUILT_IN_ATOMIC_XOR_FETCH_1:
    case BUILT_IN_ATOMIC_XOR_FETCH_2:
    case BUILT_IN_ATOMIC_XOR_FETCH_4:
    case BUILT_IN_ATOMIC_XOR_FETCH_8:
    case BUILT_IN_ATOMIC_XOR_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
				       (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }

    case BUILT_IN_ATOMIC_OR_FETCH_1:
    case BUILT_IN_ATOMIC_OR_FETCH_2:
    case BUILT_IN_ATOMIC_OR_FETCH_4:
    case BUILT_IN_ATOMIC_OR_FETCH_8:
    case BUILT_IN_ATOMIC_OR_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
				       (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }
    case BUILT_IN_ATOMIC_FETCH_ADD_1:
    case BUILT_IN_ATOMIC_FETCH_ADD_2:
    case BUILT_IN_ATOMIC_FETCH_ADD_4:
    case BUILT_IN_ATOMIC_FETCH_ADD_8:
    case BUILT_IN_ATOMIC_FETCH_ADD_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_SUB_1:
    case BUILT_IN_ATOMIC_FETCH_SUB_2:
    case BUILT_IN_ATOMIC_FETCH_SUB_4:
    case BUILT_IN_ATOMIC_FETCH_SUB_8:
    case BUILT_IN_ATOMIC_FETCH_SUB_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_AND_1:
    case BUILT_IN_ATOMIC_FETCH_AND_2:
    case BUILT_IN_ATOMIC_FETCH_AND_4:
    case BUILT_IN_ATOMIC_FETCH_AND_8:
    case BUILT_IN_ATOMIC_FETCH_AND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_NAND_1:
    case BUILT_IN_ATOMIC_FETCH_NAND_2:
    case BUILT_IN_ATOMIC_FETCH_NAND_4:
    case BUILT_IN_ATOMIC_FETCH_NAND_8:
    case BUILT_IN_ATOMIC_FETCH_NAND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_XOR_1:
    case BUILT_IN_ATOMIC_FETCH_XOR_2:
    case BUILT_IN_ATOMIC_FETCH_XOR_4:
    case BUILT_IN_ATOMIC_FETCH_XOR_8:
    case BUILT_IN_ATOMIC_FETCH_XOR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_OR_1:
    case BUILT_IN_ATOMIC_FETCH_OR_2:
    case BUILT_IN_ATOMIC_FETCH_OR_4:
    case BUILT_IN_ATOMIC_FETCH_OR_8:
    case BUILT_IN_ATOMIC_FETCH_OR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;
    case BUILT_IN_ATOMIC_TEST_AND_SET:
      return expand_builtin_atomic_test_and_set (exp, target);

    case BUILT_IN_ATOMIC_CLEAR:
      return expand_builtin_atomic_clear (exp);

    case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
      return expand_builtin_atomic_always_lock_free (exp);

    case BUILT_IN_ATOMIC_IS_LOCK_FREE:
      target = expand_builtin_atomic_is_lock_free (exp);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_THREAD_FENCE:
      expand_builtin_atomic_thread_fence (exp);
      return const0_rtx;

    case BUILT_IN_ATOMIC_SIGNAL_FENCE:
      expand_builtin_atomic_signal_fence (exp);
      return const0_rtx;

    case BUILT_IN_OBJECT_SIZE:
      return expand_builtin_object_size (exp);
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      target = expand_builtin_memory_chk (exp, target, mode, fcode);
      if (target)
	return target;
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
    case BUILT_IN_STRCAT_CHK:
    case BUILT_IN_STRNCAT_CHK:
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      maybe_emit_chk_warning (exp, fcode);
      break;

    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      maybe_emit_sprintf_chk_warning (exp, fcode);
      break;

    case BUILT_IN_FREE:
      if (warn_free_nonheap_object)
	maybe_emit_free_warning (exp);
      break;

    case BUILT_IN_THREAD_POINTER:
      return expand_builtin_thread_pointer (exp, target);

    case BUILT_IN_SET_THREAD_POINTER:
      expand_builtin_set_thread_pointer (exp);
      return const0_rtx;

    case BUILT_IN_CILK_DETACH:
      expand_builtin_cilk_detach (exp);
      return const0_rtx;

    case BUILT_IN_CILK_POP_FRAME:
      expand_builtin_cilk_pop_frame (exp);
      return const0_rtx;

    default:	/* just do library call, if unknown builtin */
      break;
    }

  /* The switch statement above can drop through to cause the function
     to be called normally.  */
  return expand_call (exp, target, ignore);
}
/* Determine whether a tree node represents a call to a built-in
   function.  If the tree T is a call to a built-in function with
   the right number of arguments of the appropriate types, return
   the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
   Otherwise the return value is END_BUILTINS.  */

enum built_in_function
builtin_mathfn_code (const_tree t)
{
  const_tree fndecl, arg, parmlist;
  const_tree argtype, parmtype;
  const_call_expr_arg_iterator iter;

  if (TREE_CODE (t) != CALL_EXPR
      || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
    return END_BUILTINS;

  fndecl = get_callee_fndecl (t);
  if (fndecl == NULL_TREE
      || TREE_CODE (fndecl) != FUNCTION_DECL
      || ! DECL_BUILT_IN (fndecl)
      || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return END_BUILTINS;

  parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  init_const_call_expr_arg_iterator (t, &iter);
  for (; parmlist; parmlist = TREE_CHAIN (parmlist))
    {
      /* If a function doesn't take a variable number of arguments,
	 the last element in the list will have type `void'.  */
      parmtype = TREE_VALUE (parmlist);
      if (VOID_TYPE_P (parmtype))
	{
	  if (more_const_call_expr_args_p (&iter))
	    return END_BUILTINS;
	  return DECL_FUNCTION_CODE (fndecl);
	}

      if (! more_const_call_expr_args_p (&iter))
	return END_BUILTINS;

      arg = next_const_call_expr_arg (&iter);
      argtype = TREE_TYPE (arg);

      if (SCALAR_FLOAT_TYPE_P (parmtype))
	{
	  if (! SCALAR_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (COMPLEX_FLOAT_TYPE_P (parmtype))
	{
	  if (! COMPLEX_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (POINTER_TYPE_P (parmtype))
	{
	  if (! POINTER_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (INTEGRAL_TYPE_P (parmtype))
	{
	  if (! INTEGRAL_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else
	return END_BUILTINS;
    }

  /* Variable-length argument list.  */
  return DECL_FUNCTION_CODE (fndecl);
}
/* Fold a call to __builtin_constant_p, if we know its argument ARG will
   evaluate to a constant.  */

static tree
fold_builtin_constant_p (tree arg)
{
  /* We return 1 for a numeric type that's known to be a constant
     value at compile-time or for an aggregate type that's a
     literal constant.  */
  STRIP_NOPS (arg);

  /* If we know this is a constant, return the constant one.  */
  if (CONSTANT_CLASS_P (arg)
      || (TREE_CODE (arg) == CONSTRUCTOR
	  && TREE_CONSTANT (arg)))
    return integer_one_node;
  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (arg, 0);
      if (TREE_CODE (op) == STRING_CST
	  || (TREE_CODE (op) == ARRAY_REF
	      && integer_zerop (TREE_OPERAND (op, 1))
	      && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
	return integer_one_node;
    }

  /* If this expression has side effects, show we don't know it to be a
     constant.  Likewise if it's a pointer or aggregate type since in
     those cases we only want literals, since those are only optimized
     when generating RTL, not later.
     And finally, if we are compiling an initializer, not code, we
     need to return a definite result now; there's not going to be any
     more optimization done.  */
  if (TREE_SIDE_EFFECTS (arg)
      || AGGREGATE_TYPE_P (TREE_TYPE (arg))
      || POINTER_TYPE_P (TREE_TYPE (arg))
      || cfun == 0
      || folding_initializer
      || force_folding_builtin_constant_p)
    return integer_zero_node;

  return NULL_TREE;
}
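/* Illustrative sketch (not part of GCC): after this folding,

     __builtin_constant_p (42)       -> 1
     __builtin_constant_p ("abc")    -> 1  (ADDR_EXPR of a STRING_CST)
     __builtin_constant_p (x + y)    -> 0 once no later pass can help

   with the last answer forced early when folding_initializer or
   force_folding_builtin_constant_p demands a definite result.  */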
/* Create builtin_expect with PRED and EXPECTED as its arguments and
   return it as a truthvalue.  */

static tree
build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
				tree predictor)
{
  tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;

  fn = builtin_decl_explicit (BUILT_IN_EXPECT);
  arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
  ret_type = TREE_TYPE (TREE_TYPE (fn));
  pred_type = TREE_VALUE (arg_types);
  expected_type = TREE_VALUE (TREE_CHAIN (arg_types));

  pred = fold_convert_loc (loc, pred_type, pred);
  expected = fold_convert_loc (loc, expected_type, expected);
  call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
				   predictor);

  return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
		 build_int_cst (ret_type, 0));
}
/* Fold a call to builtin_expect with arguments ARG0 and ARG1.  Return
   NULL_TREE if no simplification is possible.  */

static tree
fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
{
  tree inner, fndecl, inner_arg0;
  enum tree_code code;

  /* Distribute the expected value over short-circuiting operators.
     See through the cast from truthvalue_type_node to long.  */
  inner_arg0 = arg0;
  while (TREE_CODE (inner_arg0) == NOP_EXPR
	 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
    inner_arg0 = TREE_OPERAND (inner_arg0, 0);

  /* If this is a builtin_expect within a builtin_expect keep the
     inner one.  See through a comparison against a constant.  It
     might have been added to create a truthvalue.  */
  inner = inner_arg0;

  if (COMPARISON_CLASS_P (inner)
      && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
    inner = TREE_OPERAND (inner, 0);

  if (TREE_CODE (inner) == CALL_EXPR
      && (fndecl = get_callee_fndecl (inner))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
    return arg0;

  inner = inner_arg0;
  code = TREE_CODE (inner);
  if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
    {
      tree op0 = TREE_OPERAND (inner, 0);
      tree op1 = TREE_OPERAND (inner, 1);

      op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
      op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
      inner = build2 (code, TREE_TYPE (inner), op0, op1);

      return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
    }

  /* If the argument isn't invariant then there's nothing else we can do.  */
  if (!TREE_CONSTANT (inner_arg0))
    return NULL_TREE;

  /* If we expect that a comparison against the argument will fold to
     a constant return the constant.  In practice, this means a true
     constant or the address of a non-weak symbol.  */
  inner = inner_arg0;
  STRIP_NOPS (inner);
  if (TREE_CODE (inner) == ADDR_EXPR)
    {
      do
	{
	  inner = TREE_OPERAND (inner, 0);
	}
      while (TREE_CODE (inner) == COMPONENT_REF
	     || TREE_CODE (inner) == ARRAY_REF);
      if ((TREE_CODE (inner) == VAR_DECL
	   || TREE_CODE (inner) == FUNCTION_DECL)
	  && DECL_WEAK (inner))
	return NULL_TREE;
    }

  /* Otherwise, ARG0 already has the proper type for the return value.  */
  return arg0;
}
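/* Illustrative sketch (not part of GCC): the distribution step above
   rewrites

     __builtin_expect (a && b, 1)

   into roughly

     __builtin_expect (a, 1) && __builtin_expect (b, 1)

   so each short-circuit arm keeps its own branch prediction.  */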
/* Fold a call to __builtin_classify_type with argument ARG.  */

static tree
fold_builtin_classify_type (tree arg)
{
  if (arg == 0)
    return build_int_cst (integer_type_node, no_type_class);

  return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
}

/* Fold a call to __builtin_strlen with argument ARG.  */

static tree
fold_builtin_strlen (location_t loc, tree type, tree arg)
{
  if (!validate_arg (arg, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree len = c_strlen (arg, 0);

      if (len)
	return fold_convert_loc (loc, type, len);

      return NULL_TREE;
    }
}
/* Fold a call to __builtin_inf or __builtin_huge_val.  */

static tree
fold_builtin_inf (location_t loc, tree type, int warn)
{
  REAL_VALUE_TYPE real;

  /* __builtin_inff is intended to be usable to define INFINITY on all
     targets.  If an infinity is not available, INFINITY expands "to a
     positive constant of type float that overflows at translation
     time", footnote "In this case, using INFINITY will violate the
     constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
     Thus we pedwarn to ensure this constraint violation is
     diagnosed.  */
  if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
    pedwarn (loc, 0, "target format does not support infinity");

  real_inf (&real);
  return build_real (type, real);
}
/* Fold a call to __builtin_nan or __builtin_nans with argument ARG.  */

static tree
fold_builtin_nan (tree arg, tree type, int quiet)
{
  REAL_VALUE_TYPE real;
  const char *str;

  if (!validate_arg (arg, POINTER_TYPE))
    return NULL_TREE;
  str = c_getstr (arg);
  if (!str)
    return NULL_TREE;

  if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
    return NULL_TREE;

  return build_real (type, real);
}
/* Return true if the floating point expression T has an integer value.
   We also allow +Inf, -Inf and NaN to be considered integer values.  */

static bool
integer_valued_real_p (tree t)
{
  switch (TREE_CODE (t))
    {
    case FLOAT_EXPR:
      return true;

    case ABS_EXPR:
    case SAVE_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 0));

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 1));

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 0))
	     && integer_valued_real_p (TREE_OPERAND (t, 1));

    case COND_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 1))
	     && integer_valued_real_p (TREE_OPERAND (t, 2));

    case REAL_CST:
      return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));

    case NOP_EXPR:
      {
	tree type = TREE_TYPE (TREE_OPERAND (t, 0));
	if (TREE_CODE (type) == INTEGER_TYPE)
	  return true;
	if (TREE_CODE (type) == REAL_TYPE)
	  return integer_valued_real_p (TREE_OPERAND (t, 0));
	break;
      }

    case CALL_EXPR:
      switch (builtin_mathfn_code (t))
	{
	CASE_FLT_FN (BUILT_IN_CEIL):
	CASE_FLT_FN (BUILT_IN_FLOOR):
	CASE_FLT_FN (BUILT_IN_NEARBYINT):
	CASE_FLT_FN (BUILT_IN_RINT):
	CASE_FLT_FN (BUILT_IN_ROUND):
	CASE_FLT_FN (BUILT_IN_TRUNC):
	  return true;

	CASE_FLT_FN (BUILT_IN_FMIN):
	CASE_FLT_FN (BUILT_IN_FMAX):
	  return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
		 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));

	default:
	  break;
	}
      break;

    default:
      break;
    }
  return false;
}
/* FNDECL is assumed to be a builtin where truncation can be propagated
   across (for instance floor((double)f) == (double)floorf (f)).
   Do the transformation for a call with argument ARG.  */

static tree
fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Integer rounding functions are idempotent.  */
  if (fcode == builtin_mathfn_code (arg))
    return arg;

  /* If argument is already integer valued, and we don't need to worry
     about setting errno, there's no need to perform rounding.  */
  if (! flag_errno_math && integer_valued_real_p (arg))
    return arg;

  if (optimize)
    {
      tree arg0 = strip_float_extensions (arg);
      tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
      tree newtype = TREE_TYPE (arg0);
      tree decl;

      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
	  && (decl = mathfn_built_in (newtype, fcode)))
	return fold_convert_loc (loc, ftype,
				 build_call_expr_loc (loc, decl, 1,
						      fold_convert_loc (loc,
									newtype,
									arg0)));
    }
  return NULL_TREE;
}
/* FNDECL is assumed to be builtin which can narrow the FP type of
   the argument, for instance lround((double)f) -> lroundf (f).
   Do the transformation for a call with argument ARG.  */

static tree
fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* If argument is already integer valued, and we don't need to worry
     about setting errno, there's no need to perform rounding.  */
  if (! flag_errno_math && integer_valued_real_p (arg))
    return fold_build1_loc (loc, FIX_TRUNC_EXPR,
			    TREE_TYPE (TREE_TYPE (fndecl)), arg);

  if (optimize)
    {
      tree ftype = TREE_TYPE (arg);
      tree arg0 = strip_float_extensions (arg);
      tree newtype = TREE_TYPE (arg0);
      tree decl;

      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
	  && (decl = mathfn_built_in (newtype, fcode)))
	return build_call_expr_loc (loc, decl, 1,
				    fold_convert_loc (loc, newtype, arg0));
    }

  /* Canonicalize iround (x) to lround (x) on ILP32 targets where
     sizeof (int) == sizeof (long).  */
  if (TYPE_PRECISION (integer_type_node)
      == TYPE_PRECISION (long_integer_type_node))
    {
      tree newfn = NULL_TREE;
      switch (fcode)
	{
	CASE_FLT_FN (BUILT_IN_ICEIL):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
	  break;

	CASE_FLT_FN (BUILT_IN_IFLOOR):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
	  break;

	CASE_FLT_FN (BUILT_IN_IROUND):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
	  break;

	CASE_FLT_FN (BUILT_IN_IRINT):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
	  break;

	default:
	  break;
	}

      if (newfn)
	{
	  tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
	  return fold_convert_loc (loc,
				   TREE_TYPE (TREE_TYPE (fndecl)), newcall);
	}
    }

  /* Canonicalize llround (x) to lround (x) on LP64 targets where
     sizeof (long long) == sizeof (long).  */
  if (TYPE_PRECISION (long_long_integer_type_node)
      == TYPE_PRECISION (long_integer_type_node))
    {
      tree newfn = NULL_TREE;
      switch (fcode)
	{
	CASE_FLT_FN (BUILT_IN_LLCEIL):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
	  break;

	CASE_FLT_FN (BUILT_IN_LLFLOOR):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
	  break;

	CASE_FLT_FN (BUILT_IN_LLROUND):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
	  break;

	CASE_FLT_FN (BUILT_IN_LLRINT):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
	  break;

	default:
	  break;
	}

      if (newfn)
	{
	  tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
	  return fold_convert_loc (loc,
				   TREE_TYPE (TREE_TYPE (fndecl)), newcall);
	}
    }

  return NULL_TREE;
}
/* Fold call to builtin cabs, cabsf or cabsl with argument ARG.  TYPE is the
   return type.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
{
  tree res;

  if (!validate_arg (arg, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if (TREE_CODE (arg) == COMPLEX_CST
      && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
			      type, mpfr_hypot)))
    return res;

  if (TREE_CODE (arg) == COMPLEX_EXPR)
    {
      tree real = TREE_OPERAND (arg, 0);
      tree imag = TREE_OPERAND (arg, 1);

      /* If either part is zero, cabs is fabs of the other.  */
      if (real_zerop (real))
	return fold_build1_loc (loc, ABS_EXPR, type, imag);
      if (real_zerop (imag))
	return fold_build1_loc (loc, ABS_EXPR, type, real);

      /* cabs(x+xi) -> fabs(x)*sqrt(2).  */
      if (flag_unsafe_math_optimizations
	  && operand_equal_p (real, imag, OEP_PURE_SAME))
	{
	  const REAL_VALUE_TYPE sqrt2_trunc
	    = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
	  return fold_build2_loc (loc, MULT_EXPR, type,
				  fold_build1_loc (loc, ABS_EXPR, type, real),
				  build_real (type, sqrt2_trunc));
	}
    }

  /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z).  */
  if (TREE_CODE (arg) == NEGATE_EXPR
      || TREE_CODE (arg) == CONJ_EXPR)
    return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));

  /* Don't do this when optimizing for size.  */
  if (flag_unsafe_math_optimizations
      && optimize && optimize_function_for_speed_p (cfun))
    {
      tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

      if (sqrtfn != NULL_TREE)
	{
	  tree rpart, ipart, result;

	  arg = builtin_save_expr (arg);

	  rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
	  ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);

	  rpart = builtin_save_expr (rpart);
	  ipart = builtin_save_expr (ipart);

	  result = fold_build2_loc (loc, PLUS_EXPR, type,
				    fold_build2_loc (loc, MULT_EXPR, type,
						     rpart, rpart),
				    fold_build2_loc (loc, MULT_EXPR, type,
						     ipart, ipart));

	  return build_call_expr_loc (loc, sqrtfn, 1, result);
	}
    }

  return NULL_TREE;
}
/* Build a complex (inf +- 0i) for the result of cproj.  TYPE is the
   complex tree type of the result.  If NEG is true, the imaginary
   zero is negative.  */

static tree
build_complex_cproj (tree type, bool neg)
{
  REAL_VALUE_TYPE rinf, rzero = dconst0;

  real_inf (&rinf);
  rzero.sign = neg;
  return build_complex (type, build_real (TREE_TYPE (type), rinf),
			build_real (TREE_TYPE (type), rzero));
}
/* Fold call to builtin cproj, cprojf or cprojl with argument ARG.  TYPE is the
   return type.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cproj (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
    return NULL_TREE;

  /* If there are no infinities, return arg.  */
  if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
    return non_lvalue_loc (loc, arg);

  /* Calculate the result when the argument is a constant.  */
  if (TREE_CODE (arg) == COMPLEX_CST)
    {
      const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      if (real_isinf (real) || real_isinf (imag))
	return build_complex_cproj (type, imag->sign);
      else
	return arg;
    }
  else if (TREE_CODE (arg) == COMPLEX_EXPR)
    {
      tree real = TREE_OPERAND (arg, 0);
      tree imag = TREE_OPERAND (arg, 1);

      STRIP_NOPS (real);
      STRIP_NOPS (imag);

      /* If the real part is inf and the imag part is known to be
	 nonnegative, return (inf + 0i).  Remember side-effects are
	 possible in the imag part.  */
      if (TREE_CODE (real) == REAL_CST
	  && real_isinf (TREE_REAL_CST_PTR (real))
	  && tree_expr_nonnegative_p (imag))
	return omit_one_operand_loc (loc, type,
				     build_complex_cproj (type, false),
				     arg);

      /* If the imag part is inf, return (inf+I*copysign(0,imag)).
	 Remember side-effects are possible in the real part.  */
      if (TREE_CODE (imag) == REAL_CST
	  && real_isinf (TREE_REAL_CST_PTR (imag)))
	return
	  omit_one_operand_loc (loc, type,
				build_complex_cproj (type, TREE_REAL_CST_PTR
						     (imag)->sign), arg);
    }

  return NULL_TREE;
}
/* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_sqrt (location_t loc, tree arg, tree type)
{
  tree res;
  enum built_in_function fcode;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
    return res;

  /* Optimize sqrt(expN(x)) = expN(x*0.5).  */
  fcode = builtin_mathfn_code (arg);
  if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
    {
      tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      arg = fold_build2_loc (loc, MULT_EXPR, type,
			     CALL_EXPR_ARG (arg, 0),
			     build_real (type, dconsthalf));
      return build_call_expr_loc (loc, expfn, 1, arg);
    }

  /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)).  */
  if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
    {
      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

      if (powfn)
	{
	  tree arg0 = CALL_EXPR_ARG (arg, 0);
	  tree tree_root;
	  /* The inner root was either sqrt or cbrt.  */
	  /* This was a conditional expression but it triggered a bug
	     in Sun C 5.5.  */
	  REAL_VALUE_TYPE dconstroot;
	  if (BUILTIN_SQRT_P (fcode))
	    dconstroot = dconsthalf;
	  else
	    dconstroot = dconst_third ();

	  /* Adjust for the outer root.  */
	  SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
	  dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
	  tree_root = build_real (type, dconstroot);
	  return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
	}
    }

  /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5).  */
  if (flag_unsafe_math_optimizations
      && (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL))
    {
      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      tree arg0 = CALL_EXPR_ARG (arg, 0);
      tree arg1 = CALL_EXPR_ARG (arg, 1);
      tree narg1;
      if (!tree_expr_nonnegative_p (arg0))
	arg0 = build1 (ABS_EXPR, type, arg0);
      narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
			       build_real (type, dconsthalf));
      return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
    }

  return NULL_TREE;
}
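/* Illustrative sketch (not part of GCC): with
   -funsafe-math-optimizations the rewrites above give, for example,

     sqrt (exp (x))     -> exp (x * 0.5)
     sqrt (pow (x, y))  -> pow (fabs (x), y * 0.5)

   trading a sqrt call for cheaper exponent arithmetic at some cost in
   accuracy, which is why they are gated on the unsafe-math flag.  */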
/* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cbrt (location_t loc, tree arg, tree type)
{
  const enum built_in_function fcode = builtin_mathfn_code (arg);
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
    return res;

  if (flag_unsafe_math_optimizations)
    {
      /* Optimize cbrt(expN(x)) -> expN(x/3).  */
      if (BUILTIN_EXPONENT_P (fcode))
	{
	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
	  const REAL_VALUE_TYPE third_trunc =
	    real_value_truncate (TYPE_MODE (type), dconst_third ());
	  arg = fold_build2_loc (loc, MULT_EXPR, type,
				 CALL_EXPR_ARG (arg, 0),
				 build_real (type, third_trunc));
	  return build_call_expr_loc (loc, expfn, 1, arg);
	}

      /* Optimize cbrt(sqrt(x)) -> pow(x,1/6).  */
      if (BUILTIN_SQRT_P (fcode))
	{
	  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

	  if (powfn)
	    {
	      tree arg0 = CALL_EXPR_ARG (arg, 0);
	      tree tree_root;
	      REAL_VALUE_TYPE dconstroot = dconst_third ();

	      SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
	      dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
	      tree_root = build_real (type, dconstroot);
	      return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
	    }
	}

      /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
	{
	  tree arg0 = CALL_EXPR_ARG (arg, 0);
	  if (tree_expr_nonnegative_p (arg0))
	    {
	      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

	      if (powfn)
		{
		  tree tree_root;
		  REAL_VALUE_TYPE dconstroot;

		  real_arithmetic (&dconstroot, MULT_EXPR,
				   dconst_third_ptr (), dconst_third_ptr ());
		  dconstroot = real_value_truncate (TYPE_MODE (type),
						    dconstroot);
		  tree_root = build_real (type, dconstroot);
		  return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
		}
	    }
	}

      /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL)
	{
	  tree arg00 = CALL_EXPR_ARG (arg, 0);
	  tree arg01 = CALL_EXPR_ARG (arg, 1);
	  if (tree_expr_nonnegative_p (arg00))
	    {
	      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
	      const REAL_VALUE_TYPE dconstroot
		= real_value_truncate (TYPE_MODE (type), dconst_third ());
	      tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
					     build_real (type, dconstroot));
	      return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
	    }
	}
    }
  return NULL_TREE;
}
/* Fold function call to builtin cos, cosf, or cosl with argument ARG.
   TYPE is the type of the return value.  Return NULL_TREE if no
   simplification can be made.  */

static tree
fold_builtin_cos (location_t loc,
		  tree arg, tree type, tree fndecl)
{
  tree res, narg;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
    return res;

  /* Optimize cos(-x) into cos (x).  */
  if ((narg = fold_strip_sign_ops (arg)))
    return build_call_expr_loc (loc, fndecl, 1, narg);

  return NULL_TREE;
}

/* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree res, narg;

      /* Calculate the result when the argument is a constant.  */
      if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
	return res;

      /* Optimize cosh(-x) into cosh (x).  */
      if ((narg = fold_strip_sign_ops (arg)))
	return build_call_expr_loc (loc, fndecl, 1, narg);
    }

  return NULL_TREE;
}
/* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
   argument ARG.  TYPE is the type of the return value.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
		   bool hyper)
{
  if (validate_arg (arg, COMPLEX_TYPE)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
    {
      tree tmp;

      /* Calculate the result when the argument is a constant.  */
      if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
	return tmp;

      /* Optimize fn(-x) into fn(x).  */
      if ((tmp = fold_strip_sign_ops (arg)))
	return build_call_expr_loc (loc, fndecl, 1, tmp);
    }

  return NULL_TREE;
}
/* Fold function call to builtin tan, tanf, or tanl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_tan (tree arg, tree type)
{
  enum built_in_function fcode;
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
    return res;

  /* Optimize tan(atan(x)) = x.  */
  fcode = builtin_mathfn_code (arg);
  if (flag_unsafe_math_optimizations
      && (fcode == BUILT_IN_ATAN
	  || fcode == BUILT_IN_ATANF
	  || fcode == BUILT_IN_ATANL))
    return CALL_EXPR_ARG (arg, 0);

  return NULL_TREE;
}
/* Fold function call to builtin sincos, sincosf, or sincosl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_sincos (location_t loc,
		     tree arg0, tree arg1, tree arg2)
{
  tree type;
  tree res, fn, call;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  type = TREE_TYPE (arg0);

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
    return res;

  /* Canonicalize sincos to cexpi.  */
  if (!targetm.libc_has_function (function_c99_math_complex))
    return NULL_TREE;
  fn = mathfn_built_in (type, BUILT_IN_CEXPI);
  if (!fn)
    return NULL_TREE;

  call = build_call_expr_loc (loc, fn, 1, arg0);
  call = builtin_save_expr (call);

  return build2 (COMPOUND_EXPR, void_type_node,
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg1),
			 build1 (IMAGPART_EXPR, type, call)),
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg2),
			 build1 (REALPART_EXPR, type, call)));
}
/* Fold function call to builtin cexp, cexpf, or cexpl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cexp (location_t loc, tree arg0, tree type)
{
  tree rtype;
  tree realp, imagp, ifn;
  tree res;

  if (!validate_arg (arg0, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
    return res;

  rtype = TREE_TYPE (TREE_TYPE (arg0));

  /* In case we can figure out the real part of arg0 and it is constant zero
     fold to cexpi.  */
  if (!targetm.libc_has_function (function_c99_math_complex))
    return NULL_TREE;
  ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
  if (!ifn)
    return NULL_TREE;

  if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
      && real_zerop (realp))
    {
      tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
      return build_call_expr_loc (loc, ifn, 1, narg);
    }

  /* In case we can easily decompose real and imaginary parts split cexp
     to exp (r) * cexpi (i).  */
  if (flag_unsafe_math_optimizations
      && realp)
    {
      tree rfn, rcall, icall;

      rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
      if (!rfn)
	return NULL_TREE;

      imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
      if (!imagp)
	return NULL_TREE;

      icall = build_call_expr_loc (loc, ifn, 1, imagp);
      icall = builtin_save_expr (icall);
      rcall = build_call_expr_loc (loc, rfn, 1, realp);
      rcall = builtin_save_expr (rcall);
      return fold_build2_loc (loc, COMPLEX_EXPR, type,
			      fold_build2_loc (loc, MULT_EXPR, rtype,
					       rcall,
					       fold_build1_loc (loc, REALPART_EXPR,
								rtype, icall)),
			      fold_build2_loc (loc, MULT_EXPR, rtype,
					       rcall,
					       fold_build1_loc (loc, IMAGPART_EXPR,
								rtype, icall)));
    }

  return NULL_TREE;
}
/* Fold function call to builtin trunc, truncf or truncl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize trunc of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE r, x;
      tree type = TREE_TYPE (TREE_TYPE (fndecl));

      x = TREE_REAL_CST (arg);
      real_trunc (&r, TYPE_MODE (type), &x);
      return build_real (type, r);
    }

  return fold_trunc_transparent_mathfn (loc, fndecl, arg);
}
/* Fold function call to builtin floor, floorf or floorl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_floor (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize floor of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE x;

      x = TREE_REAL_CST (arg);
      if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
	{
	  tree type = TREE_TYPE (TREE_TYPE (fndecl));
	  REAL_VALUE_TYPE r;

	  real_floor (&r, TYPE_MODE (type), &x);
	  return build_real (type, r);
	}
    }

  /* Fold floor (x) where x is nonnegative to trunc (x).  */
  if (tree_expr_nonnegative_p (arg))
    {
      tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
      if (truncfn)
	return build_call_expr_loc (loc, truncfn, 1, arg);
    }

  return fold_trunc_transparent_mathfn (loc, fndecl, arg);
}
/* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize ceil of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE x;

      x = TREE_REAL_CST (arg);
      if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
	{
	  tree type = TREE_TYPE (TREE_TYPE (fndecl));
	  REAL_VALUE_TYPE r;

	  real_ceil (&r, TYPE_MODE (type), &x);
	  return build_real (type, r);
	}
    }

  return fold_trunc_transparent_mathfn (loc, fndecl, arg);
}
/* Fold function call to builtin round, roundf or roundl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_round (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize round of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE x;

      x = TREE_REAL_CST (arg);
      if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
	{
	  tree type = TREE_TYPE (TREE_TYPE (fndecl));
	  REAL_VALUE_TYPE r;

	  real_round (&r, TYPE_MODE (type), &x);
	  return build_real (type, r);
	}
    }

  return fold_trunc_transparent_mathfn (loc, fndecl, arg);
}
/* Fold function call to builtin lround, lroundf or lroundl (or the
   corresponding long long versions) and other rounding functions.  ARG
   is the argument to the call.  Return NULL_TREE if no simplification
   can be made.  */

static tree
fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize lround of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);

      if (real_isfinite (&x))
	{
	  tree itype = TREE_TYPE (TREE_TYPE (fndecl));
	  tree ftype = TREE_TYPE (arg);
	  REAL_VALUE_TYPE r;
	  bool fail = false;

	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_IFLOOR):
	    CASE_FLT_FN (BUILT_IN_LFLOOR):
	    CASE_FLT_FN (BUILT_IN_LLFLOOR):
	      real_floor (&r, TYPE_MODE (ftype), &x);
	      break;

	    CASE_FLT_FN (BUILT_IN_ICEIL):
	    CASE_FLT_FN (BUILT_IN_LCEIL):
	    CASE_FLT_FN (BUILT_IN_LLCEIL):
	      real_ceil (&r, TYPE_MODE (ftype), &x);
	      break;

	    CASE_FLT_FN (BUILT_IN_IROUND):
	    CASE_FLT_FN (BUILT_IN_LROUND):
	    CASE_FLT_FN (BUILT_IN_LLROUND):
	      real_round (&r, TYPE_MODE (ftype), &x);
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  wide_int val = real_to_integer (&r, &fail, TYPE_PRECISION (itype));
	  if (!fail)
	    return wide_int_to_tree (itype, val);
	}
    }

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x).  */
      if (tree_expr_nonnegative_p (arg))
	return fold_build1_loc (loc, FIX_TRUNC_EXPR,
				TREE_TYPE (TREE_TYPE (fndecl)), arg);
      break;
    default:;
    }

  return fold_fixed_mathfn (loc, fndecl, arg);
}
/* Fold function call to builtin ffs, clz, ctz, popcount and parity
   and their long and long long variants (i.e. ffsl and ffsll).  ARG is
   the argument to the call.  Return NULL_TREE if no simplification can
   be made.  */

static tree
fold_builtin_bitop (tree fndecl, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize for constant argument.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      tree type = TREE_TYPE (arg);
      int result;

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	CASE_INT_FN (BUILT_IN_FFS):
	  result = wi::ffs (arg);
	  break;

	CASE_INT_FN (BUILT_IN_CLZ):
	  if (wi::ne_p (arg, 0))
	    result = wi::clz (arg);
	  else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
	    result = TYPE_PRECISION (type);
	  break;

	CASE_INT_FN (BUILT_IN_CTZ):
	  if (wi::ne_p (arg, 0))
	    result = wi::ctz (arg);
	  else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
	    result = TYPE_PRECISION (type);
	  break;

	CASE_INT_FN (BUILT_IN_CLRSB):
	  result = wi::clrsb (arg);
	  break;

	CASE_INT_FN (BUILT_IN_POPCOUNT):
	  result = wi::popcount (arg);
	  break;

	CASE_INT_FN (BUILT_IN_PARITY):
	  result = wi::parity (arg);
	  break;

	default:
	  gcc_unreachable ();
	}

      return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
    }

  return NULL_TREE;
}
/* Fold function call to builtin_bswap and the short, long and long long
   variants.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_bswap (tree fndecl, tree arg)
{
  if (! validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize constant value.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_BSWAP16:
	case BUILT_IN_BSWAP32:
	case BUILT_IN_BSWAP64:
	  {
	    signop sgn = TYPE_SIGN (type);
	    return
	      wide_int_to_tree (type,
				wide_int::from (arg, TYPE_PRECISION (type),
						sgn).bswap ());
	  }
	default:
	  gcc_unreachable ();
	}
    }

  return NULL_TREE;
}
/* A subroutine of fold_builtin to fold the various logarithmic
   functions.  Return NULL_TREE if no simplification can be made.
   FUNC is the corresponding MPFR logarithm function.  */

static tree
fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
			int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));
      tree res;
      const enum built_in_function fcode = builtin_mathfn_code (arg);

      /* Calculate the result when the argument is a constant.  */
      if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
	return res;

      /* Special case, optimize logN(expN(x)) = x.  */
      if (flag_unsafe_math_optimizations
	  && ((func == mpfr_log
	       && (fcode == BUILT_IN_EXP
		   || fcode == BUILT_IN_EXPF
		   || fcode == BUILT_IN_EXPL))
	      || (func == mpfr_log2
		  && (fcode == BUILT_IN_EXP2
		      || fcode == BUILT_IN_EXP2F
		      || fcode == BUILT_IN_EXP2L))
	      || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
	return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));

      /* Optimize logN(func()) for various exponential functions.  We
	 want to determine the value "x" and the power "exponent" in
	 order to transform logN(x**exponent) into exponent*logN(x).  */
      if (flag_unsafe_math_optimizations)
	{
	  tree exponent = 0, x = 0;

	  switch (fcode)
	    {
	    CASE_FLT_FN (BUILT_IN_EXP):
	      /* Prepare to do logN(exp(exponent) -> exponent*logN(e).  */
	      x = build_real (type, real_value_truncate (TYPE_MODE (type),
							 dconst_e ()));
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_EXP2):
	      /* Prepare to do logN(exp2(exponent) -> exponent*logN(2).  */
	      x = build_real (type, dconst2);
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_EXP10):
	    CASE_FLT_FN (BUILT_IN_POW10):
	      /* Prepare to do logN(exp10(exponent) -> exponent*logN(10).  */
	      {
		REAL_VALUE_TYPE dconst10;
		real_from_integer (&dconst10, VOIDmode, 10, SIGNED);
		x = build_real (type, dconst10);
	      }
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_SQRT):
	      /* Prepare to do logN(sqrt(x) -> 0.5*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = build_real (type, dconsthalf);
	      break;
	    CASE_FLT_FN (BUILT_IN_CBRT):
	      /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
								dconst_third ()));
	      break;
	    CASE_FLT_FN (BUILT_IN_POW):
	      /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = CALL_EXPR_ARG (arg, 1);
	      break;
	    default:
	      break;
	    }

	  /* Now perform the optimization.  */
	  if (x && exponent)
	    {
	      tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
	      return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
	    }
	}
    }

  return NULL_TREE;
}
/* Fold a builtin function call to hypot, hypotf, or hypotl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_hypot (location_t loc, tree fndecl,
		    tree arg0, tree arg1, tree type)
{
  tree res, narg0, narg1;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
    return res;

  /* If either argument to hypot has a negate or abs, strip that off.
     E.g. hypot(-x,fabs(y)) -> hypot(x,y).  */
  narg0 = fold_strip_sign_ops (arg0);
  narg1 = fold_strip_sign_ops (arg1);
  if (narg0 || narg1)
    return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
				narg1 ? narg1 : arg1);

  /* If either argument is zero, hypot is fabs of the other.  */
  if (real_zerop (arg0))
    return fold_build1_loc (loc, ABS_EXPR, type, arg1);
  else if (real_zerop (arg1))
    return fold_build1_loc (loc, ABS_EXPR, type, arg0);

  /* hypot(x,x) -> fabs(x)*sqrt(2).  */
  if (flag_unsafe_math_optimizations
      && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
    {
      const REAL_VALUE_TYPE sqrt2_trunc
	= real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
      return fold_build2_loc (loc, MULT_EXPR, type,
			      fold_build1_loc (loc, ABS_EXPR, type, arg0),
			      build_real (type, sqrt2_trunc));
    }

  return NULL_TREE;
}
/* Fold a builtin function call to pow, powf, or powl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
{
  tree res;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
    return res;

  /* Optimize pow(1.0,y) = 1.0.  */
  if (real_onep (arg0))
    return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);

  if (TREE_CODE (arg1) == REAL_CST
      && !TREE_OVERFLOW (arg1))
    {
      REAL_VALUE_TYPE cint;
      REAL_VALUE_TYPE c;
      HOST_WIDE_INT n;

      c = TREE_REAL_CST (arg1);

      /* Optimize pow(x,0.0) = 1.0.  */
      if (REAL_VALUES_EQUAL (c, dconst0))
	return omit_one_operand_loc (loc, type, build_real (type, dconst1),
				     arg0);

      /* Optimize pow(x,1.0) = x.  */
      if (REAL_VALUES_EQUAL (c, dconst1))
	return arg0;

      /* Optimize pow(x,-1.0) = 1.0/x.  */
      if (REAL_VALUES_EQUAL (c, dconstm1))
	return fold_build2_loc (loc, RDIV_EXPR, type,
				build_real (type, dconst1), arg0);

      /* Optimize pow(x,0.5) = sqrt(x).  */
      if (flag_unsafe_math_optimizations
	  && REAL_VALUES_EQUAL (c, dconsthalf))
	{
	  tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

	  if (sqrtfn != NULL_TREE)
	    return build_call_expr_loc (loc, sqrtfn, 1, arg0);
	}

      /* Optimize pow(x,1.0/3.0) = cbrt(x).  */
      if (flag_unsafe_math_optimizations)
	{
	  const REAL_VALUE_TYPE dconstroot
	    = real_value_truncate (TYPE_MODE (type), dconst_third ());

	  if (REAL_VALUES_EQUAL (c, dconstroot))
	    {
	      tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
	      if (cbrtfn != NULL_TREE)
		return build_call_expr_loc (loc, cbrtfn, 1, arg0);
	    }
	}

      /* Check for an integer exponent.  */
      n = real_to_integer (&c);
      real_from_integer (&cint, VOIDmode, n, SIGNED);
      if (real_identical (&c, &cint))
	{
	  /* Attempt to evaluate pow at compile-time, unless this should
	     raise an exception.  */
	  if (TREE_CODE (arg0) == REAL_CST
	      && !TREE_OVERFLOW (arg0)
	      && (n > 0
		  || (!flag_trapping_math && !flag_errno_math)
		  || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
	    {
	      REAL_VALUE_TYPE x;
	      bool inexact;

	      x = TREE_REAL_CST (arg0);
	      inexact = real_powi (&x, TYPE_MODE (type), &x, n);
	      if (flag_unsafe_math_optimizations || !inexact)
		return build_real (type, x);
	    }

	  /* Strip sign ops from even integer powers.  */
	  if ((n & 1) == 0 && flag_unsafe_math_optimizations)
	    {
	      tree narg0 = fold_strip_sign_ops (arg0);
	      if (narg0)
		return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
	    }
	}
    }

  if (flag_unsafe_math_optimizations)
    {
      const enum built_in_function fcode = builtin_mathfn_code (arg0);

      /* Optimize pow(expN(x),y) = expN(x*y).  */
      if (BUILTIN_EXPONENT_P (fcode))
	{
	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
	  return build_call_expr_loc (loc, expfn, 1, arg);
	}

      /* Optimize pow(sqrt(x),y) = pow(x,y*0.5).  */
      if (BUILTIN_SQRT_P (fcode))
	{
	  tree narg0 = CALL_EXPR_ARG (arg0, 0);
	  tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
					build_real (type, dconsthalf));
	  return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
	}

      /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
	{
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  if (tree_expr_nonnegative_p (arg))
	    {
	      const REAL_VALUE_TYPE dconstroot
		= real_value_truncate (TYPE_MODE (type), dconst_third ());
	      tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
					    build_real (type, dconstroot));
	      return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
	    }
	}

      /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL)
	{
	  tree arg00 = CALL_EXPR_ARG (arg0, 0);
	  if (tree_expr_nonnegative_p (arg00))
	    {
	      tree arg01 = CALL_EXPR_ARG (arg0, 1);
	      tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
	      return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
	    }
	}
    }

  return NULL_TREE;
}
/* Fold a builtin function call to powi, powif, or powil with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
		   tree arg0, tree arg1, tree type)
{
  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize pow(1.0,y) = 1.0.  */
  if (real_onep (arg0))
    return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);

  if (tree_fits_shwi_p (arg1))
    {
      HOST_WIDE_INT c = tree_to_shwi (arg1);

      /* Evaluate powi at compile-time.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && !TREE_OVERFLOW (arg0))
	{
	  REAL_VALUE_TYPE x;
	  x = TREE_REAL_CST (arg0);
	  real_powi (&x, TYPE_MODE (type), &x, c);
	  return build_real (type, x);
	}

      /* Optimize pow(x,0) = 1.0.  */
      if (c == 0)
	return omit_one_operand_loc (loc, type, build_real (type, dconst1),
				     arg0);

      /* Optimize pow(x,1) = x.  */
      if (c == 1)
	return arg0;

      /* Optimize pow(x,-1) = 1.0/x.  */
      if (c == -1)
	return fold_build2_loc (loc, RDIV_EXPR, type,
				build_real (type, dconst1), arg0);
    }

  return NULL_TREE;
}
/* A subroutine of fold_builtin to fold the various exponent
   functions.  Return NULL_TREE if no simplification can be made.
   FUNC is the corresponding MPFR exponent function.  */

static tree
fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
		       int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));
      tree res;

      /* Calculate the result when the argument is a constant.  */
      if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
	return res;

      /* Optimize expN(logN(x)) = x.  */
      if (flag_unsafe_math_optimizations)
	{
	  const enum built_in_function fcode = builtin_mathfn_code (arg);

	  if ((func == mpfr_exp
	       && (fcode == BUILT_IN_LOG
		   || fcode == BUILT_IN_LOGF
		   || fcode == BUILT_IN_LOGL))
	      || (func == mpfr_exp2
		  && (fcode == BUILT_IN_LOG2
		      || fcode == BUILT_IN_LOG2F
		      || fcode == BUILT_IN_LOG2L))
	      || (func == mpfr_exp10
		  && (fcode == BUILT_IN_LOG10
		      || fcode == BUILT_IN_LOG10F
		      || fcode == BUILT_IN_LOG10L)))
	    return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
	}
    }

  return NULL_TREE;
}
/* Fold function call to builtin stpcpy with arguments DEST and SRC.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
{
  tree fn, len, lenp1, call, type;

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE))
    return NULL_TREE;

  len = c_strlen (src, 1);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return NULL_TREE;

  if (optimize_function_for_size_p (cfun)
      /* If length is zero it's small enough.  */
      && !integer_zerop (len))
    return NULL_TREE;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return NULL_TREE;

  lenp1 = size_binop_loc (loc, PLUS_EXPR,
			  fold_convert_loc (loc, size_type_node, len),
			  build_int_cst (size_type_node, 1));
  /* We use dest twice in building our expression.  Save it from
     multiple expansions.  */
  dest = builtin_save_expr (dest);
  call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);

  type = TREE_TYPE (TREE_TYPE (fndecl));
  dest = fold_build_pointer_plus_loc (loc, dest, len);
  dest = fold_convert_loc (loc, type, dest);
  dest = omit_one_operand_loc (loc, type, dest, call);
  return dest;
}
/* Fold function call to builtin memchr.  ARG1, ARG2 and LEN are the
   arguments to the call, and TYPE is its return type.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
{
  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, INTEGER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1;

      if (TREE_CODE (arg2) != INTEGER_CST
	  || !tree_fits_uhwi_p (len))
	return NULL_TREE;

      p1 = c_getstr (arg1);
      if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
	{
	  char c;
	  const char *r;
	  tree tem;

	  if (target_char_cast (arg2, &c))
	    return NULL_TREE;

	  r = (const char *) memchr (p1, c, tree_to_uhwi (len));

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (arg1), 0);

	  tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
	  return fold_convert_loc (loc, type, tem);
	}
      return NULL_TREE;
    }
}
/* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
				  arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* If all arguments are constant, and the value of len is not greater
     than the lengths of arg1 and arg2, evaluate at compile-time.  */
  if (tree_fits_uhwi_p (len) && p1 && p2
      && compare_tree_int (len, strlen (p1) + 1) <= 0
      && compare_tree_int (len, strlen (p2) + 1) <= 0)
    {
      const int r = memcmp (p1, p2, tree_to_uhwi (len));

      if (r > 0)
	return integer_one_node;
      else if (r < 0)
	return integer_minus_one_node;
      else
	return integer_zero_node;
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg1)));
      tree ind2
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
/* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return integer_zero_node;

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  if (p1 && p2)
    {
      const int i = strcmp (p1, p2);
      if (i < 0)
	return integer_minus_one_node;
      else if (i > 0)
	return integer_one_node;
      else
	return integer_zero_node;
    }

  /* If the second arg is "", return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0')
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      return fold_convert_loc (loc, integer_type_node,
			       build1 (INDIRECT_REF, cst_uchar_node,
				       fold_convert_loc (loc,
							 cst_uchar_ptr_node,
							 arg1)));
    }

  /* If the first arg is "", return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0')
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree temp
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg2)));
      return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
    }

  return NULL_TREE;
}
/* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
				  arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  if (tree_fits_uhwi_p (len) && p1 && p2)
    {
      const int i = strncmp (p1, p2, tree_to_uhwi (len));
      if (i > 0)
	return integer_one_node;
      else if (i < 0)
	return integer_minus_one_node;
      else
	return integer_zero_node;
    }

  /* If the second arg is "", and the length is greater than zero,
     return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      return fold_convert_loc (loc, integer_type_node,
			       build1 (INDIRECT_REF, cst_uchar_node,
				       fold_convert_loc (loc,
							 cst_uchar_ptr_node,
							 arg1)));
    }

  /* If the first arg is "", and the length is greater than zero,
     return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree temp = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg2)));
      return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1 = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg1)));
      tree ind2 = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
/* Fold function call to builtin signbit, signbitf or signbitl with argument
   ARG.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_signbit (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* If ARG is a compile-time constant, determine the result.  */
  if (TREE_CODE (arg) == REAL_CST
      && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE c;

      c = TREE_REAL_CST (arg);
      return (REAL_VALUE_NEGATIVE (c)
	      ? build_one_cst (type)
	      : build_zero_cst (type));
    }

  /* If ARG is non-negative, the result is always zero.  */
  if (tree_expr_nonnegative_p (arg))
    return omit_one_operand_loc (loc, type, integer_zero_node, arg);

  /* If ARG's format doesn't have signed zeros, return "arg < 0.0".  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
    return fold_convert (type,
			 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
					  build_real (TREE_TYPE (arg), dconst0)));

  return NULL_TREE;
}
/* Fold function call to builtin copysign, copysignf or copysignl with
   arguments ARG1 and ARG2.  Return NULL_TREE if no simplification can
   be made.  */

static tree
fold_builtin_copysign (location_t loc, tree fndecl,
		       tree arg1, tree arg2, tree type)
{
  tree tem;

  if (!validate_arg (arg1, REAL_TYPE)
      || !validate_arg (arg2, REAL_TYPE))
    return NULL_TREE;

  /* copysign(X,X) is X.  */
  if (operand_equal_p (arg1, arg2, 0))
    return fold_convert_loc (loc, type, arg1);

  /* If ARG1 and ARG2 are compile-time constants, determine the result.  */
  if (TREE_CODE (arg1) == REAL_CST
      && TREE_CODE (arg2) == REAL_CST
      && !TREE_OVERFLOW (arg1)
      && !TREE_OVERFLOW (arg2))
    {
      REAL_VALUE_TYPE c1, c2;

      c1 = TREE_REAL_CST (arg1);
      c2 = TREE_REAL_CST (arg2);
      /* c1.sign := c2.sign.  */
      real_copysign (&c1, &c2);
      return build_real (type, c1);
    }

  /* copysign(X, Y) is fabs(X) when Y is always non-negative.
     Remember to evaluate Y for side-effects.  */
  if (tree_expr_nonnegative_p (arg2))
    return omit_one_operand_loc (loc, type,
				 fold_build1_loc (loc, ABS_EXPR, type, arg1),
				 arg2);

  /* Strip sign changing operations for the first argument.  */
  tem = fold_strip_sign_ops (arg1);
  if (tem)
    return build_call_expr_loc (loc, fndecl, 2, tem, arg2);

  return NULL_TREE;
}
/* Fold a call to builtin isascii with argument ARG.  */

static tree
fold_builtin_isascii (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      /* Transform isascii(c) -> ((c & ~0x7f) == 0).  */
      arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
			 build_int_cst (integer_type_node,
					~ (unsigned HOST_WIDE_INT) 0x7f));
      return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
			      arg, integer_zero_node);
    }
}
/* Fold a call to builtin toascii with argument ARG.  */

static tree
fold_builtin_toascii (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Transform toascii(c) -> (c & 0x7f).  */
  return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
			  build_int_cst (integer_type_node, 0x7f));
}
/* Fold a call to builtin isdigit with argument ARG.  */

static tree
fold_builtin_isdigit (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9.  */
      /* According to the C standard, isdigit is unaffected by locale.
	 However, it definitely is affected by the target character set.  */
      unsigned HOST_WIDE_INT target_digit0
	= lang_hooks.to_target_charset ('0');

      if (target_digit0 == 0)
	return NULL_TREE;

      arg = fold_convert_loc (loc, unsigned_type_node, arg);
      arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
			 build_int_cst (unsigned_type_node, target_digit0));
      return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
			      build_int_cst (unsigned_type_node, 9));
    }
}
/* Fold a call to fabs, fabsf or fabsl with argument ARG.  */

static tree
fold_builtin_fabs (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  arg = fold_convert_loc (loc, type, arg);
  if (TREE_CODE (arg) == REAL_CST)
    return fold_abs_const (arg, type);
  return fold_build1_loc (loc, ABS_EXPR, type, arg);
}
/* Fold a call to abs, labs, llabs or imaxabs with argument ARG.  */

static tree
fold_builtin_abs (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  arg = fold_convert_loc (loc, type, arg);
  if (TREE_CODE (arg) == INTEGER_CST)
    return fold_abs_const (arg, type);
  return fold_build1_loc (loc, ABS_EXPR, type, arg);
}
/* Fold a fma operation with arguments ARG[012].  */

tree
fold_fma (location_t loc ATTRIBUTE_UNUSED,
	  tree type, tree arg0, tree arg1, tree arg2)
{
  if (TREE_CODE (arg0) == REAL_CST
      && TREE_CODE (arg1) == REAL_CST
      && TREE_CODE (arg2) == REAL_CST)
    return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);

  return NULL_TREE;
}
/* Fold a call to fma, fmaf, or fmal with arguments ARG[012].  */

static tree
fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
{
  if (validate_arg (arg0, REAL_TYPE)
      && validate_arg (arg1, REAL_TYPE)
      && validate_arg (arg2, REAL_TYPE))
    {
      tree tem = fold_fma (loc, type, arg0, arg1, arg2);
      if (tem)
	return tem;

      /* ??? Only expand to FMA_EXPR if it's directly supported.  */
      if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
	return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
    }
  return NULL_TREE;
}
/* Fold a call to builtin fmin or fmax.  */

static tree
fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
			tree type, bool max)
{
  if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
    {
      /* Calculate the result when the argument is a constant.  */
      tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));

      if (res)
	return res;

      /* If either argument is NaN, return the other one.  Avoid the
	 transformation if we get (and honor) a signalling NaN.  Using
	 omit_one_operand() ensures we create a non-lvalue.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && real_isnan (&TREE_REAL_CST (arg0))
	  && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      || ! TREE_REAL_CST (arg0).signalling))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      if (TREE_CODE (arg1) == REAL_CST
	  && real_isnan (&TREE_REAL_CST (arg1))
	  && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
	      || ! TREE_REAL_CST (arg1).signalling))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Transform fmin/fmax(x,x) -> x.  */
      if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR.  C99 requires these
	 functions to return the numeric arg if the other one is NaN.
	 These tree codes don't honor that, so only transform if
	 -ffinite-math-only is set.  C99 doesn't require -0.0 to be
	 handled, so we don't have to worry about it either.  */
      if (flag_finite_math_only)
	return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type, arg1));
    }
  return NULL_TREE;
}
/* Fold a call to builtin carg(a+bi) -> atan2(b,a).  */

static tree
fold_builtin_carg (location_t loc, tree arg, tree type)
{
  if (validate_arg (arg, COMPLEX_TYPE)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
    {
      tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);

      if (atan2_fn)
	{
	  tree new_arg = builtin_save_expr (arg);
	  tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
	  tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
	  return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
	}
    }

  return NULL_TREE;
}
/* Fold a call to builtin logb/ilogb.  */

static tree
fold_builtin_logb (location_t loc, tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
	{
	case rvc_nan:
	case rvc_inf:
	  /* If arg is Inf or NaN and we're logb, return it.  */
	  if (TREE_CODE (rettype) == REAL_TYPE)
	    {
	      /* For logb(-Inf) we have to return +Inf.  */
	      if (real_isinf (value) && real_isneg (value))
		{
		  REAL_VALUE_TYPE tem;
		  real_inf (&tem);
		  return build_real (rettype, tem);
		}
	      return fold_convert_loc (loc, rettype, arg);
	    }
	  /* Fall through... */
	case rvc_zero:
	  /* Zero may set errno and/or raise an exception for logb, also
	     for ilogb we don't know FP_ILOGB0.  */
	  return NULL_TREE;
	case rvc_normal:
	  /* For normal numbers, proceed iff radix == 2.  In GCC,
	     normalized significands are in the range [0.5, 1.0).  We
	     want the exponent as if they were [1.0, 2.0) so get the
	     exponent and subtract 1.  */
	  if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
	    return fold_convert_loc (loc, rettype,
				     build_int_cst (integer_type_node,
						    REAL_EXP (value)-1));
	  break;
	}
    }

  return NULL_TREE;
}
/* Fold a call to builtin significand, if radix == 2.  */

static tree
fold_builtin_significand (location_t loc, tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
	{
	case rvc_zero:
	case rvc_nan:
	case rvc_inf:
	  /* If arg is +-0, +-Inf or +-NaN, then return it.  */
	  return fold_convert_loc (loc, rettype, arg);
	case rvc_normal:
	  /* For normal numbers, proceed iff radix == 2.  */
	  if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
	    {
	      REAL_VALUE_TYPE result = *value;
	      /* In GCC, normalized significands are in the range [0.5,
		 1.0).  We want them to be [1.0, 2.0) so set the
		 exponent to 1.  */
	      SET_REAL_EXP (&result, 1);
	      return build_real (rettype, result);
	    }
	  break;
	}
    }

  return NULL_TREE;
}
/* Fold a call to builtin frexp, we can assume the base is 2.  */

static tree
fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      tree frac, exp;

      switch (value->cl)
	{
	case rvc_zero:
	  /* For +-0, return (*exp = 0, +-0).  */
	  exp = integer_zero_node;
	  frac = arg0;
	  break;
	case rvc_nan:
	case rvc_inf:
	  /* For +-NaN or +-Inf, *exp is unspecified, return arg0.  */
	  return omit_one_operand_loc (loc, rettype, arg0, arg1);
	case rvc_normal:
	  {
	    /* Since the frexp function always expects base 2, and in
	       GCC normalized significands are already in the range
	       [0.5, 1.0), we have exactly what frexp wants.  */
	    REAL_VALUE_TYPE frac_rvt = *value;
	    SET_REAL_EXP (&frac_rvt, 0);
	    frac = build_real (rettype, frac_rvt);
	    exp = build_int_cst (integer_type_node, REAL_EXP (value));
	  }
	  break;
	default:
	  gcc_unreachable ();
	}

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
    }

  return NULL_TREE;
}
/* Fold a call to builtin ldexp or scalbn/scalbln.  If LDEXP is true
   then we can assume the base is two.  If it's false, then we have to
   check the mode of the TYPE parameter in certain cases.  */

static tree
fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
			    tree type, bool ldexp)
{
  if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0.  */
      if (real_zerop (arg0) || integer_zerop (arg1)
	  || (TREE_CODE (arg0) == REAL_CST
	      && !real_isfinite (&TREE_REAL_CST (arg0))))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* If both arguments are constant, then try to evaluate it.  */
      if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
	  && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
	  && tree_fits_shwi_p (arg1))
	{
	  /* Bound the maximum adjustment to twice the range of the
	     mode's valid exponents.  Use abs to ensure the range is
	     positive as a sanity check.  */
	  const long max_exp_adj = 2 *
	    labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
		  - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);

	  /* Get the user-requested adjustment.  */
	  const HOST_WIDE_INT req_exp_adj = tree_to_shwi (arg1);

	  /* The requested adjustment must be inside this range.  This
	     is a preliminary cap to avoid things like overflow, we
	     may still fail to compute the result for other reasons.  */
	  if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
	    {
	      REAL_VALUE_TYPE initial_result;

	      real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);

	      /* Ensure we didn't overflow.  */
	      if (! real_isinf (&initial_result))
		{
		  const REAL_VALUE_TYPE trunc_result
		    = real_value_truncate (TYPE_MODE (type), initial_result);

		  /* Only proceed if the target mode can hold the
		     resulting value.  */
		  if (REAL_VALUES_EQUAL (initial_result, trunc_result))
		    return build_real (type, trunc_result);
		}
	    }
	}
    }

  return NULL_TREE;
}
/* Fold a call to builtin modf.  */

static tree
fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      REAL_VALUE_TYPE trunc, frac;

      switch (value->cl)
	{
	case rvc_nan:
	case rvc_zero:
	  /* For +-NaN or +-0, return (*arg1 = arg0, arg0).  */
	  trunc = frac = *value;
	  break;
	case rvc_inf:
	  /* For +-Inf, return (*arg1 = arg0, +-0).  */
	  frac = dconst0;
	  frac.sign = value->sign;
	  trunc = *value;
	  break;
	case rvc_normal:
	  /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)).  */
	  real_trunc (&trunc, VOIDmode, value);
	  real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
	  /* If the original number was negative and already
	     integral, then the fractional part is -0.0.  */
	  if (value->sign && frac.cl == rvc_zero)
	    frac.sign = value->sign;
	  break;
	}

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
			      build_real (rettype, trunc));
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
			      build_real (rettype, frac));
    }

  return NULL_TREE;
}
/* Given a location LOC, an interclass builtin function decl FNDECL
   and its single argument ARG, return a folded expression computing
   the same, or NULL_TREE if we either couldn't or didn't want to fold
   (the latter happens if there's an RTL instruction available).  */

static tree
fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum machine_mode mode;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
    return NULL_TREE;

  mode = TYPE_MODE (TREE_TYPE (arg));

  /* If there is no optab, try generic code.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
      tree result;

    CASE_FLT_FN (BUILT_IN_ISINF):
      {
	/* isinf(x) -> isgreater(fabs(x),DBL_MAX).  */
	tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&r, buf);
	result = build_call_expr (isgr_fn, 2,
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	return result;
      }
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_ISFINITE:
      {
	/* isfinite(x) -> islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&r, buf);
	result = build_call_expr (isle_fn, 2,
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	/*result = fold_build2_loc (loc, UNGT_EXPR,
				  TREE_TYPE (TREE_TYPE (fndecl)),
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
				  TREE_TYPE (TREE_TYPE (fndecl)),
				  result);*/
	return result;
      }
    case BUILT_IN_ISNORMAL:
      {
	/* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
	   islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
	tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE rmax, rmin;
	char buf[128];

	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&rmax, buf);
	sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
	real_from_string (&rmin, buf);
	arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
	result = build_call_expr (isle_fn, 2, arg,
				  build_real (type, rmax));
	result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
			      build_call_expr (isge_fn, 2, arg,
					       build_real (type, rmin)));
	return result;
      }
    default:
      break;
    }

  return NULL_TREE;
}
/* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
   ARG is the argument for the call.  */

static tree
fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  REAL_VALUE_TYPE r;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  switch (builtin_index)
    {
    case BUILT_IN_ISINF:
      if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  if (real_isinf (&r))
	    return real_compare (GT_EXPR, &r, &dconst0)
		   ? integer_one_node : integer_minus_one_node;
	  else
	    return integer_zero_node;
	}

      return NULL_TREE;

    case BUILT_IN_ISINF_SIGN:
      {
	/* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
	/* In a boolean context, GCC will fold the inner COND_EXPR to
	   1.  So e.g. "if (isinf_sign(x))" would be folded to just
	   "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
	tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
	tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
	tree tmp = NULL_TREE;

	arg = builtin_save_expr (arg);

	if (signbit_fn && isinf_fn)
	  {
	    tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
	    tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);

	    signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
					    signbit_call, integer_zero_node);
	    isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
					  isinf_call, integer_zero_node);

	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
				   integer_minus_one_node, integer_one_node);
	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
				   isinf_call, tmp,
				   integer_zero_node);
	  }

	return tmp;
      }

    case BUILT_IN_ISFINITE:
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
	  && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand_loc (loc, type, integer_one_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  return real_isfinite (&r) ? integer_one_node : integer_zero_node;
	}

      return NULL_TREE;

    case BUILT_IN_ISNAN:
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  return real_isnan (&r) ? integer_one_node : integer_zero_node;
	}

      arg = builtin_save_expr (arg);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);

    default:
      gcc_unreachable ();
    }
}
/* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
   This builtin will generate code to return the appropriate floating
   point classification depending on the value of the floating point
   number passed in.  The possible return values must be supplied as
   int arguments to the call in the following order: FP_NAN, FP_INFINITE,
   FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipses is for exactly
   one floating point argument which is "type generic".  */

static tree
fold_builtin_fpclassify (location_t loc, tree exp)
{
  tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
    arg, type, res, tmp;
  enum machine_mode mode;
  REAL_VALUE_TYPE r;
  char buf[128];

  /* Verify the required arguments in the original call.  */
  if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE,
			 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_TREE;

  fp_nan = CALL_EXPR_ARG (exp, 0);
  fp_infinite = CALL_EXPR_ARG (exp, 1);
  fp_normal = CALL_EXPR_ARG (exp, 2);
  fp_subnormal = CALL_EXPR_ARG (exp, 3);
  fp_zero = CALL_EXPR_ARG (exp, 4);
  arg = CALL_EXPR_ARG (exp, 5);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (type);
  arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));

  /* fpclassify(x) ->
       isnan(x) ? FP_NAN :
	 (fabs(x) == Inf ? FP_INFINITE :
	   (fabs(x) >= DBL_MIN ? FP_NORMAL :
	     (x == 0 ? FP_ZERO : FP_SUBNORMAL))).  */

  tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
			 build_real (type, dconst0));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
			 tmp, fp_zero, fp_subnormal);

  sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
  real_from_string (&r, buf);
  tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
			 arg, build_real (type, r));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);

  if (HONOR_INFINITIES (mode))
    {
      real_inf (&r);
      tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
			     build_real (type, r));
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
			     fp_infinite, res);
    }

  if (HONOR_NANS (mode))
    {
      tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
    }

  return res;
}
/* Fold a call to an unordered comparison function such as
   __builtin_isgreater().  FNDECL is the FUNCTION_DECL for the function
   being called and ARG0 and ARG1 are the arguments for the call.
   UNORDERED_CODE and ORDERED_CODE are comparison codes that give
   the opposite of the desired result.  UNORDERED_CODE is used
   for modes that can hold NaNs and ORDERED_CODE is used for
   the rest.  */

static tree
fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
			    enum tree_code unordered_code,
			    enum tree_code ordered_code)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum tree_code code;
  tree type0, type1;
  enum tree_code code0, code1;
  tree cmp_type = NULL_TREE;

  type0 = TREE_TYPE (arg0);
  type1 = TREE_TYPE (arg1);

  code0 = TREE_CODE (type0);
  code1 = TREE_CODE (type1);

  if (code0 == REAL_TYPE && code1 == REAL_TYPE)
    /* Choose the wider of two real types.  */
    cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
      ? type0 : type1;
  else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
    cmp_type = type0;
  else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
    cmp_type = type1;

  arg0 = fold_convert_loc (loc, cmp_type, arg0);
  arg1 = fold_convert_loc (loc, cmp_type, arg1);

  if (unordered_code == UNORDERED_EXPR)
    {
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
    }

  code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
						   : ordered_code;
  return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
			  fold_build2_loc (loc, code, type, arg0, arg1));
}
/* Fold a call to built-in function FNDECL with 0 arguments.
   IGNORE is true if the result of the function call is ignored.  This
   function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_INF):
    case BUILT_IN_INFD32:
    case BUILT_IN_INFD64:
    case BUILT_IN_INFD128:
      return fold_builtin_inf (loc, type, true);

    CASE_FLT_FN (BUILT_IN_HUGE_VAL):
      return fold_builtin_inf (loc, type, false);

    case BUILT_IN_CLASSIFY_TYPE:
      return fold_builtin_classify_type (NULL_TREE);

    case BUILT_IN_UNREACHABLE:
      if (flag_sanitize & SANITIZE_UNREACHABLE
	  && (current_function_decl == NULL
	      || !lookup_attribute ("no_sanitize_undefined",
				    DECL_ATTRIBUTES (current_function_decl))))
	return ubsan_instrument_unreachable (loc);
      break;

    default:
      break;
    }
  return NULL_TREE;
}
/* Fold a call to built-in function FNDECL with 1 argument, ARG0.
   IGNORE is true if the result of the function call is ignored.  This
   function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  switch (fcode)
    {
    case BUILT_IN_CONSTANT_P:
      {
	tree val = fold_builtin_constant_p (arg0);

	/* Gimplification will pull the CALL_EXPR for the builtin out of
	   an if condition.  When not optimizing, we'll not CSE it back.
	   To avoid link error types of regressions, return false now.  */
	if (!val && !optimize)
	  val = integer_zero_node;

	return val;
      }

    case BUILT_IN_CLASSIFY_TYPE:
      return fold_builtin_classify_type (arg0);

    case BUILT_IN_STRLEN:
      return fold_builtin_strlen (loc, type, arg0);

    CASE_FLT_FN (BUILT_IN_FABS):
    case BUILT_IN_FABSD32:
    case BUILT_IN_FABSD64:
    case BUILT_IN_FABSD128:
      return fold_builtin_fabs (loc, arg0, type);

    case BUILT_IN_ABS:
    case BUILT_IN_LABS:
    case BUILT_IN_LLABS:
    case BUILT_IN_IMAXABS:
      return fold_builtin_abs (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_CONJ):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
      break;

    CASE_FLT_FN (BUILT_IN_CREAL):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return non_lvalue_loc (loc,
			       fold_build1_loc (loc, REALPART_EXPR, type, arg0));
      break;

    CASE_FLT_FN (BUILT_IN_CIMAG):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return non_lvalue_loc (loc,
			       fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
      break;

    CASE_FLT_FN (BUILT_IN_CCOS):
      return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);

    CASE_FLT_FN (BUILT_IN_CCOSH):
      return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);

    CASE_FLT_FN (BUILT_IN_CPROJ):
      return fold_builtin_cproj (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_CSIN):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_sin);
      break;

    CASE_FLT_FN (BUILT_IN_CSINH):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_sinh);
      break;

    CASE_FLT_FN (BUILT_IN_CTAN):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_tan);
      break;

    CASE_FLT_FN (BUILT_IN_CTANH):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_tanh);
      break;

    CASE_FLT_FN (BUILT_IN_CLOG):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_log);
      break;

    CASE_FLT_FN (BUILT_IN_CSQRT):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_sqrt);
      break;

    CASE_FLT_FN (BUILT_IN_CASIN):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_asin);
      break;

    CASE_FLT_FN (BUILT_IN_CACOS):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_acos);
      break;

    CASE_FLT_FN (BUILT_IN_CATAN):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_atan);
      break;

    CASE_FLT_FN (BUILT_IN_CASINH):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_asinh);
      break;

    CASE_FLT_FN (BUILT_IN_CACOSH):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_acosh);
      break;

    CASE_FLT_FN (BUILT_IN_CATANH):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_atanh);
      break;

    CASE_FLT_FN (BUILT_IN_CABS):
      return fold_builtin_cabs (loc, arg0, type, fndecl);

    CASE_FLT_FN (BUILT_IN_CARG):
      return fold_builtin_carg (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_SQRT):
      return fold_builtin_sqrt (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_CBRT):
      return fold_builtin_cbrt (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_ASIN):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_asin,
			     &dconstm1, &dconst1, true);
      break;

    CASE_FLT_FN (BUILT_IN_ACOS):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_acos,
			     &dconstm1, &dconst1, true);
      break;

    CASE_FLT_FN (BUILT_IN_ATAN):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_ASINH):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_ACOSH):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_acosh,
			     &dconst1, NULL, true);
      break;

    CASE_FLT_FN (BUILT_IN_ATANH):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_atanh,
			     &dconstm1, &dconst1, false);
      break;

    CASE_FLT_FN (BUILT_IN_SIN):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_COS):
      return fold_builtin_cos (loc, arg0, type, fndecl);

    CASE_FLT_FN (BUILT_IN_TAN):
      return fold_builtin_tan (arg0, type);

    CASE_FLT_FN (BUILT_IN_CEXP):
      return fold_builtin_cexp (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_CEXPI):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
      break;

    CASE_FLT_FN (BUILT_IN_SINH):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_COSH):
      return fold_builtin_cosh (loc, arg0, type, fndecl);

    CASE_FLT_FN (BUILT_IN_TANH):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_ERF):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_ERFC):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_TGAMMA):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_EXP):
      return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);

    CASE_FLT_FN (BUILT_IN_EXP2):
      return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);

    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
      return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);

    CASE_FLT_FN (BUILT_IN_EXPM1):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_LOG):
      return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);

    CASE_FLT_FN (BUILT_IN_LOG2):
      return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);

    CASE_FLT_FN (BUILT_IN_LOG10):
      return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);

    CASE_FLT_FN (BUILT_IN_LOG1P):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_log1p,
			     &dconstm1, NULL, false);
      break;

    CASE_FLT_FN (BUILT_IN_J0):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_j0,
			     NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_J1):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_j1,
			     NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_Y0):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_y0,
			     &dconst0, NULL, false);
      break;

    CASE_FLT_FN (BUILT_IN_Y1):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_y1,
			     &dconst0, NULL, false);
      break;

    CASE_FLT_FN (BUILT_IN_NAN):
    case BUILT_IN_NAND32:
    case BUILT_IN_NAND64:
    case BUILT_IN_NAND128:
      return fold_builtin_nan (arg0, type, true);

    CASE_FLT_FN (BUILT_IN_NANS):
      return fold_builtin_nan (arg0, type, false);

    CASE_FLT_FN (BUILT_IN_FLOOR):
      return fold_builtin_floor (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_CEIL):
      return fold_builtin_ceil (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_TRUNC):
      return fold_builtin_trunc (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_ROUND
):
9977 return fold_builtin_round (loc
, fndecl
, arg0
);
9979 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
9980 CASE_FLT_FN (BUILT_IN_RINT
):
9981 return fold_trunc_transparent_mathfn (loc
, fndecl
, arg0
);
9983 CASE_FLT_FN (BUILT_IN_ICEIL
):
9984 CASE_FLT_FN (BUILT_IN_LCEIL
):
9985 CASE_FLT_FN (BUILT_IN_LLCEIL
):
9986 CASE_FLT_FN (BUILT_IN_LFLOOR
):
9987 CASE_FLT_FN (BUILT_IN_IFLOOR
):
9988 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
9989 CASE_FLT_FN (BUILT_IN_IROUND
):
9990 CASE_FLT_FN (BUILT_IN_LROUND
):
9991 CASE_FLT_FN (BUILT_IN_LLROUND
):
9992 return fold_builtin_int_roundingfn (loc
, fndecl
, arg0
);
9994 CASE_FLT_FN (BUILT_IN_IRINT
):
9995 CASE_FLT_FN (BUILT_IN_LRINT
):
9996 CASE_FLT_FN (BUILT_IN_LLRINT
):
9997 return fold_fixed_mathfn (loc
, fndecl
, arg0
);
9999 case BUILT_IN_BSWAP16
:
10000 case BUILT_IN_BSWAP32
:
10001 case BUILT_IN_BSWAP64
:
10002 return fold_builtin_bswap (fndecl
, arg0
);
10004 CASE_INT_FN (BUILT_IN_FFS
):
10005 CASE_INT_FN (BUILT_IN_CLZ
):
10006 CASE_INT_FN (BUILT_IN_CTZ
):
10007 CASE_INT_FN (BUILT_IN_CLRSB
):
10008 CASE_INT_FN (BUILT_IN_POPCOUNT
):
10009 CASE_INT_FN (BUILT_IN_PARITY
):
10010 return fold_builtin_bitop (fndecl
, arg0
);
10012 CASE_FLT_FN (BUILT_IN_SIGNBIT
):
10013 return fold_builtin_signbit (loc
, arg0
, type
);
10015 CASE_FLT_FN (BUILT_IN_SIGNIFICAND
):
10016 return fold_builtin_significand (loc
, arg0
, type
);
10018 CASE_FLT_FN (BUILT_IN_ILOGB
):
10019 CASE_FLT_FN (BUILT_IN_LOGB
):
10020 return fold_builtin_logb (loc
, arg0
, type
);
10022 case BUILT_IN_ISASCII
:
10023 return fold_builtin_isascii (loc
, arg0
);
10025 case BUILT_IN_TOASCII
:
10026 return fold_builtin_toascii (loc
, arg0
);
10028 case BUILT_IN_ISDIGIT
:
10029 return fold_builtin_isdigit (loc
, arg0
);
10031 CASE_FLT_FN (BUILT_IN_FINITE
):
10032 case BUILT_IN_FINITED32
:
10033 case BUILT_IN_FINITED64
:
10034 case BUILT_IN_FINITED128
:
10035 case BUILT_IN_ISFINITE
:
10037 tree ret
= fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISFINITE
);
10040 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
10043 CASE_FLT_FN (BUILT_IN_ISINF
):
10044 case BUILT_IN_ISINFD32
:
10045 case BUILT_IN_ISINFD64
:
10046 case BUILT_IN_ISINFD128
:
10048 tree ret
= fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISINF
);
10051 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
10054 case BUILT_IN_ISNORMAL
:
10055 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
10057 case BUILT_IN_ISINF_SIGN
:
10058 return fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISINF_SIGN
);
10060 CASE_FLT_FN (BUILT_IN_ISNAN
):
10061 case BUILT_IN_ISNAND32
:
10062 case BUILT_IN_ISNAND64
:
10063 case BUILT_IN_ISNAND128
:
10064 return fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISNAN
);
10066 case BUILT_IN_PRINTF
:
10067 case BUILT_IN_PRINTF_UNLOCKED
:
10068 case BUILT_IN_VPRINTF
:
10069 return fold_builtin_printf (loc
, fndecl
, arg0
, NULL_TREE
, ignore
, fcode
);
10071 case BUILT_IN_FREE
:
10072 if (integer_zerop (arg0
))
10073 return build_empty_stmt (loc
);
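
/* Illustrative note (editorial addition, not part of the original source):
   the one-argument dispatch above is what makes a constant-argument call
   vanish at compile time.  For example, under default options

     double f (void) { return __builtin_sin (0.0); }

   is folded through do_mpfr_arg1 to the constant 0.0, so no libm call is
   emitted at all.  */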

/* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
   IGNORE is true if the result of the function call is ignored.  This
   function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_JN):
      if (validate_arg (arg0, INTEGER_TYPE)
          && validate_arg (arg1, REAL_TYPE))
        return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_YN):
      if (validate_arg (arg0, INTEGER_TYPE)
          && validate_arg (arg1, REAL_TYPE))
        return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
                                 &dconst0, false);
    break;

    CASE_FLT_FN (BUILT_IN_DREM):
    CASE_FLT_FN (BUILT_IN_REMAINDER):
      if (validate_arg (arg0, REAL_TYPE)
          && validate_arg (arg1, REAL_TYPE))
        return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
    break;

    CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
    CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
      if (validate_arg (arg0, REAL_TYPE)
          && validate_arg (arg1, POINTER_TYPE))
        return do_mpfr_lgamma_r (arg0, arg1, type);
    break;

    CASE_FLT_FN (BUILT_IN_ATAN2):
      if (validate_arg (arg0, REAL_TYPE)
          && validate_arg (arg1, REAL_TYPE))
        return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
    break;

    CASE_FLT_FN (BUILT_IN_FDIM):
      if (validate_arg (arg0, REAL_TYPE)
          && validate_arg (arg1, REAL_TYPE))
        return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
    break;

    CASE_FLT_FN (BUILT_IN_HYPOT):
      return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_CPOW):
      if (validate_arg (arg0, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
          && validate_arg (arg1, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
        return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
    break;

    CASE_FLT_FN (BUILT_IN_LDEXP):
      return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      return fold_builtin_load_exponent (loc, arg0, arg1,
                                         type, /*ldexp=*/false);

    CASE_FLT_FN (BUILT_IN_FREXP):
      return fold_builtin_frexp (loc, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_MODF):
      return fold_builtin_modf (loc, arg0, arg1, type);

    case BUILT_IN_STRSTR:
      return fold_builtin_strstr (loc, arg0, arg1, type);

    case BUILT_IN_STRSPN:
      return fold_builtin_strspn (loc, arg0, arg1);

    case BUILT_IN_STRCSPN:
      return fold_builtin_strcspn (loc, arg0, arg1);

    case BUILT_IN_STRCHR:
    case BUILT_IN_INDEX:
      return fold_builtin_strchr (loc, arg0, arg1, type);

    case BUILT_IN_STRRCHR:
    case BUILT_IN_RINDEX:
      return fold_builtin_strrchr (loc, arg0, arg1, type);

    case BUILT_IN_STPCPY:
      if (ignore)
        {
          tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
          if (!fn)
            break;

          return build_call_expr_loc (loc, fn, 2, arg0, arg1);
        }
      else
        return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
      break;

    case BUILT_IN_STRCMP:
      return fold_builtin_strcmp (loc, arg0, arg1);

    case BUILT_IN_STRPBRK:
      return fold_builtin_strpbrk (loc, arg0, arg1, type);

    case BUILT_IN_EXPECT:
      return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);

    CASE_FLT_FN (BUILT_IN_POW):
      return fold_builtin_pow (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_POWI):
      return fold_builtin_powi (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_COPYSIGN):
      return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_FMIN):
      return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);

    CASE_FLT_FN (BUILT_IN_FMAX):
      return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);

    case BUILT_IN_ISGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNLE_EXPR, LE_EXPR);
    case BUILT_IN_ISGREATEREQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNLT_EXPR, LT_EXPR);
    case BUILT_IN_ISLESS:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNGE_EXPR, GE_EXPR);
    case BUILT_IN_ISLESSEQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNGT_EXPR, GT_EXPR);
    case BUILT_IN_ISLESSGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNEQ_EXPR, EQ_EXPR);
    case BUILT_IN_ISUNORDERED:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNORDERED_EXPR,
                                         NOP_EXPR);

      /* We do the folding for va_start in the expander.  */
    case BUILT_IN_VA_START:
      break;

    case BUILT_IN_SPRINTF:
      return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);

    case BUILT_IN_OBJECT_SIZE:
      return fold_builtin_object_size (arg0, arg1);

    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);

    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      if (!validate_arg (arg0, INTEGER_TYPE)
          || TREE_SIDE_EFFECTS (arg0))
        return NULL_TREE;
      else
        return fold_builtin_printf (loc, fndecl,
                                    arg1, NULL_TREE, ignore, fcode);
    break;

    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
                                   ignore, fcode);

    case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
      return fold_builtin_atomic_always_lock_free (arg0, arg1);

    case BUILT_IN_ATOMIC_IS_LOCK_FREE:
      return fold_builtin_atomic_is_lock_free (arg0, arg1);

    default:
      break;
    }
  return NULL_TREE;
}
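
/* Illustrative note (editorial addition, not part of the original source):
   the unordered-comparison cases above rewrite, e.g.,

     isgreater (x, y)   ==>   !(x UNLE y)

   i.e. the negation of UNLE_EXPR, which never raises an "invalid"
   exception on NaN operands; when the operands cannot be NaNs the plain
   LE_EXPR form is used instead.  */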

/* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
   and ARG2.  IGNORE is true if the result of the function call is ignored.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_3 (location_t loc, tree fndecl,
                tree arg0, tree arg1, tree arg2, bool ignore)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_SINCOS):
      return fold_builtin_sincos (loc, arg0, arg1, arg2);

    CASE_FLT_FN (BUILT_IN_FMA):
      return fold_builtin_fma (loc, arg0, arg1, arg2, type);
    break;

    CASE_FLT_FN (BUILT_IN_REMQUO):
      if (validate_arg (arg0, REAL_TYPE)
          && validate_arg (arg1, REAL_TYPE)
          && validate_arg (arg2, POINTER_TYPE))
        return do_mpfr_remquo (arg0, arg1, arg2);
    break;

    case BUILT_IN_STRNCAT:
      return fold_builtin_strncat (loc, arg0, arg1, arg2);

    case BUILT_IN_STRNCMP:
      return fold_builtin_strncmp (loc, arg0, arg1, arg2);

    case BUILT_IN_MEMCHR:
      return fold_builtin_memchr (loc, arg0, arg1, arg2, type);

    case BUILT_IN_BCMP:
    case BUILT_IN_MEMCMP:
      return fold_builtin_memcmp (loc, arg0, arg1, arg2);

    case BUILT_IN_SPRINTF:
      return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);

    case BUILT_IN_SNPRINTF:
      return fold_builtin_snprintf (loc, arg0, arg1, arg2, NULL_TREE, ignore);

    case BUILT_IN_STRCAT_CHK:
      return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);

    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      if (!validate_arg (arg0, INTEGER_TYPE)
          || TREE_SIDE_EFFECTS (arg0))
        return NULL_TREE;
      else
        return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
    break;

    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
                                   ignore, fcode);

    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      if (!validate_arg (arg1, INTEGER_TYPE)
          || TREE_SIDE_EFFECTS (arg1))
        return NULL_TREE;
      else
        return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
                                     ignore, fcode);

    case BUILT_IN_EXPECT:
      return fold_builtin_expect (loc, arg0, arg1, arg2);

    default:
      break;
    }
  return NULL_TREE;
}

/* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
   ARG2, and ARG3.  IGNORE is true if the result of the function call is
   ignored.  This function returns NULL_TREE if no simplification was
   possible.  */

static tree
fold_builtin_4 (location_t loc, tree fndecl,
                tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  switch (fcode)
    {
    case BUILT_IN_STRNCAT_CHK:
      return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);

    case BUILT_IN_SNPRINTF:
      return fold_builtin_snprintf (loc, arg0, arg1, arg2, arg3, ignore);

    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      if (!validate_arg (arg1, INTEGER_TYPE)
          || TREE_SIDE_EFFECTS (arg1))
        return NULL_TREE;
      else
        return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
                                     ignore, fcode);
    break;

    default:
      break;
    }
  return NULL_TREE;
}

/* Fold a call to built-in function FNDECL.  ARGS is an array of NARGS
   arguments, where NARGS <= 4.  IGNORE is true if the result of the
   function call is ignored.  This function returns NULL_TREE if no
   simplification was possible.  Note that this only folds builtins with
   fixed argument patterns.  Foldings that do varargs-to-varargs
   transformations, or that match calls with more than 4 arguments,
   need to be handled with fold_builtin_varargs instead.  */

#define MAX_ARGS_TO_FOLD_BUILTIN 4

static tree
fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs,
                bool ignore)
{
  tree ret = NULL_TREE;

  switch (nargs)
    {
    case 0:
      ret = fold_builtin_0 (loc, fndecl, ignore);
      break;
    case 1:
      ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
      break;
    case 2:
      ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
      break;
    case 3:
      ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
      break;
    case 4:
      ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
                            ignore);
      break;
    default:
      break;
    }
  if (ret)
    {
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}

/* Construct a new CALL_EXPR to FNDECL using the tail of the argument
   list ARGS along with N new arguments in NEWARGS.  SKIP is the number
   of arguments in ARGS to be omitted.  OLDNARGS is the number of
   elements in ARGS.  */

static tree
rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
                          int skip, tree fndecl, int n, va_list newargs)
{
  int nargs = oldnargs - skip + n;
  tree *buffer;

  if (n > 0)
    {
      int i, j;

      buffer = XALLOCAVEC (tree, nargs);
      for (i = 0; i < n; i++)
        buffer[i] = va_arg (newargs, tree);
      for (j = skip; j < oldnargs; j++, i++)
        buffer[i] = args[j];
    }
  else
    buffer = args + skip;

  return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
}
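
/* Illustrative note (editorial addition, not part of the original source):
   with OLDNARGS == 3, ARGS == {a, b, c}, SKIP == 1 and two new arguments
   x and y, the buffer assembled above is {x, y, b, c}: the new arguments
   come first, followed by the retained tail of the old argument list.  */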

/* Return true if FNDECL shouldn't be folded right now.
   If a built-in function has an inline attribute always_inline
   wrapper, defer folding it after always_inline functions have
   been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
   might not be performed.  */

bool
avoid_folding_inline_builtin (tree fndecl)
{
  return (DECL_DECLARED_INLINE_P (fndecl)
          && DECL_DISREGARD_INLINE_LIMITS (fndecl)
          && cfun
          && !cfun->always_inline_functions_inlined
          && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
}

/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_expr (location_t loc, tree exp, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = get_callee_fndecl (exp);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
         yet.  Defer folding until we see all the arguments
         (after inlining).  */
      && !CALL_EXPR_VA_ARG_PACK (exp))
    {
      int nargs = call_expr_nargs (exp);

      /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
         instead last argument is __builtin_va_arg_pack ().  Defer folding
         even in that case, until arguments are finalized.  */
      if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
        {
          tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
          if (fndecl2
              && TREE_CODE (fndecl2) == FUNCTION_DECL
              && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
              && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
            return NULL_TREE;
        }

      if (avoid_folding_inline_builtin (fndecl))
        return NULL_TREE;

      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
        return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
                                     CALL_EXPR_ARGP (exp), ignore);
      else
        {
          if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
            {
              tree *args = CALL_EXPR_ARGP (exp);
              ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
            }
          if (!ret)
            ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
          if (ret)
            return ret;
        }
    }
  return NULL_TREE;
}

/* Construct a CALL_EXPR with type TYPE with FN as the function expression.
   N arguments are passed in the array ARGARRAY.  */

tree
fold_builtin_call_array (location_t loc, tree type,
                         tree fn,
                         int n,
                         tree *argarray)
{
  tree ret = NULL_TREE;
  tree exp;

  if (TREE_CODE (fn) == ADDR_EXPR)
    {
      tree fndecl = TREE_OPERAND (fn, 0);
      if (TREE_CODE (fndecl) == FUNCTION_DECL
          && DECL_BUILT_IN (fndecl))
        {
          /* If last argument is __builtin_va_arg_pack (), arguments to this
             function are not finalized yet.  Defer folding until they are.  */
          if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
            {
              tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
              if (fndecl2
                  && TREE_CODE (fndecl2) == FUNCTION_DECL
                  && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
                  && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
                return build_call_array_loc (loc, type, fn, n, argarray);
            }
          if (avoid_folding_inline_builtin (fndecl))
            return build_call_array_loc (loc, type, fn, n, argarray);
          if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
            {
              ret = targetm.fold_builtin (fndecl, n, argarray, false);
              if (ret)
                return ret;

              return build_call_array_loc (loc, type, fn, n, argarray);
            }
          else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
            {
              /* First try the transformations that don't require consing up
                 an exp.  */
              ret = fold_builtin_n (loc, fndecl, argarray, n, false);
              if (ret)
                return ret;
            }

          /* If we got this far, we need to build an exp.  */
          exp = build_call_array_loc (loc, type, fn, n, argarray);
          ret = fold_builtin_varargs (loc, fndecl, exp, false);
          return ret ? ret : exp;
        }
    }

  return build_call_array_loc (loc, type, fn, n, argarray);
}

/* Construct a new CALL_EXPR using the tail of the argument list of EXP
   along with N new arguments specified as the "..." parameters.  SKIP
   is the number of arguments in EXP to be omitted.  This function is used
   to do varargs-to-varargs transformations.  */

static tree
rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
{
  va_list ap;
  tree t;

  va_start (ap, n);
  t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
                                CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
  va_end (ap);

  return t;
}

/* Validate a single argument ARG against a tree code CODE representing
   a type.  */

static bool
validate_arg (const_tree arg, enum tree_code code)
{
  if (!arg)
    return false;
  else if (code == POINTER_TYPE)
    return POINTER_TYPE_P (TREE_TYPE (arg));
  else if (code == INTEGER_TYPE)
    return INTEGRAL_TYPE_P (TREE_TYPE (arg));
  return code == TREE_CODE (TREE_TYPE (arg));
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.

   This is the GIMPLE version of validate_arglist.  Eventually we want to
   completely convert builtins.c to work from GIMPLEs and the tree based
   validate_arglist will then be removed.  */

bool
validate_gimple_arglist (const_gimple call, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_tree arg;
  size_t i;

  va_start (ap, call);
  i = 0;

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
        {
        case 0:
          /* This signifies an ellipsis, any further arguments are all ok.  */
          res = true;
          goto end;
        case VOID_TYPE:
          /* This signifies an endlink, if no arguments remain, return
             true, otherwise return false.  */
          res = (i == gimple_call_num_args (call));
          goto end;
        default:
          /* If no parameters remain or the parameter's code does not
             match the specified code, return false.  Otherwise continue
             checking any remaining arguments.  */
          arg = gimple_call_arg (call, i++);
          if (!validate_arg (arg, code))
            goto end;
          break;
        }
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}

/* Default target-specific builtin expander that does nothing.  */

rtx
default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
                        rtx target ATTRIBUTE_UNUSED,
                        rtx subtarget ATTRIBUTE_UNUSED,
                        enum machine_mode mode ATTRIBUTE_UNUSED,
                        int ignore ATTRIBUTE_UNUSED)
{
  return NULL_RTX;
}

/* Returns true if EXP represents data that would potentially reside
   in a readonly section.  */

bool
readonly_data_expr (tree exp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) != ADDR_EXPR)
    return false;

  exp = get_base_address (TREE_OPERAND (exp, 0));
  if (!exp)
    return false;

  /* Make sure we call decl_readonly_section only for trees it
     can handle (since it returns true for everything it doesn't
     understand).  */
  if (TREE_CODE (exp) == STRING_CST
      || TREE_CODE (exp) == CONSTRUCTOR
      || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
    return decl_readonly_section (exp, 0);
  else
    return false;
}

/* Simplify a call to the strstr builtin.  S1 and S2 are the arguments
   to the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1, *p2;

      p2 = c_getstr (s2);
      if (p2 == NULL)
        return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
        {
          const char *r = strstr (p1, p2);
          tree tem;

          if (r == NULL)
            return build_int_cst (TREE_TYPE (s1), 0);

          /* Return an offset into the constant string argument.  */
          tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
          return fold_convert_loc (loc, type, tem);
        }

      /* The argument is const char *, and the result is char *, so we need
         a type conversion here to avoid a warning.  */
      if (p2[0] == '\0')
        return fold_convert_loc (loc, type, s1);

      if (p2[1] != '\0')
        return NULL_TREE;

      fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (!fn)
        return NULL_TREE;

      /* New argument list transforming strstr(s1, s2) to
         strchr(s1, s2[0]).  */
      return build_call_expr_loc (loc, fn, 2, s1,
                                  build_int_cst (integer_type_node, p2[0]));
    }
}
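
/* Illustrative note (editorial addition, not part of the original source)
   of the three simplifications performed above:

     strstr ("hello", "ll")  ==>  "hello" + 2          (both constant)
     strstr (s, "")          ==>  (char *) s           (empty needle)
     strstr (s, "l")         ==>  strchr (s, 'l')      (one-char needle)  */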

/* Simplify a call to the strchr builtin.  S1 and S2 are the arguments to
   the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1;

      if (TREE_CODE (s2) != INTEGER_CST)
        return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
        {
          char c;
          const char *r;
          tree tem;

          if (target_char_cast (s2, &c))
            return NULL_TREE;

          r = strchr (p1, c);

          if (r == NULL)
            return build_int_cst (TREE_TYPE (s1), 0);

          /* Return an offset into the constant string argument.  */
          tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
          return fold_convert_loc (loc, type, tem);
        }
      return NULL_TREE;
    }
}

/* Simplify a call to the strrchr builtin.  S1 and S2 are the arguments to
   the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1;

      if (TREE_CODE (s2) != INTEGER_CST)
        return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
        {
          char c;
          const char *r;
          tree tem;

          if (target_char_cast (s2, &c))
            return NULL_TREE;

          r = strrchr (p1, c);

          if (r == NULL)
            return build_int_cst (TREE_TYPE (s1), 0);

          /* Return an offset into the constant string argument.  */
          tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
          return fold_convert_loc (loc, type, tem);
        }

      if (! integer_zerop (s2))
        return NULL_TREE;

      fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (!fn)
        return NULL_TREE;

      /* Transform strrchr(s1, '\0') to strchr(s1, '\0').  */
      return build_call_expr_loc (loc, fn, 2, s1, s2);
    }
}

/* Simplify a call to the strpbrk builtin.  S1 and S2 are the arguments
   to the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1, *p2;

      p2 = c_getstr (s2);
      if (p2 == NULL)
        return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
        {
          const char *r = strpbrk (p1, p2);
          tree tem;

          if (r == NULL)
            return build_int_cst (TREE_TYPE (s1), 0);

          /* Return an offset into the constant string argument.  */
          tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
          return fold_convert_loc (loc, type, tem);
        }

      if (p2[0] == '\0')
        /* strpbrk(x, "") == NULL.
           Evaluate and ignore s1 in case it had side-effects.  */
        return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);

      if (p2[1] != '\0')
        return NULL_TREE;  /* Really call strpbrk.  */

      fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (!fn)
        return NULL_TREE;

      /* New argument list transforming strpbrk(s1, s2) to
         strchr(s1, s2[0]).  */
      return build_call_expr_loc (loc, fn, 2, s1,
                                  build_int_cst (integer_type_node, p2[0]));
    }
}
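
/* Illustrative note (editorial addition, not part of the original source):

     strpbrk ("abc", "cd")  ==>  "abc" + 2             (both constant)
     strpbrk (s, "")        ==>  (char *) 0            (s still evaluated)
     strpbrk (s, "x")       ==>  strchr (s, 'x')       (one-char set)  */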

/* Simplify a call to the strncat builtin.  DST, SRC, and LEN are the
   arguments to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
{
  if (!validate_arg (dst, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p = c_getstr (src);

      /* If the requested length is zero, or the src parameter string
         length is zero, return the dst parameter.  */
      if (integer_zerop (len) || (p && *p == '\0'))
        return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);

      /* If the requested len is greater than or equal to the string
         length, call strcat.  */
      if (TREE_CODE (len) == INTEGER_CST && p
          && compare_tree_int (len, strlen (p)) >= 0)
        {
          tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);

          /* If the replacement _DECL isn't initialized, don't do the
             transformation.  */
          if (!fn)
            return NULL_TREE;

          return build_call_expr_loc (loc, fn, 2, dst, src);
        }
      return NULL_TREE;
    }
}

/* Simplify a call to the strspn builtin.  S1 and S2 are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strspn (location_t loc, tree s1, tree s2)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);

      /* If both arguments are constants, evaluate at compile-time.  */
      if (p1 && p2)
        {
          const size_t r = strspn (p1, p2);
          return build_int_cst (size_type_node, r);
        }

      /* If either argument is "", return NULL_TREE.  */
      if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
        /* Evaluate and ignore both arguments in case either one has
           side-effects.  */
        return omit_two_operands_loc (loc, size_type_node, size_zero_node,
                                      s1, s2);
      return NULL_TREE;
    }
}

/* Simplify a call to the strcspn builtin.  S1 and S2 are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strcspn (location_t loc, tree s1, tree s2)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);

      /* If both arguments are constants, evaluate at compile-time.  */
      if (p1 && p2)
        {
          const size_t r = strcspn (p1, p2);
          return build_int_cst (size_type_node, r);
        }

      /* If the first argument is "", return NULL_TREE.  */
      if (p1 && *p1 == '\0')
        {
          /* Evaluate and ignore argument s2 in case it has
             side-effects.  */
          return omit_one_operand_loc (loc, size_type_node,
                                       size_zero_node, s2);
        }

      /* If the second argument is "", return __builtin_strlen(s1).  */
      if (p2 && *p2 == '\0')
        {
          tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);

          /* If the replacement _DECL isn't initialized, don't do the
             transformation.  */
          if (!fn)
            return NULL_TREE;

          return build_call_expr_loc (loc, fn, 1, s1);
        }
      return NULL_TREE;
    }
}
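
/* Illustrative note (editorial addition, not part of the original source):

     strspn ("aab", "a")   ==>  2                      (both constant)
     strcspn ("", s2)      ==>  0                      (s2 still evaluated)
     strcspn (s1, "")      ==>  __builtin_strlen (s1)  */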

/* Fold the next_arg or va_start call EXP.  Returns true if there was an error
   produced.  False otherwise.  This is done so that we don't output the error
   or warning twice or three times.  */

bool
fold_builtin_next_arg (tree exp, bool va_start_p)
{
  tree fntype = TREE_TYPE (current_function_decl);
  int nargs = call_expr_nargs (exp);
  tree arg;
  /* There is good chance the current input_location points inside the
     definition of the va_start macro (perhaps on the token for
     builtin) in a system header, so warnings will not be emitted.
     Use the location in real source code.  */
  source_location current_location =
    linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
                                              NULL);

  if (!stdarg_p (fntype))
    {
      error ("%<va_start%> used in function with fixed args");
      return true;
    }

  if (va_start_p)
    {
      if (va_start_p && (nargs != 2))
        {
          error ("wrong number of arguments to function %<va_start%>");
          return true;
        }
      arg = CALL_EXPR_ARG (exp, 1);
    }
  /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
     when we checked the arguments and if needed issued a warning.  */
  else
    {
      if (nargs == 0)
        {
          /* Evidently an out of date version of <stdarg.h>; can't validate
             va_start's second argument, but can still work as intended.  */
          warning_at (current_location,
                      OPT_Wvarargs,
                      "%<__builtin_next_arg%> called without an argument");
          return true;
        }
      else if (nargs > 1)
        {
          error ("wrong number of arguments to function %<__builtin_next_arg%>");
          return true;
        }
      arg = CALL_EXPR_ARG (exp, 0);
    }

  if (TREE_CODE (arg) == SSA_NAME)
    arg = SSA_NAME_VAR (arg);

  /* We destructively modify the call to be __builtin_va_start (ap, 0)
     or __builtin_next_arg (0) the first time we see it, after checking
     the arguments and if needed issuing a warning.  */
  if (!integer_zerop (arg))
    {
      tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));

      /* Strip off all nops for the sake of the comparison.  This
         is not quite the same as STRIP_NOPS.  It does more.
         We must also strip off INDIRECT_EXPR for C++ reference
         parameters.  */
      while (CONVERT_EXPR_P (arg)
             || TREE_CODE (arg) == INDIRECT_REF)
        arg = TREE_OPERAND (arg, 0);
      if (arg != last_parm)
        {
          /* FIXME: Sometimes with the tree optimizers we can end up with
             something that is not the last argument even though the user
             used the last argument.  We just warn here and do not rewrite
             the argument, so wrong code may be generated because of it.  */
          warning_at (current_location,
                      OPT_Wvarargs,
                      "second parameter of %<va_start%> not last named argument");
        }

      /* Undefined by C99 7.15.1.4p4 (va_start):
         "If the parameter parmN is declared with the register storage
         class, with a function or array type, or with a type that is
         not compatible with the type that results after application of
         the default argument promotions, the behavior is undefined."
      */
      else if (DECL_REGISTER (arg))
        {
          warning_at (current_location,
                      OPT_Wvarargs,
                      "undefined behaviour when second parameter of "
                      "%<va_start%> is declared with %<register%> storage");
        }

      /* We want to verify the second parameter just once before the tree
         optimizers are run and then avoid keeping it in the tree,
         as otherwise we could warn even for correct code like:
         void foo (int i, ...)
         { va_list ap; i++; va_start (ap, i); va_end (ap); } */
      if (va_start_p)
        CALL_EXPR_ARG (exp, 1) = integer_zero_node;
      else
        CALL_EXPR_ARG (exp, 0) = integer_zero_node;
    }
  return false;
}
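
/* Illustrative note (editorial addition, not part of the original source):
   after the checks above, a correct use such as

     void foo (int i, ...)
     { va_list ap; va_start (ap, i); va_end (ap); }

   has its __builtin_va_start (ap, i) rewritten to __builtin_va_start (ap, 0),
   so later passes neither re-check nor re-warn about the second operand.  */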

/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
   ORIG may be null if this is a 2-argument call.  We don't attempt to
   simplify calls with more than 3 arguments.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.  If IGNORED is true, it means that
   the caller does not use the returned value of the function.  */

static tree
fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
                      tree orig, int ignored)
{
  tree call, retval;
  const char *fmt_str = NULL;

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;
  if (orig && !validate_arg (orig, POINTER_TYPE))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  call = NULL_TREE;
  retval = NULL_TREE;

  if (!init_target_chars ())
    return NULL_TREE;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
        return NULL_TREE;

      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
        return NULL_TREE;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
         'format' is known to contain no % formats.  */
      call = build_call_expr_loc (loc, fn, 2, dest, fmt);
      if (!ignored)
        retval = build_int_cst (integer_type_node, strlen (fmt_str));
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn;
      fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
        return NULL_TREE;

      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
        return NULL_TREE;

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
      if (!ignored)
        {
          retval = c_strlen (orig, 1);
          if (!retval || TREE_CODE (retval) != INTEGER_CST)
            return NULL_TREE;
        }
      call = build_call_expr_loc (loc, fn, 2, dest, orig);
    }

  if (call && retval)
    {
      retval = fold_convert_loc
        (loc, TREE_TYPE (TREE_TYPE (builtin_decl_implicit (BUILT_IN_SPRINTF))),
         retval);
      return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
    }
  else
    return call;
}
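
/* Illustrative note (editorial addition, not part of the original source):

     sprintf (buf, "hello")   ==>  strcpy (buf, "hello")   (value 5 if used)
     sprintf (buf, "%s", s)   ==>  strcpy (buf, s)         (when the return
                                        value is ignored or strlen (s) is a
                                        known constant)  */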

/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
   FMT, and ORIG.  ORIG may be null if this is a 3-argument call.  We don't
   attempt to simplify calls with more than 4 arguments.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.  If IGNORED is true, it means that
   the caller does not use the returned value of the function.  */

static tree
fold_builtin_snprintf (location_t loc, tree dest, tree destsize, tree fmt,
                       tree orig, int ignored)
{
  tree call, retval;
  const char *fmt_str = NULL;
  unsigned HOST_WIDE_INT destlen;

  /* Verify the required arguments in the original call.  We deal with two
     types of snprintf() calls: 'snprintf (str, cst, fmt)' and
     'snprintf (dest, cst, "%s", orig)'.  */
  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (destsize, INTEGER_TYPE)
      || !validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;
  if (orig && !validate_arg (orig, POINTER_TYPE))
    return NULL_TREE;

  if (!tree_fits_uhwi_p (destsize))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  call = NULL_TREE;
  retval = NULL_TREE;

  if (!init_target_chars ())
    return NULL_TREE;

  destlen = tree_to_uhwi (destsize);

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      size_t len = strlen (fmt_str);

      /* Don't optimize snprintf (buf, 4, "abc", ptr++).  */
      if (orig)
        return NULL_TREE;

      /* We could expand this as
         memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
         or to
         memcpy (str, fmt_with_nul_at_cstm1, cst);
         but in the former case that might increase code size
         and in the latter case grow .rodata section too much.
         So punt for now.  */
      if (len >= destlen)
        return NULL_TREE;

      if (!fn)
        return NULL_TREE;

      /* Convert snprintf (str, cst, fmt) into strcpy (str, fmt) when
         'format' is known to contain no % formats and
         strlen (fmt) < cst.  */
      call = build_call_expr_loc (loc, fn, 2, dest, fmt);

      if (!ignored)
        retval = build_int_cst (integer_type_node, strlen (fmt_str));
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      unsigned HOST_WIDE_INT origlen;

      /* Don't crash on snprintf (str1, cst, "%s").  */
      if (!orig)
        return NULL_TREE;

      retval = c_strlen (orig, 1);
      if (!retval || !tree_fits_uhwi_p (retval))
        return NULL_TREE;

      origlen = tree_to_uhwi (retval);
      /* We could expand this as
         memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
         or to
         memcpy (str1, str2_with_nul_at_cstm1, cst);
         but in the former case that might increase code size
         and in the latter case grow .rodata section too much.
         So punt for now.  */
      if (origlen >= destlen)
        return NULL_TREE;

      /* Convert snprintf (str1, cst, "%s", str2) into
         strcpy (str1, str2) if strlen (str2) < cst.  */
      if (!fn)
        return NULL_TREE;

      call = build_call_expr_loc (loc, fn, 2, dest, orig);

      if (ignored)
        retval = NULL_TREE;
    }

  if (call && retval)
    {
      tree fn = builtin_decl_explicit (BUILT_IN_SNPRINTF);
      retval = fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fn)), retval);
      return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
    }
  else
    return call;
}

/* Expand a call EXP to __builtin_object_size.  */

rtx
expand_builtin_object_size (tree exp)
{
  tree ost;
  int object_size_type;
  tree fndecl = get_callee_fndecl (exp);

  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      error ("%Kfirst argument of %D must be a pointer, second integer constant",
             exp, fndecl);
      expand_builtin_trap ();
      return const0_rtx;
    }

  ost = CALL_EXPR_ARG (exp, 1);
  STRIP_NOPS (ost);

  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    {
      error ("%Klast argument of %D is not integer constant between 0 and 3",
             exp, fndecl);
      expand_builtin_trap ();
      return const0_rtx;
    }

  object_size_type = tree_to_shwi (ost);

  return object_size_type < 2 ? constm1_rtx : const0_rtx;
}
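
/* Illustrative note (editorial addition, not part of the original source):
   a call that survives to expansion time has an object of unknown size,
   so the documented failure values are produced here:

     __builtin_object_size (p, 0)  ==>  (size_t) -1   (types 0 and 1)
     __builtin_object_size (p, 2)  ==>  (size_t) 0    (types 2 and 3)  */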

/* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   FCODE is the BUILT_IN_* to use.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
                           enum built_in_function fcode)
{
  tree dest, src, len, size;

  if (!validate_arglist (exp,
                         POINTER_TYPE,
                         fcode == BUILT_IN_MEMSET_CHK
                         ? INTEGER_TYPE : POINTER_TYPE,
                         INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dest = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);
  len = CALL_EXPR_ARG (exp, 2);
  size = CALL_EXPR_ARG (exp, 3);

  if (! tree_fits_uhwi_p (size))
    return NULL_RTX;

  if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
    {
      tree fn;

      if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
        {
          warning_at (tree_nonartificial_location (exp),
                      0, "%Kcall to %D will always overflow destination buffer",
                      exp, get_callee_fndecl (exp));
          return NULL_RTX;
        }

      fn = NULL_TREE;
      /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
         mem{cpy,pcpy,move,set} is available.  */
      switch (fcode)
        {
        case BUILT_IN_MEMCPY_CHK:
          fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
          break;
        case BUILT_IN_MEMPCPY_CHK:
          fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
          break;
        case BUILT_IN_MEMMOVE_CHK:
          fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
          break;
        case BUILT_IN_MEMSET_CHK:
          fn = builtin_decl_explicit (BUILT_IN_MEMSET);
          break;
        default:
          break;
        }

      if (! fn)
        return NULL_RTX;

      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_expr (fn, target, mode, EXPAND_NORMAL);
    }
  else if (fcode == BUILT_IN_MEMSET_CHK)
    return NULL_RTX;
  else
    {
      unsigned int dest_align = get_pointer_alignment (dest);

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
        return NULL_RTX;

      /* If SRC and DEST are the same (and not volatile), do nothing.  */
      if (operand_equal_p (src, dest, 0))
        {
          tree expr;

          if (fcode != BUILT_IN_MEMPCPY_CHK)
            {
              /* Evaluate and ignore LEN in case it has side-effects.  */
              expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
              return expand_expr (dest, target, mode, EXPAND_NORMAL);
            }

          expr = fold_build_pointer_plus (dest, len);
          return expand_expr (expr, target, mode, EXPAND_NORMAL);
        }

      /* __memmove_chk special case.  */
      if (fcode == BUILT_IN_MEMMOVE_CHK)
        {
          unsigned int src_align = get_pointer_alignment (src);

          if (src_align == 0)
            return NULL_RTX;

          /* If src is categorized for a readonly section we can use
             normal __memcpy_chk.  */
          if (readonly_data_expr (src))
            {
              tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
              if (!fn)
                return NULL_RTX;
              fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
                                          dest, src, len, size);
              gcc_assert (TREE_CODE (fn) == CALL_EXPR);
              CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
              return expand_expr (fn, target, mode, EXPAND_NORMAL);
            }
        }
      return NULL_RTX;
    }
}
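
/* Illustrative note (editorial addition, not part of the original source)
   of the _FORTIFY_SOURCE expansion above: with a constant length and an
   unknown object size (SIZE == (size_t) -1),

     __builtin___memcpy_chk (d, s, 16, __builtin_object_size (d, 0))
       ==>  memcpy (d, s, 16)

   whereas a known size smaller than the length keeps the checking call
   and emits the "will always overflow" warning instead.  */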

/* Emit warning if a buffer overflow is detected at compile time.  */

static void
maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
{
  int is_strlen = 0;
  tree len, size;
  location_t loc = tree_nonartificial_location (exp);

  switch (fcode)
    {
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      /* For __strcat_chk the warning will be emitted only if overflowing
         by at least strlen (dest) + 1 bytes.  */
    case BUILT_IN_STRCAT_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 2);
      is_strlen = 1;
      break;
    case BUILT_IN_STRNCAT_CHK:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      len = CALL_EXPR_ARG (exp, 2);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    default:
      gcc_unreachable ();
    }

  if (!len || !size)
    return;

  if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
    return;

  if (is_strlen)
    {
      len = c_strlen (len, 1);
      if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
        return;
    }
  else if (fcode == BUILT_IN_STRNCAT_CHK)
    {
      tree src = CALL_EXPR_ARG (exp, 1);
      if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
        return;
      src = c_strlen (src, 1);
      if (! src || ! tree_fits_uhwi_p (src))
        {
          warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
                      exp, get_callee_fndecl (exp));
          return;
        }
      else if (tree_int_cst_lt (src, size))
        return;
    }
  else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
    return;

  warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
              exp, get_callee_fndecl (exp));
}

/* Emit warning if a buffer overflow is detected at compile time
   in __sprintf_chk/__vsprintf_chk calls.  */

static void
maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
{
  tree size, len, fmt;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call.  */

  if (nargs < 4)
    return;
  size = CALL_EXPR_ARG (exp, 2);
  fmt = CALL_EXPR_ARG (exp, 3);

  if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
    return;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return;

  if (!init_target_chars ())
    return;

  /* If the format doesn't contain % args or %%, we know its size.  */
  if (strchr (fmt_str, target_percent) == 0)
    len = build_int_cstu (size_type_node, strlen (fmt_str));
  /* If the format is "%s" and first ... argument is a string literal,
     we know it too.  */
  else if (fcode == BUILT_IN_SPRINTF_CHK
           && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree arg;

      if (nargs < 5)
        return;
      arg = CALL_EXPR_ARG (exp, 4);
      if (! POINTER_TYPE_P (TREE_TYPE (arg)))
        return;

      len = c_strlen (arg, 1);
      if (!len || ! tree_fits_uhwi_p (len))
        return;
    }
  else
    return;

  if (! tree_int_cst_lt (len, size))
    warning_at (tree_nonartificial_location (exp),
                0, "%Kcall to %D will always overflow destination buffer",
                exp, get_callee_fndecl (exp));
}

/* Emit warning if a free is called with address of a variable.  */

static void
maybe_emit_free_warning (tree exp)
{
  tree arg = CALL_EXPR_ARG (exp, 0);

  STRIP_NOPS (arg);
  if (TREE_CODE (arg) != ADDR_EXPR)
    return;

  arg = get_base_address (TREE_OPERAND (arg, 0));
  if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
    return;

  if (SSA_VAR_P (arg))
    warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
                "%Kattempt to free a non-heap object %qD", exp, arg);
  else
    warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
                "%Kattempt to free a non-heap object", exp);
}

/* Fold a call to __builtin_object_size with arguments PTR and OST,
   if possible.  */

static tree
fold_builtin_object_size (tree ptr, tree ost)
{
  unsigned HOST_WIDE_INT bytes;
  int object_size_type;

  if (!validate_arg (ptr, POINTER_TYPE)
      || !validate_arg (ost, INTEGER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (ost);

  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    return NULL_TREE;

  object_size_type = tree_to_shwi (ost);

  /* __builtin_object_size doesn't evaluate side-effects in its arguments;
     if there are any side-effects, it returns (size_t) -1 for types 0 and 1
     and (size_t) 0 for types 2 and 3.  */
  if (TREE_SIDE_EFFECTS (ptr))
    return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      bytes = compute_builtin_object_size (ptr, object_size_type);
      if (wi::fits_to_tree_p (bytes, size_type_node))
        return build_int_cstu (size_type_node, bytes);
    }
  else if (TREE_CODE (ptr) == SSA_NAME)
    {
      /* If object size is not known yet, delay folding until
         later.  Maybe subsequent passes will help determining
         it.  */
      bytes = compute_builtin_object_size (ptr, object_size_type);
      if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
          && wi::fits_to_tree_p (bytes, size_type_node))
        return build_int_cstu (size_type_node, bytes);
    }

  return NULL_TREE;
}

/* Fold a call to the __strcat_chk builtin FNDECL.  DEST, SRC, and SIZE
   are the arguments to the call.  */

static tree
fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
                         tree src, tree size)
{
  tree fn;
  const char *p;

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE)
      || !validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;

  p = c_getstr (src);
  /* If the SRC parameter is "", return DEST.  */
  if (p && *p == '\0')
    return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);

  if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
    return NULL_TREE;

  /* If __builtin_strcat_chk is used, assume strcat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRCAT);
  if (!fn)
    return NULL_TREE;

  return build_call_expr_loc (loc, fn, 2, dest, src);
}
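
/* Illustrative note (editorial addition, not part of the original source):

     __builtin___strcat_chk (d, "", os)  ==>  d
     __builtin___strcat_chk (d, s, -1)   ==>  strcat (d, s)

   where -1 denotes an unknown object size, i.e. no runtime check would
   ever fire.  */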

/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
   LEN, and SIZE.  */

static tree
fold_builtin_strncat_chk (location_t loc, tree fndecl,
                          tree dest, tree src, tree len, tree size)
{
  tree fn;
  const char *p;

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE)
      || !validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;

  p = c_getstr (src);
  /* If the SRC parameter is "" or if LEN is 0, return DEST.  */
  if (p && *p == '\0')
    return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
  else if (integer_zerop (len))
    return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);

  if (! tree_fits_uhwi_p (size))
    return NULL_TREE;

  if (! integer_all_onesp (size))
    {
      tree src_len = c_strlen (src, 1);
      if (src_len
          && tree_fits_uhwi_p (src_len)
          && tree_fits_uhwi_p (len)
          && ! tree_int_cst_lt (len, src_len))
        {
          /* If LEN >= strlen (SRC), optimize into __strcat_chk.  */
          fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
          if (!fn)
            return NULL_TREE;

          return build_call_expr_loc (loc, fn, 3, dest, src, size);
        }
      return NULL_TREE;
    }

  /* If __builtin_strncat_chk is used, assume strncat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
  if (!fn)
    return NULL_TREE;

  return build_call_expr_loc (loc, fn, 3, dest, src, len);
}

/* Builtins with folding operations that operate on "..." arguments
   need special handling; we need to store the arguments in a convenient
   data structure before attempting any folding.  Fortunately there are
   only a few builtins that fall into this category.  FNDECL is the
   function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
   result of the function call is ignored.  */

static tree
fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
                      bool ignore ATTRIBUTE_UNUSED)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  tree ret = NULL_TREE;

  switch (fcode)
    {
    case BUILT_IN_FPCLASSIFY:
      ret = fold_builtin_fpclassify (loc, exp);
      break;

    default:
      break;
    }
  if (ret)
    {
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}
/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
   FMT and ARG are the arguments to the call; we don't fold cases with
   more than 2 arguments, and ARG may be null if this is a 1-argument case.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.  FCODE is the BUILT_IN_*
   code of the function to be simplified.  */

static tree
fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
                     tree arg, bool ignore,
                     enum built_in_function fcode)
{
  tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (! ignore)
    return NULL_TREE;

  /* Verify the required arguments in the original call.  */
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
         unlocked functions exist explicitly.  */
      fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
      fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
    }
  else
    {
      fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
      fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
    }

  if (!init_target_chars ())
    return NULL_TREE;

  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
        {
          if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
            return NULL_TREE;

          if (!arg || !validate_arg (arg, POINTER_TYPE))
            return NULL_TREE;

          str = c_getstr (arg);
          if (str == NULL)
            return NULL_TREE;
        }
      else
        {
          /* The format specifier doesn't contain any '%' characters.  */
          if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
              && arg)
            return NULL_TREE;
          str = fmt_str;
        }

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
        return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
        {
          /* Given printf ("c"), where C is any one character, convert
             "c"[0] to an int and pass that to the replacement
             function.  */
          newarg = build_int_cst (integer_type_node, str[0]);
          if (fn_putchar)
            call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
        }
      else
        {
          /* If the string was "string\n", call puts("string").  */
          size_t len = strlen (str);
          if ((unsigned char)str[len - 1] == target_newline
              && (size_t) (int) len == len
              && (int) len > 0)
            {
              char *newstr;
              tree offset_node, string_cst;

              /* Create a NUL-terminated string that's one char shorter
                 than the original, stripping off the trailing '\n'.  */
              newarg = build_string_literal (len, str);
              string_cst = string_constant (newarg, &offset_node);
              gcc_checking_assert (string_cst
                                   && (TREE_STRING_LENGTH (string_cst)
                                       == (int) len)
                                   && integer_zerop (offset_node)
                                   && (unsigned char)
                                      TREE_STRING_POINTER (string_cst)[len - 1]
                                      == target_newline);
              /* build_string_literal creates a new STRING_CST,
                 modify it in place to avoid double copying.  */
              newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
              newstr[len - 1] = '\0';
              if (fn_puts)
                call = build_call_expr_loc (loc, fn_puts, 1, newarg);
            }
          else
            /* We'd like to arrange to call fputs(string,stdout) here,
               but we need stdout and don't have a way to get it yet.  */
            return NULL_TREE;
        }
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return NULL_TREE;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || !validate_arg (arg, POINTER_TYPE))
        return NULL_TREE;
      if (fn_puts)
        call = build_call_expr_loc (loc, fn_puts, 1, arg);
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || !validate_arg (arg, INTEGER_TYPE))
        return NULL_TREE;
      if (fn_putchar)
        call = build_call_expr_loc (loc, fn_putchar, 1, arg);
    }

  if (!call)
    return NULL_TREE;

  return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
}
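
/* Illustrative examples (editorial sketch, not part of the original
   source) of the rewrites performed above when the result is unused:

       printf ("hello\n");     =>  puts ("hello");
       printf ("x");           =>  putchar ('x');
       printf ("%s\n", str);   =>  puts (str);
       printf ("%c", ch);      =>  putchar (ch);
       printf ("");            =>  folded to the constant 0.  */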
/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
   FP, FMT, and ARG are the arguments to the call.  We don't fold calls with
   more than 3 arguments, and ARG may be null in the 2-argument case.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.  FCODE is the BUILT_IN_*
   code of the function to be simplified.  */

static tree
fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
                      tree fmt, tree arg, bool ignore,
                      enum built_in_function fcode)
{
  tree fn_fputc, fn_fputs, call = NULL_TREE;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (! ignore)
    return NULL_TREE;

  /* Verify the required arguments in the original call.  */
  if (!validate_arg (fp, POINTER_TYPE))
    return NULL_TREE;
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
         unlocked functions exist explicitly.  */
      fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
      fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
    }
  else
    {
      fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
      fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
    }

  if (!init_target_chars ())
    return NULL_TREE;

  /* If the format doesn't contain % args or %%, use fputs.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
          && arg)
        return NULL_TREE;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
        {
          /* If FP has side-effects, just wait until gimplification is
             real.  */
          if (TREE_SIDE_EFFECTS (fp))
            return NULL_TREE;

          return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
        }

      /* When "string" doesn't contain %, replace all cases of
         fprintf (fp, string) with fputs (string, fp).  The fputs
         builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
        call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return NULL_TREE;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || !validate_arg (arg, POINTER_TYPE))
        return NULL_TREE;
      if (fn_fputs)
        call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || !validate_arg (arg, INTEGER_TYPE))
        return NULL_TREE;
      if (fn_fputc)
        call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
    }

  if (!call)
    return NULL_TREE;

  return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
}
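
/* Illustrative examples (editorial sketch, not part of the original
   source) of the corresponding fprintf rewrites:

       fprintf (fp, "hello");     =>  fputs ("hello", fp);
       fprintf (fp, "%s", str);   =>  fputs (str, fp);
       fprintf (fp, "%c", ch);    =>  fputc (ch, fp);

   Note the argument order: the stream moves to the second position in
   the replacement calls.  */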
/* Initialize format string characters in the target charset.  */

static bool
init_target_chars (void)
{
  static bool init;
  if (!init)
    {
      target_newline = lang_hooks.to_target_charset ('\n');
      target_percent = lang_hooks.to_target_charset ('%');
      target_c = lang_hooks.to_target_charset ('c');
      target_s = lang_hooks.to_target_charset ('s');
      if (target_newline == 0 || target_percent == 0 || target_c == 0
          || target_s == 0)
        return false;

      target_percent_c[0] = target_percent;
      target_percent_c[1] = target_c;
      target_percent_c[2] = '\0';

      target_percent_s[0] = target_percent;
      target_percent_s[1] = target_s;
      target_percent_s[2] = '\0';

      target_percent_s_newline[0] = target_percent;
      target_percent_s_newline[1] = target_s;
      target_percent_s_newline[2] = target_newline;
      target_percent_s_newline[3] = '\0';

      init = true;
    }
  return true;
}
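
/* Editorial note (not part of the original source): the indirection
   through lang_hooks.to_target_charset matters for cross compilers
   whose host and target character sets differ, e.g. an ASCII host
   targeting an EBCDIC system.  Comparing a format string against the
   host literal "%s" would be wrong there; comparing against
   target_percent_s, built from target characters above, is safe.  */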
/* Helper function for do_mpfr_arg*().  Ensure M is a normal number
   and no overflow/underflow occurred.  INEXACT is true if M was not
   exactly calculated.  TYPE is the tree type for the result.  This
   function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail.  */

static tree
do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
      && (!flag_rounding_math || !inexact))
    {
      REAL_VALUE_TYPE rr;

      real_from_mpfr (&rr, m, type, GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
         check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
         but the mpfr_t is not, then we underflowed in the
         conversion.  */
      if (real_isfinite (&rr)
          && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
        {
          REAL_VALUE_TYPE rmode;

          real_convert (&rmode, TYPE_MODE (type), &rr);
          /* Proceed iff the specified mode can hold the value.  */
          if (real_identical (&rmode, &rr))
            return build_real (type, rmode);
        }
    }
  return NULL_TREE;
}
/* Helper function for do_mpc_arg*().  Ensure M is a normal complex
   number and no overflow/underflow occurred.  INEXACT is true if M
   was not exactly calculated.  TYPE is the tree type for the result.
   This function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail; if
   FORCE_CONVERT is true, bypass the checks.  */

static tree
do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (force_convert
      || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
          && !mpfr_overflow_p () && !mpfr_underflow_p ()
          && (!flag_rounding_math || !inexact)))
    {
      REAL_VALUE_TYPE re, im;

      real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
      real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
         check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
         but the mpfr_t is not, then we underflowed in the
         conversion.  */
      if (force_convert
          || (real_isfinite (&re) && real_isfinite (&im)
              && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
              && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
        {
          REAL_VALUE_TYPE re_mode, im_mode;

          real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
          real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
          /* Proceed iff the specified mode can hold the value.  */
          if (force_convert
              || (real_identical (&re_mode, &re)
                  && real_identical (&im_mode, &im)))
            return build_complex (type,
                                  build_real (TREE_TYPE (type), re_mode),
                                  build_real (TREE_TYPE (type), im_mode));
        }
    }
  return NULL_TREE;
}
/* If argument ARG is a REAL_CST, call the one-argument mpfr function
   FUNC on it and return the resulting value as a tree with type TYPE.
   If MIN and/or MAX are not NULL, then the supplied ARG must be
   within those bounds.  If INCLUSIVE is true, then MIN/MAX are
   acceptable values, otherwise they are not.  The mpfr precision is
   set to the precision of TYPE.  We assume that function FUNC returns
   zero if the result could be calculated exactly within the requested
   precision.  */

static tree
do_mpfr_arg1 (tree arg, tree type,
              int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
              const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
              bool inclusive)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      if (real_isfinite (ra)
          && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min))
          && (!max || real_compare (inclusive ? LE_EXPR : LT_EXPR, ra, max)))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          int inexact;
          mpfr_t m;

          mpfr_init2 (m, prec);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = func (m, m, rnd);
          result = do_mpfr_ckconv (m, type, inexact);
          mpfr_clear (m);
        }
    }

  return result;
}
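
/* Illustrative example (editorial sketch, not part of the original
   source): the single-argument math folders elsewhere in this file
   call this helper along the lines of

       do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, false);

   for sin, while acos, whose domain is [-1, 1], passes &dconstm1 and
   &dconst1 with INCLUSIVE true so that out-of-range constants are
   left unfolded.  */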
/* If argument ARG is a REAL_CST, call the two-argument mpfr function
   FUNC on it and return the resulting value as a tree with type TYPE.
   The mpfr precision is set to the precision of TYPE.  We assume that
   function FUNC returns zero if the result could be calculated
   exactly within the requested precision.  */

static tree
do_mpfr_arg2 (tree arg1, tree arg2, tree type,
              int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
    {
      const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
      const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);

      if (real_isfinite (ra1) && real_isfinite (ra2))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          int inexact;
          mpfr_t m1, m2;

          mpfr_inits2 (prec, m1, m2, NULL);
          mpfr_from_real (m1, ra1, GMP_RNDN);
          mpfr_from_real (m2, ra2, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = func (m1, m1, m2, rnd);
          result = do_mpfr_ckconv (m1, type, inexact);
          mpfr_clears (m1, m2, NULL);
        }
    }

  return result;
}
/* If argument ARG is a REAL_CST, call the three-argument mpfr function
   FUNC on it and return the resulting value as a tree with type TYPE.
   The mpfr precision is set to the precision of TYPE.  We assume that
   function FUNC returns zero if the result could be calculated
   exactly within the requested precision.  */

static tree
do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
              int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr,
                          mp_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);
  STRIP_NOPS (arg3);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
      && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
    {
      const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
      const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
      const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);

      if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          int inexact;
          mpfr_t m1, m2, m3;

          mpfr_inits2 (prec, m1, m2, m3, NULL);
          mpfr_from_real (m1, ra1, GMP_RNDN);
          mpfr_from_real (m2, ra2, GMP_RNDN);
          mpfr_from_real (m3, ra3, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = func (m1, m1, m2, m3, rnd);
          result = do_mpfr_ckconv (m1, type, inexact);
          mpfr_clears (m1, m2, m3, NULL);
        }
    }

  return result;
}
/* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
   the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
   If ARG_SINP and ARG_COSP are NULL then the result is returned
   as a complex value.
   The type is taken from the type of ARG and is used for setting the
   precision of the calculation and results.  */

static tree
do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
{
  tree const type = TREE_TYPE (arg);
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST
      && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      if (real_isfinite (ra))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          tree result_s, result_c;
          int inexact;
          mpfr_t m, ms, mc;

          mpfr_inits2 (prec, m, ms, mc, NULL);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = mpfr_sin_cos (ms, mc, m, rnd);
          result_s = do_mpfr_ckconv (ms, type, inexact);
          result_c = do_mpfr_ckconv (mc, type, inexact);
          mpfr_clears (m, ms, mc, NULL);
          if (result_s && result_c)
            {
              /* If we are to return in a complex value do so.  */
              if (!arg_sinp && !arg_cosp)
                return build_complex (build_complex_type (type),
                                      result_c, result_s);

              /* Dereference the sin/cos pointer arguments.  */
              arg_sinp = build_fold_indirect_ref (arg_sinp);
              arg_cosp = build_fold_indirect_ref (arg_cosp);
              /* Proceed if valid pointer types were passed in.  */
              if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp))
                    == TYPE_MAIN_VARIANT (type)
                  && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp))
                       == TYPE_MAIN_VARIANT (type))
                {
                  /* Set the values.  */
                  result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
                                          result_s);
                  TREE_SIDE_EFFECTS (result_s) = 1;
                  result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
                                          result_c);
                  TREE_SIDE_EFFECTS (result_c) = 1;
                  /* Combine the assignments into a compound expr.  */
                  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                    result_s, result_c));
                }
            }
        }
    }
  return result;
}
/* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
   two-argument mpfr order N Bessel function FUNC on them and return
   the resulting value as a tree with type TYPE.  The mpfr precision
   is set to the precision of TYPE.  We assume that function FUNC
   returns zero if the result could be calculated exactly within the
   requested precision.  */
static tree
do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
                  int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
                  const REAL_VALUE_TYPE *min, bool inclusive)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && tree_fits_shwi_p (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
    {
      const HOST_WIDE_INT n = tree_to_shwi (arg1);
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);

      if (n == (long)n
          && real_isfinite (ra)
          && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min)))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          int inexact;
          mpfr_t m;

          mpfr_init2 (m, prec);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = func (m, n, m, rnd);
          result = do_mpfr_ckconv (m, type, inexact);
          mpfr_clear (m);
        }
    }

  return result;
}
/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          tree result_rem;
          long integer_quo;
          mpfr_t m0, m1;

          mpfr_inits2 (prec, m0, m1, NULL);
          mpfr_from_real (m0, ra0, GMP_RNDN);
          mpfr_from_real (m1, ra1, GMP_RNDN);
          mpfr_clear_flags ();
          mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
          /* Remquo is independent of the rounding mode, so pass
             inexact=0 to do_mpfr_ckconv().  */
          result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
          mpfr_clears (m0, m1, NULL);
          if (result_rem)
            {
              /* MPFR calculates quo in the host's long so it may
                 return more bits in quo than the target int can hold
                 if sizeof(host long) > sizeof(target int).  This can
                 happen even for native compilers in LP64 mode.  In
                 these cases, modulo the quo value with the largest
                 number that the target int can hold while leaving one
                 bit for the sign.  */
              if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
                integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

              /* Dereference the quo pointer argument.  */
              arg_quo = build_fold_indirect_ref (arg_quo);
              /* Proceed iff a valid pointer type was passed in.  */
              if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
                {
                  /* Set the value.  */
                  tree result_quo
                    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
                                   build_int_cst (TREE_TYPE (arg_quo),
                                                  integer_quo));
                  TREE_SIDE_EFFECTS (result_quo) = 1;
                  /* Combine the quo assignment with the rem.  */
                  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                    result_quo, result_rem));
                }
            }
        }
    }
  return result;
}
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg)))
           == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
         negative integer.  */
      if (real_isfinite (ra)
          && ra->cl != rvc_zero
          && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          int inexact, sg;
          mpfr_t m;
          tree result_lg;

          mpfr_init2 (m, prec);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = mpfr_lgamma (m, &sg, m, rnd);
          result_lg = do_mpfr_ckconv (m, type, inexact);
          mpfr_clear (m);
          if (result_lg)
            {
              tree result_sg;

              /* Dereference the arg_sg pointer argument.  */
              arg_sg = build_fold_indirect_ref (arg_sg);
              /* Assign the signgam value into *arg_sg.  */
              result_sg = fold_build2 (MODIFY_EXPR,
                                       TREE_TYPE (arg_sg), arg_sg,
                                       build_int_cst (TREE_TYPE (arg_sg), sg));
              TREE_SIDE_EFFECTS (result_sg) = 1;
              /* Combine the signgam assignment with the lgamma result.  */
              result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                result_sg, result_lg));
            }
        }
    }

  return result;
}
/* If argument ARG is a COMPLEX_CST, call the one-argument mpc
   function FUNC on it and return the resulting value as a tree with
   type TYPE.  The mpfr precision is set to the precision of TYPE.  We
   assume that function FUNC returns zero if the result could be
   calculated exactly within the requested precision.  */

static tree
do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re
        = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *const im
        = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      if (real_isfinite (re) && real_isfinite (im))
        {
          const struct real_format *const fmt =
            REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          const mpc_rnd_t crnd
            = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
          int inexact;
          mpc_t m;

          mpc_init2 (m, prec);
          mpfr_from_real (mpc_realref (m), re, rnd);
          mpfr_from_real (mpc_imagref (m), im, rnd);
          mpfr_clear_flags ();
          inexact = func (m, m, crnd);
          result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
          mpc_clear (m);
        }
    }

  return result;
}
/* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
   mpc function FUNC on it and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

static tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
             int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0
        = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0
        = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1
        = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1
        = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
          || (real_isfinite (re0) && real_isfinite (im0)
              && real_isfinite (re1) && real_isfinite (im1)))
        {
          const struct real_format *const fmt =
            REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          const mpc_rnd_t crnd
            = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
          int inexact;
          mpc_t m0, m1;

          mpc_init2 (m0, prec);
          mpc_init2 (m1, prec);
          mpfr_from_real (mpc_realref (m0), re0, rnd);
          mpfr_from_real (mpc_imagref (m0), im0, rnd);
          mpfr_from_real (mpc_realref (m1), re1, rnd);
          mpfr_from_real (mpc_imagref (m1), im1, rnd);
          mpfr_clear_flags ();
          inexact = func (m0, m0, m1, crnd);
          result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
          mpc_clear (m0);
          mpc_clear (m1);
        }
    }

  return result;
}
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_stmt (gimple stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
                    ? gimple_call_arg_ptr (stmt, 0)
                    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
        return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
        return targetm.fold_builtin (fndecl, nargs, args, ignore);
      else
        {
          if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
            ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
          if (ret)
            {
              /* Propagate location information from original call to
                 expansion of builtin.  Otherwise things like
                 maybe_emit_chk_warning, that operate on the expansion
                 of a builtin, will use the wrong location information.  */
              if (gimple_has_location (stmt))
                {
                  tree realret = ret;
                  if (TREE_CODE (ret) == NOP_EXPR)
                    realret = TREE_OPERAND (ret, 0);
                  if (CAN_HAVE_LOCATION_P (realret)
                      && !EXPR_HAS_LOCATION (realret))
                    SET_EXPR_LOCATION (realret, loc);
                  return realret;
                }
              return ret;
            }
        }
    }
  return NULL_TREE;
}
/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  tree builtin;
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
              && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
              && asmspec != 0);

  builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);
  switch (DECL_FUNCTION_CODE (decl))
    {
    case BUILT_IN_MEMCPY:
      init_block_move_fn (asmspec);
      memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
      break;
    case BUILT_IN_MEMSET:
      init_block_clear_fn (asmspec);
      memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
      break;
    case BUILT_IN_MEMMOVE:
      memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
      break;
    case BUILT_IN_MEMCMP:
      memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
      break;
    case BUILT_IN_ABORT:
      abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
      break;
    case BUILT_IN_FFS:
      if (INT_TYPE_SIZE < BITS_PER_WORD)
        {
          set_user_assembler_libfunc ("ffs", asmspec);
          set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
                                                       MODE_INT, 0), "ffs");
        }
      break;
    default:
      break;
    }
}
/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
        /* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
        /* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
        /* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
        return true;

      default:
        return false;
      }

  return false;
}
/* Return true if DECL is a builtin that is not expensive, i.e., one that is
   most probably expanded inline into reasonably simple code.  This is a
   superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
        return true;

      default:
        return is_simple_builtin (decl);
      }

  return false;
}