/* Expand builtin functions.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
   2012 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
24 #include "coretypes.h"
33 #include "hard-reg-set.h"
36 #include "insn-config.h"
42 #include "typeclass.h"
46 #include "langhooks.h"
47 #include "basic-block.h"
48 #include "tree-mudflap.h"
49 #include "tree-flow.h"
50 #include "value-prof.h"
51 #include "diagnostic-core.h"
#ifndef PAD_VARARGS_DOWN
#define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
#endif

static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
struct target_builtins default_target_builtins;

struct target_builtins *this_target_builtins = &default_target_builtins;
/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[4]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Setup an array of _DECL trees, make sure each element is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info;
static const char *c_getstr (tree);
static rtx c_readstr (const char *, enum machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
static rtx result_vector (int, rtx);
#endif
static void expand_builtin_update_setjmp_buf (rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static void expand_errno_check (tree, rtx);
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
                                        enum machine_mode, int);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx,
                                       enum machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
static rtx expand_builtin_alloca (tree, bool);
static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_expect (location_t, tree, tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree fold_builtin_nan (tree, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static bool integer_valued_real_p (tree);
static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
static bool readonly_data_expr (tree);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_sqrt (location_t, tree, tree);
static tree fold_builtin_cbrt (location_t, tree, tree);
static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
static tree fold_builtin_cos (location_t, tree, tree, tree);
static tree fold_builtin_cosh (location_t, tree, tree, tree);
static tree fold_builtin_tan (tree, tree);
static tree fold_builtin_trunc (location_t, tree, tree);
static tree fold_builtin_floor (location_t, tree, tree);
static tree fold_builtin_ceil (location_t, tree, tree);
static tree fold_builtin_round (location_t, tree, tree);
static tree fold_builtin_int_roundingfn (location_t, tree, tree);
static tree fold_builtin_bitop (tree, tree);
static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
static tree fold_builtin_strchr (location_t, tree, tree, tree);
static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_strcmp (location_t, tree, tree);
static tree fold_builtin_strncmp (location_t, tree, tree, tree);
static tree fold_builtin_signbit (location_t, tree, tree);
static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
                                        enum tree_code);
static tree fold_builtin_n (location_t, tree, tree *, int, bool);
static tree fold_builtin_0 (location_t, tree, bool);
static tree fold_builtin_1 (location_t, tree, tree, bool);
static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
static tree fold_builtin_varargs (location_t, tree, tree, bool);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strstr (location_t, tree, tree, tree);
static tree fold_builtin_strrchr (location_t, tree, tree, tree);
static tree fold_builtin_strcat (location_t, tree, tree);
static tree fold_builtin_strncat (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);
static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
static tree fold_builtin_snprintf (location_t, tree, tree, tree, tree, int);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
                                      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);
static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
                                  enum built_in_function);
static bool init_target_chars (void);

static unsigned HOST_WIDE_INT target_newline;
static unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
static char target_percent_c[3];
static char target_percent_s[3];
static char target_percent_s_newline[4];
static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
                          const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_arg2 (tree, tree, tree,
                          int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_arg3 (tree, tree, tree, tree,
                          int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_sincos (tree, tree, tree);
static tree do_mpfr_bessel_n (tree, tree, tree,
                              int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
                              const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);
/* Return true if NAME starts with __builtin_, __sync_ or __atomic_.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  return false;
}
/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}
/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

static bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}
/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and N
   in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
                        unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode;
  int unsignedp, volatilep;
  unsigned int inner, align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                             &mode, &unsignedp, &volatilep, true);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
         alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
         allows the low bit to be used as a virtual bit, we know
         that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
        align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
        align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    {
      align = TYPE_ALIGN (TREE_TYPE (exp));
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
           || TREE_CODE (exp) == MEM_REF
           || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;

      if (TREE_CODE (addr) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
        {
          align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
                   & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
          align *= BITS_PER_UNIT;
          addr = TREE_OPERAND (addr, 0);
        }

      known_alignment
        = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* The alignment of the pointer operand in a TARGET_MEM_REF
         has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
        {
          if (TMR_INDEX (exp))
            {
              unsigned HOST_WIDE_INT step = 1;
              if (TMR_STEP (exp))
                step = TREE_INT_CST_LOW (TMR_STEP (exp));
              align = MIN (align, (step & -step) * BITS_PER_UNIT);
            }
          if (TMR_INDEX2 (exp))
            align = BITS_PER_UNIT;
          known_alignment = false;
        }

      /* When EXP is an actual memory reference then we can use
         TYPE_ALIGN of a pointer indirection to derive alignment.
         Do so only if get_pointer_alignment_1 did not reveal absolute
         alignment knowledge and if using that alignment would
         improve the situation.  */
      if (!addr_p && !known_alignment
          && TYPE_ALIGN (TREE_TYPE (exp)) > align)
        align = TYPE_ALIGN (TREE_TYPE (exp));
      else
        {
          /* Else adjust bitpos accordingly.  */
          bitpos += ptr_bitpos;
          if (TREE_CODE (exp) == MEM_REF
              || TREE_CODE (exp) == TARGET_MEM_REF)
            bitpos += mem_ref_offset (exp).low * BITS_PER_UNIT;
        }
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
         wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
        align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  inner = ~0U;
  while (offset)
    {
      tree next_offset;

      if (TREE_CODE (offset) == PLUS_EXPR)
        {
          next_offset = TREE_OPERAND (offset, 0);
          offset = TREE_OPERAND (offset, 1);
        }
      else
        next_offset = NULL;
      if (host_integerp (offset, 1))
        {
          /* Any overflow in calculating offset_bits won't change
             the alignment.  */
          unsigned offset_bits
            = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);

          if (offset_bits)
            inner = MIN (inner, (offset_bits & -offset_bits));
        }
      else if (TREE_CODE (offset) == MULT_EXPR
               && host_integerp (TREE_OPERAND (offset, 1), 1))
        {
          /* Any overflow in calculating offset_factor won't change
             the alignment.  */
          unsigned offset_factor
            = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
               * BITS_PER_UNIT);

          if (offset_factor)
            inner = MIN (inner, (offset_factor & -offset_factor));
        }
      else
        {
          inner = MIN (inner, BITS_PER_UNIT);
          break;
        }
      offset = next_offset;
    }
  /* Alignment is innermost object alignment adjusted by the constant
     and non-constant offset parts.  */
  align = MIN (align, inner);

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}
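
/* Illustrative example (not part of the compiler logic): if EXP is
   known to live at a byte address of the form 16*k + 4, then M is
   16 bytes and N is 4 bytes, i.e. on a target with 8-bit units the
   function stores *ALIGNP = 128 and *BITPOSP = 32.  */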
/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Otherwise return
   false and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
                        unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}
/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);
  return align;
}
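
/* For instance, if get_object_alignment_1 reports align == 128 and
   bitpos == 32, the object is only guaranteed to sit on a 32-bit
   boundary, and (bitpos & -bitpos) extracts exactly that: the lowest
   set bit of the misalignment.  */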
/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
                         unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
                                   alignp, bitposp, true);
  else if (TREE_CODE (exp) == SSA_NAME
           && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
        {
          *bitposp = ptr_misalign * BITS_PER_UNIT;
          *alignp = ptr_align * BITS_PER_UNIT;
          /* We cannot really tell whether this result is an approximation.  */
          return false;
        }
      else
        {
          *bitposp = 0;
          *alignp = BITS_PER_UNIT;
          return false;
        }
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
                  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}
/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);

  return align;
}
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
        return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_HERE (src);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
         compute the offset to the following null if we don't know where to
         start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
        if (ptr[i] == 0)
          return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
         has no internal zero bytes.  We can assume that the offset falls
         within the bounds of the string; otherwise, the programmer deserves
         what he gets.  Subtract the offset from the length of the string,
         and return that.  This would perhaps not be valid if we were dealing
         with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! host_integerp (offset_node, 0))
    offset = -1;
  else
    offset = tree_low_cst (offset_node, 0);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (! TREE_NO_WARNING (src))
        {
          warning_at (loc, 0, "offset outside bounds of constant string");
          TREE_NO_WARNING (src) = 1;
        }
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
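
/* Worked example (illustrative): for SRC equivalent to "foo\0bar" with
   a constant offset of 4, c_strlen searches from "bar" and returns 3;
   with no offset it also returns 3, stopping at the embedded NUL.
   With a non-constant offset it returns NULL_TREE, because "foo\0bar"
   contains an internal zero byte.  */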
/* Return a char pointer for a C string if it is a string constant
   or sum of string constant and integer constant.  */

static const char *
c_getstr (tree src)
{
  tree offset_node;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  if (offset_node == 0)
    return TREE_STRING_POINTER (src);
  else if (!host_integerp (offset_node, 1)
           || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
    return 0;

  return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
}
/* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, enum machine_mode mode)
{
  HOST_WIDE_INT c[2];
  HOST_WIDE_INT ch;
  unsigned int i, j;

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);

  c[0] = 0;
  c[1] = 0;
  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
        j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
          && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
        j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;
      gcc_assert (j < HOST_BITS_PER_DOUBLE_INT);

      if (ch)
        ch = (unsigned char) str[i];
      c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }
  return immed_double_const (c[0], c[1], mode);
}
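
/* Illustrative sketch (assumes a 32-bit little-endian target with
   8-bit units): reading SImode from STR = "abcd" packs the bytes as

     c[0] = 'a' | ('b' << 8) | ('c' << 16) | ('d' << 24) = 0x64636261

   so str[0] always lands in the byte the target would read first.  */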
/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to
   by P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  val = TREE_INT_CST_LOW (cst);
  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
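
/* For example, with 8-bit target and host chars the constant 65 yields
   *P == 'A' and a return of 0.  On a hypothetical target with
   CHAR_TYPE_SIZE == 16 but an 8-bit host char, the value 321 survives
   the target truncation but not the host one (321 != 65), so the
   function returns 1 instead of silently mangling the value.  */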
/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
          && (TREE_CODE (exp) == PARM_DECL
              || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
         frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
                        plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
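
/* As a concrete illustration: __builtin_return_address (0) typically
   expands to a single load at a fixed offset from the frame pointer,
   while __builtin_return_address (2) first walks the dynamic chain
   twice (two frame loads) before fetching the saved return address.  */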
/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
                                           GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
                  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
                            plus_constant (Pmode, buf_addr,
                                           2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
#ifdef HAVE_builtin_setjmp_setup
  if (HAVE_builtin_setjmp_setup)
    emit_insn (gen_builtin_setjmp_setup (buf_addr));
#endif

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
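
/* Resulting layout of the setjmp buffer (a sketch; the exact save-area
   size is machine-dependent):

     word 0           frame pointer (targetm.builtin_setjmp_frame_value)
     word 1           address of RECEIVER_LABEL
     word 2 onwards   stack save area in SA_MODE  */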
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  rtx chain;

  /* Clobber the FP when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    {
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
      /* This might change the hard frame pointer in ways that aren't
         apparent to early optimization passes, so force a clobber.  */
      emit_clobber (hard_frame_pointer_rtx);
    }

#if !HARD_FRAME_POINTER_IS_ARG_POINTER
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
        if (elim_regs[i].from == ARG_POINTER_REGNUM
            && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
          break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
        {
          /* Now restore our arg pointer from the address at which it
             was saved in our stack frame.  */
          emit_move_insn (crtl->args.internal_arg_pointer,
                          copy_to_reg (get_arg_pointer_save_area ()));
        }
    }
#endif

#ifdef HAVE_builtin_setjmp_receiver
  if (HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      {
        /* Nothing */
      }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack, insn, last;
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
                                               GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
                                                   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
         from expand_goto in stmt.c; see there for detailed comments.  */
#ifdef HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
        /* We have to pass a value to the nonlocal_goto pattern that will
           get copied into the static_chain pointer, but it does not matter
           what that value is, because builtin_setjmp does not use it.  */
        emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
        {
          lab = copy_to_reg (lab);

          emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
          emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

          emit_move_insn (hard_frame_pointer_rtx, fp);
          emit_stack_restore (SAVE_NONLOCAL, stack);

          emit_use (hard_frame_pointer_rtx);
          emit_use (stack_pointer_rtx);
          emit_indirect_jump (lab);
        }
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
        {
          add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
          break;
        }
      else if (CALL_P (insn))
        break;
    }
}
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp, insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
                      plus_constant (Pmode, r_save_area,
                                     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

#ifdef HAVE_nonlocal_goto
  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (HAVE_nonlocal_goto)
    emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
#endif
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
         not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
         conservatively assume that the target function makes use of it.
         The prologue of functions with nonlocal gotos must therefore
         initialize the GP register to the appropriate value, and we
         must then make sure that this value is live at the point
         of the jump.  (Note that this doesn't necessarily apply
         to targets with a nonlocal_goto pattern; they are free
         to implement it in their own way.  Note also that this is
         a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
          && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
        emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
        {
          add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
          break;
        }
      else if (CALL_P (insn))
        break;
    }

  return const0_rtx;
}
/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to correspond to the current
   stack pointer.  */

static void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
                   memory_address
                   (sa_mode,
                    plus_constant (Pmode, buf_addr,
                                   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
               " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

#ifdef HAVE_prefetch
  if (HAVE_prefetch)
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
        return;
    }
#endif

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
         && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
                     build_array_type (char_type_node,
                                       build_range_type (sizetype,
                                                         size_one_node, len)),
                     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
           && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
                                                     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
                         build_array_type (char_type_node,
                                           build_range_type (sizetype,
                                                             size_zero_node,
                                                             NULL)),
                         exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}
/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
         passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
        size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (FUNCTION_ARG_REGNO_P (regno))
          {
            mode = targetm.calls.get_raw_arg_mode (regno);

            gcc_assert (mode != VOIDmode);

            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            size += GET_MODE_SIZE (mode);
            apply_args_mode[regno] = mode;
          }
        else
          {
            apply_args_mode[regno] = VOIDmode;
          }
    }
  return size;
}
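
/* The rounding above is plain alignment arithmetic: e.g. with size == 4
   and an 8-byte register mode, CEIL (4, 8) * 8 == 8, so the slot is
   bumped to the next 8-byte boundary before the register is added.  */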
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (targetm.calls.function_value_regno_p (regno))
          {
            mode = targetm.calls.get_raw_result_mode (regno);

            gcc_assert (mode != VOIDmode);

            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            size += GET_MODE_SIZE (mode);
            apply_result_mode[regno] = mode;
          }
        else
          apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
         the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
        mem = adjust_address (result, mode, size);
        savevec[nelts++] = (savep
                            ? gen_rtx_SET (VOIDmode, mem, reg)
                            : gen_rtx_SET (VOIDmode, reg, mem));
        size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  enum machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;

        tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

        emit_move_insn (adjust_address (registers, mode, size), tem);
        size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
#ifdef STACK_GROWS_DOWNWARD
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  tem
    = force_operand (plus_constant (Pmode, tem, crtl->args.pretend_args_size),
                     NULL_RTX);
#endif
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
                      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;
    rtx seq;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
        && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx incoming_args, result, reg, dest, src, call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
                                       incoming_args, 0, OPTAB_LIB_WIDEN);
#endif

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
  else
#endif
    emit_stack_save (SAVE_BLOCK, &old_stack_level);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
#ifndef STACK_GROWS_DOWNWARD
  if (CONST_INT_P (argsize))
    dest = plus_constant (Pmode, dest, -INTVAL (argsize));
  else
    dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
#endif
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, regno);
        emit_move_insn (reg, adjust_address (arguments, mode, size));
        use_reg (&call_fusage, reg);
        size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
        use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
                                      result, result_vector (1, result)));
  else
#endif
#ifdef HAVE_call_value
  if (HAVE_call_value)
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
         express a call that sets more than one return register using
         call_value; use untyped_call for that.  In fact, untyped_call
         only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if ((mode = apply_result_mode[regno]) != VOIDmode)
          {
            gcc_assert (!valreg); /* HAVE_untyped_call required.  */

            valreg = gen_rtx_REG (mode, regno);
          }

      emit_call_insn (GEN_CALL_VALUE (valreg,
                                      gen_rtx_MEM (FUNCTION_MODE, function),
                                      const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
#endif
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
  else
#endif
    emit_stack_restore (SAVE_BLOCK, old_stack_level);
  fixup_args_size_notes (call_insn, get_last_insn (), 0);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
/* Perform an untyped return.  */

static void
expand_builtin_return (rtx result)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx reg;
  rtx call_fusage = 0;

  result = convert_memory_address (Pmode, result);

  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

#ifdef HAVE_untyped_return
  if (HAVE_untyped_return)
    {
      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
      emit_barrier ();
      return;
    }
#endif

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
        emit_move_insn (reg, adjust_address (result, mode, size));

        push_to_sequence (call_fusage);
        emit_use (reg);
        call_fusage = get_insns ();
        end_sequence ();
        size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insn (call_fusage);

  /* Return whatever value was restored by jumping directly to the end
     of the function.  */
  expand_naked_return ();
}
/* Used by expand_builtin_classify_type and fold_builtin_classify_type.  */

static enum type_class
type_to_class (tree type)
{
  switch (TREE_CODE (type))
    {
    case VOID_TYPE:        return void_type_class;
    case INTEGER_TYPE:     return integer_type_class;
    case ENUMERAL_TYPE:    return enumeral_type_class;
    case BOOLEAN_TYPE:     return boolean_type_class;
    case POINTER_TYPE:     return pointer_type_class;
    case REFERENCE_TYPE:   return reference_type_class;
    case OFFSET_TYPE:      return offset_type_class;
    case REAL_TYPE:        return real_type_class;
    case COMPLEX_TYPE:     return complex_type_class;
    case FUNCTION_TYPE:    return function_type_class;
    case METHOD_TYPE:      return method_type_class;
    case RECORD_TYPE:      return record_type_class;
    case UNION_TYPE:
    case QUAL_UNION_TYPE:  return union_type_class;
    case ARRAY_TYPE:       return (TYPE_STRING_FLAG (type)
                                   ? string_type_class : array_type_class);
    case LANG_TYPE:        return lang_type_class;
    default:               return no_type_class;
    }
}
/* Expand a call EXP to __builtin_classify_type.  */

static rtx
expand_builtin_classify_type (tree exp)
{
  if (call_expr_nargs (exp))
    return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
  return GEN_INT (no_type_class);
}
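
/* For example, __builtin_classify_type (1) folds to integer_type_class
   and __builtin_classify_type ("") to pointer_type_class (the array
   argument decays to a pointer); with no argument the call yields
   no_type_class.  */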

/* This helper macro, meant to be used in mathfn_built_in below,
   determines which among a set of three builtin math functions is
   appropriate for a given type mode.  The `F' and `L' cases are
   automatically generated from the `double' case.  */
#define CASE_MATHFN(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
  fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
  fcodel = BUILT_IN_MATHFN##L ; break;
/* Similar to above, but appends _R after any F/L suffix.  */
#define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
  fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
  fcodel = BUILT_IN_MATHFN##L_R ; break;
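
/* Illustration only, not part of the original sources: with the macro
   above, CASE_MATHFN (BUILT_IN_SQRT) expands to roughly

     case BUILT_IN_SQRT: case BUILT_IN_SQRTF: case BUILT_IN_SQRTL:
       fcode = BUILT_IN_SQRT; fcodef = BUILT_IN_SQRTF;
       fcodel = BUILT_IN_SQRTL; break;

   so one CASE_MATHFN line covers the double, float and long double
   variants of a math builtin and records all three codes at once.  */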

/* Return the mathematical function equivalent to FN but operating directly
   on TYPE, if available.  If IMPLICIT is true use the implicit builtin
   declaration, otherwise use the explicit declaration.  If we can't do the
   conversion, return zero.  */

static tree
mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
{
  enum built_in_function fcode, fcodef, fcodel, fcode2;

  switch (fn)
    {
      CASE_MATHFN (BUILT_IN_ACOS)
      CASE_MATHFN (BUILT_IN_ACOSH)
      CASE_MATHFN (BUILT_IN_ASIN)
      CASE_MATHFN (BUILT_IN_ASINH)
      CASE_MATHFN (BUILT_IN_ATAN)
      CASE_MATHFN (BUILT_IN_ATAN2)
      CASE_MATHFN (BUILT_IN_ATANH)
      CASE_MATHFN (BUILT_IN_CBRT)
      CASE_MATHFN (BUILT_IN_CEIL)
      CASE_MATHFN (BUILT_IN_CEXPI)
      CASE_MATHFN (BUILT_IN_COPYSIGN)
      CASE_MATHFN (BUILT_IN_COS)
      CASE_MATHFN (BUILT_IN_COSH)
      CASE_MATHFN (BUILT_IN_DREM)
      CASE_MATHFN (BUILT_IN_ERF)
      CASE_MATHFN (BUILT_IN_ERFC)
      CASE_MATHFN (BUILT_IN_EXP)
      CASE_MATHFN (BUILT_IN_EXP10)
      CASE_MATHFN (BUILT_IN_EXP2)
      CASE_MATHFN (BUILT_IN_EXPM1)
      CASE_MATHFN (BUILT_IN_FABS)
      CASE_MATHFN (BUILT_IN_FDIM)
      CASE_MATHFN (BUILT_IN_FLOOR)
      CASE_MATHFN (BUILT_IN_FMA)
      CASE_MATHFN (BUILT_IN_FMAX)
      CASE_MATHFN (BUILT_IN_FMIN)
      CASE_MATHFN (BUILT_IN_FMOD)
      CASE_MATHFN (BUILT_IN_FREXP)
      CASE_MATHFN (BUILT_IN_GAMMA)
      CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
      CASE_MATHFN (BUILT_IN_HUGE_VAL)
      CASE_MATHFN (BUILT_IN_HYPOT)
      CASE_MATHFN (BUILT_IN_ILOGB)
      CASE_MATHFN (BUILT_IN_ICEIL)
      CASE_MATHFN (BUILT_IN_IFLOOR)
      CASE_MATHFN (BUILT_IN_INF)
      CASE_MATHFN (BUILT_IN_IRINT)
      CASE_MATHFN (BUILT_IN_IROUND)
      CASE_MATHFN (BUILT_IN_ISINF)
      CASE_MATHFN (BUILT_IN_J0)
      CASE_MATHFN (BUILT_IN_J1)
      CASE_MATHFN (BUILT_IN_JN)
      CASE_MATHFN (BUILT_IN_LCEIL)
      CASE_MATHFN (BUILT_IN_LDEXP)
      CASE_MATHFN (BUILT_IN_LFLOOR)
      CASE_MATHFN (BUILT_IN_LGAMMA)
      CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
      CASE_MATHFN (BUILT_IN_LLCEIL)
      CASE_MATHFN (BUILT_IN_LLFLOOR)
      CASE_MATHFN (BUILT_IN_LLRINT)
      CASE_MATHFN (BUILT_IN_LLROUND)
      CASE_MATHFN (BUILT_IN_LOG)
      CASE_MATHFN (BUILT_IN_LOG10)
      CASE_MATHFN (BUILT_IN_LOG1P)
      CASE_MATHFN (BUILT_IN_LOG2)
      CASE_MATHFN (BUILT_IN_LOGB)
      CASE_MATHFN (BUILT_IN_LRINT)
      CASE_MATHFN (BUILT_IN_LROUND)
      CASE_MATHFN (BUILT_IN_MODF)
      CASE_MATHFN (BUILT_IN_NAN)
      CASE_MATHFN (BUILT_IN_NANS)
      CASE_MATHFN (BUILT_IN_NEARBYINT)
      CASE_MATHFN (BUILT_IN_NEXTAFTER)
      CASE_MATHFN (BUILT_IN_NEXTTOWARD)
      CASE_MATHFN (BUILT_IN_POW)
      CASE_MATHFN (BUILT_IN_POWI)
      CASE_MATHFN (BUILT_IN_POW10)
      CASE_MATHFN (BUILT_IN_REMAINDER)
      CASE_MATHFN (BUILT_IN_REMQUO)
      CASE_MATHFN (BUILT_IN_RINT)
      CASE_MATHFN (BUILT_IN_ROUND)
      CASE_MATHFN (BUILT_IN_SCALB)
      CASE_MATHFN (BUILT_IN_SCALBLN)
      CASE_MATHFN (BUILT_IN_SCALBN)
      CASE_MATHFN (BUILT_IN_SIGNBIT)
      CASE_MATHFN (BUILT_IN_SIGNIFICAND)
      CASE_MATHFN (BUILT_IN_SIN)
      CASE_MATHFN (BUILT_IN_SINCOS)
      CASE_MATHFN (BUILT_IN_SINH)
      CASE_MATHFN (BUILT_IN_SQRT)
      CASE_MATHFN (BUILT_IN_TAN)
      CASE_MATHFN (BUILT_IN_TANH)
      CASE_MATHFN (BUILT_IN_TGAMMA)
      CASE_MATHFN (BUILT_IN_TRUNC)
      CASE_MATHFN (BUILT_IN_Y0)
      CASE_MATHFN (BUILT_IN_Y1)
      CASE_MATHFN (BUILT_IN_YN)

      default:
	return NULL_TREE;
      }

  if (TYPE_MAIN_VARIANT (type) == double_type_node)
    fcode2 = fcode;
  else if (TYPE_MAIN_VARIANT (type) == float_type_node)
    fcode2 = fcodef;
  else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
    fcode2 = fcodel;
  else
    return NULL_TREE;

  if (implicit_p && !builtin_decl_implicit_p (fcode2))
    return NULL_TREE;

  return builtin_decl_explicit (fcode2);
}
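
/* Illustration only, not part of the original sources: for example,
   mathfn_built_in (float_type_node, BUILT_IN_SIN) maps the double
   builtin BUILT_IN_SIN to its float variant and returns the decl for
   sinf, provided the implicit builtin declaration is available.  */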

/* Like mathfn_built_in_1(), but always use the implicit array.  */

tree
mathfn_built_in (tree type, enum built_in_function fn)
{
  return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
}

/* If errno must be maintained, expand the RTL to check if the result,
   TARGET, of a built-in function call, EXP, is NaN, and if so set
   errno to EDOM.  */

static void
expand_errno_check (tree exp, rtx target)
{
  rtx lab = gen_label_rtx ();

  /* Test the result; if it is NaN, set errno=EDOM because
     the argument was not in the domain.  */
  do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
			   NULL_RTX, NULL_RTX, lab,
			   /* The jump is very likely.  */
			   REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));

#ifdef TARGET_EDOM
  /* If this built-in doesn't throw an exception, set errno directly.  */
  if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
    {
#ifdef GEN_ERRNO_RTX
      rtx errno_rtx = GEN_ERRNO_RTX;
#else
      rtx errno_rtx
	  = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
#endif
      emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
      emit_label (lab);
      return;
    }
#endif

  /* Make sure the library call isn't expanded as a tail call.  */
  CALL_EXPR_TAILCALL (exp) = 0;

  /* We can't set errno=EDOM directly; let the library call do it.
     Pop the arguments right away in case the call gets deleted.  */
  NO_DEFER_POP;
  expand_call (exp, target, 0);
  OK_DEFER_POP;
  emit_label (lab);
}
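
/* Illustration only, not part of the original sources: the check above
   relies on the IEEE rule that a NaN compares unequal to itself.  For
   something like y = sqrt (x) it emits, roughly,

     if (y == y) goto lab;   // almost always taken
     errno = EDOM;           // only reached when y is NaN
   lab:;

   so the common non-NaN path costs a single well-predicted branch.  */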

/* Expand a call to one of the builtin math functions (sqrt, exp, or log).
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  bool errno_set = false;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SQRT):
      errno_set = ! tree_expr_nonnegative_p (arg);
      builtin_optab = sqrt_optab;
      break;
    CASE_FLT_FN (BUILT_IN_EXP):
      errno_set = true; builtin_optab = exp_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
      errno_set = true; builtin_optab = exp10_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP2):
      errno_set = true; builtin_optab = exp2_optab; break;
    CASE_FLT_FN (BUILT_IN_EXPM1):
      errno_set = true; builtin_optab = expm1_optab; break;
    CASE_FLT_FN (BUILT_IN_LOGB):
      errno_set = true; builtin_optab = logb_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG):
      errno_set = true; builtin_optab = log_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG10):
      errno_set = true; builtin_optab = log10_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG2):
      errno_set = true; builtin_optab = log2_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG1P):
      errno_set = true; builtin_optab = log1p_optab; break;
    CASE_FLT_FN (BUILT_IN_ASIN):
      builtin_optab = asin_optab; break;
    CASE_FLT_FN (BUILT_IN_ACOS):
      builtin_optab = acos_optab; break;
    CASE_FLT_FN (BUILT_IN_TAN):
      builtin_optab = tan_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN):
      builtin_optab = atan_optab; break;
    CASE_FLT_FN (BUILT_IN_FLOOR):
      builtin_optab = floor_optab; break;
    CASE_FLT_FN (BUILT_IN_CEIL):
      builtin_optab = ceil_optab; break;
    CASE_FLT_FN (BUILT_IN_TRUNC):
      builtin_optab = btrunc_optab; break;
    CASE_FLT_FN (BUILT_IN_ROUND):
      builtin_optab = round_optab; break;
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
      builtin_optab = nearbyint_optab;
      if (flag_trapping_math)
	break;
      /* Else fallthrough and expand as rint.  */
    CASE_FLT_FN (BUILT_IN_RINT):
      builtin_optab = rint_optab; break;
    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
      builtin_optab = significand_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing
      && (!errno_set || !optimize_insn_for_size_p ()))
    {
      target = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into TARGET.
	 Set TARGET to wherever the result comes back.  */
      target = expand_unop (mode, builtin_optab, op0, target, 0);

      if (target != 0)
	{
	  if (errno_set)
	    expand_errno_check (exp, target);

	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return target;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  return expand_call (exp, target, target == const0_rtx);
}
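
/* Illustration only, not part of the original sources: on a target whose
   sqrt optab is implemented (e.g. via a sqrtdf2 insn pattern), a call
   such as y = __builtin_sqrt (x) expands to the single machine
   instruction, preceded by the errno/NaN check above when -fmath-errno
   is active and the argument is not provably nonnegative; otherwise the
   function falls through to expand_call and a plain call to sqrt is
   emitted.  */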

/* Expand a call to the builtin binary math functions (pow and atan2).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, insns;
  int op1_type = REAL_TYPE;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1;
  enum machine_mode mode;
  bool errno_set = true;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
    CASE_FLT_FN (BUILT_IN_LDEXP):
      op1_type = INTEGER_TYPE;
    default:
      break;
    }

  if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_POW):
      builtin_optab = pow_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN2):
      builtin_optab = atan2_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALB):
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
	return 0;
      builtin_optab = scalb_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
	return 0;
    /* Fall through... */
    CASE_FLT_FN (BUILT_IN_LDEXP):
      builtin_optab = ldexp_optab; break;
    CASE_FLT_FN (BUILT_IN_FMOD):
      builtin_optab = fmod_optab; break;
    CASE_FLT_FN (BUILT_IN_REMAINDER):
    CASE_FLT_FN (BUILT_IN_DREM):
      builtin_optab = remainder_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  target = gen_reg_rtx (mode);

  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  if (errno_set && optimize_insn_for_size_p ())
    return 0;

  /* Always stabilize the argument list.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);

  start_sequence ();

  /* Compute into TARGET.
     Set TARGET to wherever the result comes back.  */
  target = expand_binop (mode, builtin_optab, op0, op1,
			 target, 0, OPTAB_DIRECT);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (target == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  if (errno_set)
    expand_errno_check (exp, target);

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return target;
}

/* Expand a call to the builtin ternary math functions (fma).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, op2, insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1, arg2;
  enum machine_mode mode;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  arg2 = CALL_EXPR_ARG (exp, 2);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_FMA):
      builtin_optab = fma_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  target = gen_reg_rtx (mode);

  /* Always stabilize the argument list.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
  CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);
  op2 = expand_normal (arg2);

  start_sequence ();

  /* Compute into TARGET.
     Set TARGET to wherever the result comes back.  */
  target = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
			      target, 0);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (target == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return target;
}

/* Expand a call to the builtin sin and cos math functions.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      builtin_optab = sincos_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Check if sincos insn is available, otherwise fallback
     to sin or cos insn.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_FLT_FN (BUILT_IN_SIN):
	builtin_optab = sin_optab; break;
      CASE_FLT_FN (BUILT_IN_COS):
	builtin_optab = cos_optab; break;
      default:
	gcc_unreachable ();
      }

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
    {
      target = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into TARGET.
	 Set TARGET to wherever the result comes back.  */
      if (builtin_optab == sincos_optab)
	{
	  int result;

	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_SIN):
	      result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_COS):
	      result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  gcc_assert (result);
	}
      else
	target = expand_unop (mode, builtin_optab, op0, target, 0);

      if (target != 0)
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return target;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  target = expand_call (exp, target, target == const0_rtx);

  return target;
}

/* Given an interclass math builtin decl FNDECL and its argument ARG
   return an RTL instruction code that implements the functionality.
   If that isn't possible or available return CODE_FOR_nothing.  */

static enum insn_code
interclass_mathfn_icode (tree arg, tree fndecl)
{
  bool errno_set = false;
  optab builtin_optab = unknown_optab;
  enum machine_mode mode;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ILOGB):
      errno_set = true; builtin_optab = ilogb_optab; break;
    CASE_FLT_FN (BUILT_IN_ISINF):
      builtin_optab = isinf_optab; break;
    case BUILT_IN_ISNORMAL:
    case BUILT_IN_ISFINITE:
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      /* These builtins have no optabs (yet).  */
      break;
    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && errno_set)
    return CODE_FOR_nothing;

  /* Optab mode depends on the mode of the input argument.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (builtin_optab)
    return optab_handler (builtin_optab, mode);
  return CODE_FOR_nothing;
}

/* Expand a call to one of the builtin math functions that operate on
   floating point argument and output an integer result (ilogb, isinf,
   isnan, etc).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_interclass_mathfn (tree exp, rtx target)
{
  enum insn_code icode = CODE_FOR_nothing;
  rtx op0;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  icode = interclass_mathfn_icode (arg, fndecl);
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[1];
      rtx last = get_last_insn ();
      tree orig_arg = arg;

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      if (mode != GET_MODE (op0))
	op0 = convert_to_mode (mode, op0, 0);

      create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_legitimize_operands (icode, 0, 1, ops)
	  && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
	return ops[0].value;

      delete_insns_since (last);
      CALL_EXPR_ARG (exp, 0) = orig_arg;
    }

  return NULL_RTX;
}

/* Expand a call to the builtin sincos math function.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function.  */

static rtx
expand_builtin_sincos (tree exp)
{
  rtx op0, op1, op2, target1, target2;
  enum machine_mode mode;
  tree arg, sinp, cosp;
  int result;
  location_t loc = EXPR_LOCATION (exp);
  tree alias_type, alias_off;

  if (!validate_arglist (exp, REAL_TYPE,
			 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  sinp = CALL_EXPR_ARG (exp, 1);
  cosp = CALL_EXPR_ARG (exp, 2);

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Check if sincos insn is available, otherwise emit the call.  */
  if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  target1 = gen_reg_rtx (mode);
  target2 = gen_reg_rtx (mode);

  op0 = expand_normal (arg);
  alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
  alias_off = build_int_cst (alias_type, 0);
  op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
					sinp, alias_off));
  op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
					cosp, alias_off));

  /* Compute into target1 and target2.
     Set TARGET to wherever the result comes back.  */
  result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
  gcc_assert (result);

  /* Move target1 and target2 to the memory locations indicated
     by op1 and op2.  */
  emit_move_insn (op1, target1);
  emit_move_insn (op2, target2);

  return const0_rtx;
}
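
/* Illustration only, not part of the original sources: on a target with
   a sincos insn, sincos (x, &s, &c) becomes one two-output instruction
   computing both values into target1/target2 followed by two stores,
   instead of separate sin and cos library calls.  */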

/* Expand a call to the internal cexpi builtin to the sincos math function.
   EXP is the expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  */

static rtx
expand_builtin_cexpi (tree exp, rtx target)
{
  tree fndecl = get_callee_fndecl (exp);
  tree arg, type;
  enum machine_mode mode;
  rtx op0, op1, op2;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Try expanding via a sincos optab, fall back to emitting a libcall
     to sincos or cexp.  We are sure we have sincos or cexp because cexpi
     is only generated from sincos, cexp or if we have either of them.  */
  if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
    {
      op1 = gen_reg_rtx (mode);
      op2 = gen_reg_rtx (mode);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      /* Compute into op1 and op2.  */
      expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
    }
  else if (TARGET_HAS_SINCOS)
    {
      tree call, fn = NULL_TREE;
      tree top1, top2;
      rtx op1a, op2a;

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = builtin_decl_explicit (BUILT_IN_SINCOS);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
      else
	gcc_unreachable ();

      op1 = assign_temp (TREE_TYPE (arg), 1, 1);
      op2 = assign_temp (TREE_TYPE (arg), 1, 1);
      op1a = copy_addr_to_reg (XEXP (op1, 0));
      op2a = copy_addr_to_reg (XEXP (op2, 0));
      top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
      top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);

      /* Make sure not to fold the sincos call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
				      call, 3, arg, top1, top2));
    }
  else
    {
      tree call, fn = NULL_TREE, narg;
      tree ctype = build_complex_type (type);

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = builtin_decl_explicit (BUILT_IN_CEXPF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = builtin_decl_explicit (BUILT_IN_CEXP);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = builtin_decl_explicit (BUILT_IN_CEXPL);
      else
	gcc_unreachable ();

      /* If we don't have a decl for cexp create one.  This is the
	 friendliest fallback if the user calls __builtin_cexpi
	 without full target C99 function support.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;
	  const char *name = NULL;

	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	    name = "cexpf";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	    name = "cexp";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	    name = "cexpl";

	  fntype = build_function_type_list (ctype, ctype, NULL_TREE);
	  fn = build_fn_decl (name, fntype);
	}

      narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
			      build_real (type, dconst0), arg);

      /* Make sure not to fold the cexp call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      return expand_expr (build_call_nary (ctype, call, 1, narg),
			  target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now build the proper return type.  */
  return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
			      make_tree (TREE_TYPE (arg), op2),
			      make_tree (TREE_TYPE (arg), op1)),
		      target, VOIDmode, EXPAND_NORMAL);
}
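
/* Illustration only, not part of the original sources: the three
   strategies above mean that __builtin_cexpi (x) becomes either (1) a
   sincos insn plus a COMPLEX_EXPR of the two results, (2) a call to
   sincos (x, &s, &c) when the C library provides it, or (3) a call to
   cexp (0.0 + x*i) as the portable fallback.  */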

/* Conveniently construct a function call expression.  FNDECL names the
   function to be called, N is the number of arguments, and the "..."
   parameters are the argument expressions.  Unlike build_call_expr
   this doesn't fold the call, hence it will always return a CALL_EXPR.  */

static tree
build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
{
  va_list ap;
  tree fntype = TREE_TYPE (fndecl);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);

  va_start (ap, n);
  fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
  va_end (ap);
  SET_EXPR_LOCATION (fn, loc);
  return fn;
}
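
/* Illustration only, not part of the original sources: a typical use, as
   in the stpcpy expander further down, is

     tree call = build_call_nofold_loc (loc, fn, 2, dst, src);

   which yields a bare CALL_EXPR that later passes through expand_expr
   without being re-folded back into the builtin being lowered.  */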

/* Expand a call to one of the builtin rounding functions gcc defines
   as an extension (lfloor and lceil).  As these are gcc extensions we
   do not need to worry about setting errno to EDOM.
   If expanding via optab fails, lower expression to (int)(floor(x)).
   EXP is the expression that is a call to the builtin function;
   if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, insns, tmp;
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fallback_fn;
  tree fallback_fndecl;
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
      builtin_optab = lceil_optab;
      fallback_fn = BUILT_IN_CEIL;
      break;

    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      builtin_optab = lfloor_optab;
      fallback_fn = BUILT_IN_FLOOR;
      break;

    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  /* Compute into TARGET.  */
  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns).  */
  end_sequence ();

  /* Fall back to floating point rounding optab.  */
  fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);

  /* For non-C99 targets we may end up without a fallback fndecl here
     if the user called __builtin_lfloor directly.  In this case emit
     a call to the floor/ceil variants nevertheless.  This should result
     in the best user experience for not full C99 targets.  */
  if (fallback_fndecl == NULL_TREE)
    {
      tree fntype;
      const char *name = NULL;

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_ICEIL:
	case BUILT_IN_LCEIL:
	case BUILT_IN_LLCEIL:
	  name = "ceil";
	  break;
	case BUILT_IN_ICEILF:
	case BUILT_IN_LCEILF:
	case BUILT_IN_LLCEILF:
	  name = "ceilf";
	  break;
	case BUILT_IN_ICEILL:
	case BUILT_IN_LCEILL:
	case BUILT_IN_LLCEILL:
	  name = "ceill";
	  break;
	case BUILT_IN_IFLOOR:
	case BUILT_IN_LFLOOR:
	case BUILT_IN_LLFLOOR:
	  name = "floor";
	  break;
	case BUILT_IN_IFLOORF:
	case BUILT_IN_LFLOORF:
	case BUILT_IN_LLFLOORF:
	  name = "floorf";
	  break;
	case BUILT_IN_IFLOORL:
	case BUILT_IN_LFLOORL:
	case BUILT_IN_LLFLOORL:
	  name = "floorl";
	  break;
	default:
	  gcc_unreachable ();
	}

      fntype = build_function_type_list (TREE_TYPE (arg),
					 TREE_TYPE (arg), NULL_TREE);
      fallback_fndecl = build_fn_decl (name, fntype);
    }

  exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);

  tmp = expand_normal (exp);

  /* Truncate the result of floating point optab to integer
     via expand_fix ().  */
  target = gen_reg_rtx (mode);
  expand_fix (target, tmp, 0);

  return target;
}

/* Expand a call to one of the builtin math functions doing integer
   conversion (lrint).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn_2 (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg;
  enum machine_mode mode;
  enum built_in_function fallback_fn = BUILT_IN_NONE;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_IRINT):
      fallback_fn = BUILT_IN_LRINT;
      /* FALLTHRU */
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      builtin_optab = lrint_optab;
      break;

    CASE_FLT_FN (BUILT_IN_IROUND):
      fallback_fn = BUILT_IN_LROUND;
      /* FALLTHRU */
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      builtin_optab = lround_optab;
      break;

    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
    return NULL_RTX;

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (!flag_errno_math)
    {
      target = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      if (expand_sfix_optab (target, op0, builtin_optab))
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return target;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  if (fallback_fn != BUILT_IN_NONE)
    {
      /* Fall back to rounding to long int.  Use implicit_p 0 - for non-C99
	 targets, (int) round (x) should never be transformed into
	 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
	 a call to lround in the hope that the target provides at least some
	 C99 functions.  This should result in the best user experience for
	 not full C99 targets.  */
      tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
						fallback_fn, 0);

      exp = build_call_nofold_loc (EXPR_LOCATION (exp),
				   fallback_fndecl, 1, arg);

      target = expand_call (exp, NULL_RTX, target == const0_rtx);
      return convert_to_mode (mode, target, 0);
    }

  target = expand_call (exp, target, target == const0_rtx);

  return target;
}

/* Expand a call to the powi built-in mathematical function.  Return NULL_RTX if
   a normal call should be emitted rather than expanding the function
   in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_powi (tree exp, rtx target)
{
  tree arg0, arg1;
  rtx op0, op1;
  enum machine_mode mode;
  enum machine_mode mode2;

  if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Emit a libcall to libgcc.  */

  /* Mode of the 2nd argument must match that of an int.  */
  mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);

  if (target == NULL_RTX)
    target = gen_reg_rtx (mode);

  op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != mode)
    op0 = convert_to_mode (mode, op0, 0);
  op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
  if (GET_MODE (op1) != mode2)
    op1 = convert_to_mode (mode2, op1, 0);

  target = emit_library_call_value (optab_libfunc (powi_optab, mode),
				    target, LCT_CONST, mode, 2,
				    op0, mode, op1, mode2);

  return target;
}
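
/* Illustration only, not part of the original sources: optab_libfunc
   (powi_optab, mode) resolves to the libgcc helpers (typically
   __powisf2, __powidf2, ...), so the general __builtin_powi (x, n) is
   always a libcall here; the cheaper repeated-squaring expansion for
   small constant exponents is presumably done earlier, at the GIMPLE
   level, before this fallback is reached.  */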

/* Expand expression EXP which is a call to the strlen builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strlen (tree exp, rtx target,
		       enum machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      struct expand_operand ops[4];
      rtx pat;
      tree len;
      tree src = CALL_EXPR_ARG (exp, 0);
      rtx src_reg, before_strlen;
      enum machine_mode insn_mode = target_mode;
      enum insn_code icode = CODE_FOR_nothing;
      unsigned int align;

      /* If the length can be computed at compile-time, return it.  */
      len = c_strlen (src, 0);
      if (len)
	return expand_expr (len, target, target_mode, EXPAND_NORMAL);

      /* If the length can be computed at compile-time and is constant
	 integer, but there are side-effects in src, evaluate
	 src for side-effects, then return len.
	 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
	 can be optimized into: i++; x = 3;  */
      len = c_strlen (src, 1);
      if (len && TREE_CODE (len) == INTEGER_CST)
	{
	  expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return expand_expr (len, target, target_mode, EXPAND_NORMAL);
	}

      align = get_pointer_alignment (src) / BITS_PER_UNIT;

      /* If SRC is not a pointer type, don't do this operation inline.  */
      if (align == 0)
	return NULL_RTX;

      /* Bail out if we can't compute strlen in the right mode.  */
      while (insn_mode != VOIDmode)
	{
	  icode = optab_handler (strlen_optab, insn_mode);
	  if (icode != CODE_FOR_nothing)
	    break;

	  insn_mode = GET_MODE_WIDER_MODE (insn_mode);
	}
      if (insn_mode == VOIDmode)
	return NULL_RTX;

      /* Make a place to hold the source address.  We will not expand
	 the actual source until we are sure that the expansion will
	 not fail -- there are trees that cannot be expanded twice.  */
      src_reg = gen_reg_rtx (Pmode);

      /* Mark the beginning of the strlen sequence so we can emit the
	 source operand later.  */
      before_strlen = get_last_insn ();

      create_output_operand (&ops[0], target, insn_mode);
      create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
      create_integer_operand (&ops[2], 0);
      create_integer_operand (&ops[3], align);
      if (!maybe_expand_insn (icode, 4, ops))
	return NULL_RTX;

      /* Now that we are assured of success, expand the source.  */
      start_sequence ();
      pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
      if (pat != src_reg)
	{
#ifdef POINTERS_EXTEND_UNSIGNED
	  if (GET_MODE (pat) != Pmode)
	    pat = convert_to_mode (Pmode, pat,
				   POINTERS_EXTEND_UNSIGNED);
#endif
	  emit_move_insn (src_reg, pat);
	}
      pat = get_insns ();
      end_sequence ();

      if (before_strlen)
	emit_insn_after (pat, before_strlen);
      else
	emit_insn_before (pat, get_insns ());

      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (ops[0].value) == target_mode)
	target = ops[0].value;
      else if (target != 0)
	convert_move (target, ops[0].value, 0);
      else
	target = convert_to_mode (target_mode, ops[0].value, 0);

      return target;
    }
}

/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

static rtx
builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
			 enum machine_mode mode)
{
  const char *str = (const char *) data;

  gcc_assert (offset >= 0
	      && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
		  <= strlen (str) + 1));

  return c_readstr (str + offset, mode);
}

/* Expand a call EXP to the memcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memcpy (tree exp, rtx target)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src);
      unsigned int dest_align = get_pointer_alignment (dest);
      rtx dest_mem, src_mem, dest_addr, len_rtx;
      HOST_WIDE_INT expected_size = -1;
      unsigned int expected_align = 0;

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If SRC is not a pointer type, don't do this
	 operation in-line.  */
      if (src_align == 0)
	return NULL_RTX;

      if (currently_expanding_gimple_stmt)
	stringop_block_profile (currently_expanding_gimple_stmt,
				&expected_align, &expected_size);

      if (expected_align < dest_align)
	expected_align = dest_align;
      dest_mem = get_memory_rtx (dest, len);
      set_mem_align (dest_mem, dest_align);
      len_rtx = expand_normal (len);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
	 by pieces, we can avoid loading the string from memory
	 and only store the computed constants.  */
      if (src_str
	  && CONST_INT_P (len_rtx)
	  && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
	  && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
				  CONST_CAST (char *, src_str),
				  dest_align, false))
	{
	  dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				      builtin_memcpy_read_str,
				      CONST_CAST (char *, src_str),
				      dest_align, false, 0);
	  dest_mem = force_operand (XEXP (dest_mem, 0), target);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      src_mem = get_memory_rtx (src, len);
      set_mem_align (src_mem, src_align);

      /* Copy word part most expediently.  */
      dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
					 CALL_EXPR_TAILCALL (exp)
					 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
					 expected_align, expected_size);

      if (dest_addr == 0)
	{
	  dest_addr = force_operand (XEXP (dest_mem, 0), target);
	  dest_addr = convert_memory_address (ptr_mode, dest_addr);
	}
      return dest_addr;
    }
}
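
/* Illustration only, not part of the original sources: for a call such
   as memcpy (buf, "abc", 4) the length is constant and the source is a
   string literal, so the store_by_pieces path above turns the copy into
   one or two immediate stores and no memcpy call is emitted; the
   emit_block_move_hints path handles the general case.  */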

/* Expand a call EXP to the mempcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  If ENDP is 0 return the
   destination pointer, if ENDP is 1 return the end pointer ala
   mempcpy, and if ENDP is 2 return the end pointer minus one ala
   stpcpy.  */

static rtx
expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      return expand_builtin_mempcpy_args (dest, src, len,
					  target, mode, /*endp=*/ 1);
    }
}

/* Helper function to do the actual work for expand_builtin_mempcpy.  The
   arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_mempcpy.  */

static rtx
expand_builtin_mempcpy_args (tree dest, tree src, tree len,
			     rtx target, enum machine_mode mode, int endp)
{
  /* If return value is ignored, transform mempcpy into memcpy.  */
  if (target == const0_rtx && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
      tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
					   dest, src, len);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src);
      unsigned int dest_align = get_pointer_alignment (dest);
      rtx dest_mem, src_mem, len_rtx;

      /* If either SRC or DEST is not a pointer type, don't do this
	 operation in-line.  */
      if (dest_align == 0 || src_align == 0)
	return NULL_RTX;

      /* If LEN is not constant, call the normal function.  */
      if (! host_integerp (len, 1))
	return NULL_RTX;

      len_rtx = expand_normal (len);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
	 by pieces, we can avoid loading the string from memory
	 and only store the computed constants.  */
      if (src_str
	  && CONST_INT_P (len_rtx)
	  && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
	  && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
				  CONST_CAST (char *, src_str),
				  dest_align, false))
	{
	  dest_mem = get_memory_rtx (dest, len);
	  set_mem_align (dest_mem, dest_align);
	  dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				      builtin_memcpy_read_str,
				      CONST_CAST (char *, src_str),
				      dest_align, false, endp);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      if (CONST_INT_P (len_rtx)
	  && can_move_by_pieces (INTVAL (len_rtx),
				 MIN (dest_align, src_align)))
	{
	  dest_mem = get_memory_rtx (dest, len);
	  set_mem_align (dest_mem, dest_align);
	  src_mem = get_memory_rtx (src, len);
	  set_mem_align (src_mem, src_align);
	  dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
				     MIN (dest_align, src_align), endp);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      return NULL_RTX;
    }
}

#ifndef HAVE_movstr
# define HAVE_movstr 0
# define CODE_FOR_movstr CODE_FOR_nothing
#endif

/* Expand into a movstr instruction, if one is available.  Return NULL_RTX if
   we failed; the caller should emit a normal call, otherwise try to
   get the result in TARGET, if convenient.  If ENDP is 0 return the
   destination pointer, if ENDP is 1 return the end pointer ala
   mempcpy, and if ENDP is 2 return the end pointer minus one ala
   stpcpy.  */

static rtx
expand_movstr (tree dest, tree src, rtx target, int endp)
{
  struct expand_operand ops[3];
  rtx dest_mem;
  rtx src_mem;

  if (!HAVE_movstr)
    return NULL_RTX;

  dest_mem = get_memory_rtx (dest, NULL);
  src_mem = get_memory_rtx (src, NULL);
  if (!endp)
    {
      target = force_reg (Pmode, XEXP (dest_mem, 0));
      dest_mem = replace_equiv_address (dest_mem, target);
    }

  create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
  create_fixed_operand (&ops[1], dest_mem);
  create_fixed_operand (&ops[2], src_mem);
  expand_insn (CODE_FOR_movstr, 3, ops);

  if (endp && target != const0_rtx)
    {
      target = ops[0].value;
      /* movstr is supposed to set end to the address of the NUL
	 terminator.  If the caller requested a mempcpy-like return value,
	 adjust it.  */
      if (endp == 1 && target != const0_rtx)
	{
	  rtx tem = plus_constant (GET_MODE (target),
				   gen_lowpart (GET_MODE (target), target), 1);
	  emit_move_insn (target, force_operand (tem, NULL_RTX));
	}
    }
  return target;
}

/* Expand expression EXP, which is a call to the strcpy builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient (and in mode MODE if that's
   convenient).  */

static rtx
expand_builtin_strcpy (tree exp, rtx target)
{
  if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
   {
     tree dest = CALL_EXPR_ARG (exp, 0);
     tree src = CALL_EXPR_ARG (exp, 1);
     return expand_builtin_strcpy_args (dest, src, target);
   }
   return NULL_RTX;
}

/* Helper function to do the actual work for expand_builtin_strcpy.  The
   arguments to the builtin_strcpy call DEST and SRC are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_strcpy.  */

static rtx
expand_builtin_strcpy_args (tree dest, tree src, rtx target)
{
  return expand_movstr (dest, src, target, /*endp=*/0);
}

/* Expand a call EXP to the stpcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
{
  tree dst, src;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* If return value is ignored, transform stpcpy into strcpy.  */
  if (target == const0_rtx && builtin_decl_implicit_p (BUILT_IN_STRCPY))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      tree len, lenp1;
      rtx ret;

      /* Ensure we get an actual string whose length can be evaluated at
	 compile-time, not an expression containing a string.  This is
	 because the latter will potentially produce pessimized code
	 when used to produce the return value.  */
      if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
	return expand_movstr (dst, src, target, /*endp=*/2);

      lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
      ret = expand_builtin_mempcpy_args (dst, src, lenp1,
					 target, mode, /*endp=*/2);

      if (ret)
	return ret;

      if (TREE_CODE (len) == INTEGER_CST)
	{
	  rtx len_rtx = expand_normal (len);

	  if (CONST_INT_P (len_rtx))
	    {
	      ret = expand_builtin_strcpy_args (dst, src, target);

	      if (ret)
		{
		  if (! target)
		    {
		      if (mode != VOIDmode)
			target = gen_reg_rtx (mode);
		      else
			target = gen_reg_rtx (GET_MODE (ret));
		    }
		  if (GET_MODE (target) != GET_MODE (ret))
		    ret = gen_lowpart (GET_MODE (target), ret);

		  ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
		  ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
		  gcc_assert (ret);

		  return target;
		}
	    }
	}

      return expand_movstr (dst, src, target, /*endp=*/2);
    }
}

/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

static rtx
builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
			  enum machine_mode mode)
{
  const char *str = (const char *) data;

  if ((unsigned HOST_WIDE_INT) offset > strlen (str))
    return const0_rtx;

  return c_readstr (str + offset, mode);
}

/* Expand expression EXP, which is a call to the strncpy builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  */

static rtx
expand_builtin_strncpy (tree exp, rtx target)
{
  location_t loc = EXPR_LOCATION (exp);

  if (validate_arglist (exp,
			POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      tree slen = c_strlen (src, 1);

      /* We must be passed a constant len and src parameter.  */
      if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
	return NULL_RTX;

      slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

      /* We're required to pad with trailing zeros if the requested
	 len is greater than strlen(s2)+1.  In that case try to
	 use store_by_pieces, if it fails, punt.  */
      if (tree_int_cst_lt (slen, len))
	{
	  unsigned int dest_align = get_pointer_alignment (dest);
	  const char *p = c_getstr (src);
	  rtx dest_mem;

	  if (!p || dest_align == 0 || !host_integerp (len, 1)
	      || !can_store_by_pieces (tree_low_cst (len, 1),
				       builtin_strncpy_read_str,
				       CONST_CAST (char *, p),
				       dest_align, false))
	    return NULL_RTX;

	  dest_mem = get_memory_rtx (dest, len);
	  store_by_pieces (dest_mem, tree_low_cst (len, 1),
			   builtin_strncpy_read_str,
			   CONST_CAST (char *, p), dest_align, false, 0);
	  dest_mem = force_operand (XEXP (dest_mem, 0), target);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}
    }
  return NULL_RTX;
}

/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

static rtx
builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
			 enum machine_mode mode)
{
  const char *c = (const char *) data;
  char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));

  memset (p, *c, GET_MODE_SIZE (mode));

  return c_readstr (p, mode);
}

/* Callback routine for store_by_pieces.  Return the RTL of a register
   containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
   char value given in the RTL register data.  For example, if mode is
   4 bytes wide, return the RTL for 0x01010101*data.  */

static rtx
builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
			enum machine_mode mode)
{
  rtx target, coeff;
  size_t size;
  char *p;

  size = GET_MODE_SIZE (mode);
  if (size == 1)
    return (rtx) data;

  p = XALLOCAVEC (char, size);
  memset (p, 1, size);
  coeff = c_readstr (p, mode);

  target = convert_to_mode (mode, (rtx) data, 1);
  target = expand_mult (mode, target, coeff, NULL_RTX, 1);
  return force_reg (mode, target);
}
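
/* Illustration only, not part of the original sources: for SImode the
   coefficient read back by c_readstr is 0x01010101, so a fill byte of,
   say, 0x2a is replicated by a single multiplication,
   0x2a * 0x01010101 == 0x2a2a2a2a, which store_by_pieces can then
   store one word at a time.  */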

/* Expand expression EXP, which is a call to the memset builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient (and in mode MODE if that's
   convenient).  */

static rtx
expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree val = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      return expand_builtin_memset_args (dest, val, len, target, mode, exp);
    }
}

/* Helper function to do the actual work for expand_builtin_memset.  The
   arguments to the builtin_memset call DEST, VAL, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_memset.  */

static rtx
expand_builtin_memset_args (tree dest, tree val, tree len,
			    rtx target, enum machine_mode mode, tree orig_exp)
{
  tree fndecl, fn;
  enum built_in_function fcode;
  enum machine_mode val_mode;
  char c;
  unsigned int dest_align;
  rtx dest_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;

  dest_align = get_pointer_alignment (dest);

  /* If DEST is not a pointer type, don't do this operation in-line.  */
  if (dest_align == 0)
    return NULL_RTX;

  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
			    &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Evaluate and ignore VAL in case it has side-effects.  */
      expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (dest, target, mode, EXPAND_NORMAL);
    }

  /* Stabilize the arguments in case we fail.  */
  dest = builtin_save_expr (dest);
  val = builtin_save_expr (val);
  len = builtin_save_expr (len);

  len_rtx = expand_normal (len);
  dest_mem = get_memory_rtx (dest, len);
  val_mode = TYPE_MODE (unsigned_char_type_node);

  if (TREE_CODE (val) != INTEGER_CST)
    {
      rtx val_rtx;

      val_rtx = expand_normal (val);
      val_rtx = convert_to_mode (val_mode, val_rtx, 0);

      /* Assume that we can memset by pieces if we can store
       * the coefficients by pieces (in the required modes).
       * We can't pass builtin_memset_gen_str as that emits RTL.  */
      c = 1;
      if (host_integerp (len, 1)
	  && can_store_by_pieces (tree_low_cst (len, 1),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	{
	  val_rtx = force_reg (val_mode, val_rtx);
	  store_by_pieces (dest_mem, tree_low_cst (len, 1),
			   builtin_memset_gen_str, val_rtx, dest_align,
			   true, 0);
	}
      else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
					dest_align, expected_align,
					expected_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  if (target_char_cast (val, &c))
    goto do_libcall;

  if (c)
    {
      if (host_integerp (len, 1)
	  && can_store_by_pieces (tree_low_cst (len, 1),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	store_by_pieces (dest_mem, tree_low_cst (len, 1),
			 builtin_memset_read_str, &c, dest_align, true, 0);
      else if (!set_storage_via_setmem (dest_mem, len_rtx,
					gen_int_mode (c, val_mode),
					dest_align, expected_align,
					expected_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  set_mem_align (dest_mem, dest_align);
  dest_addr = clear_storage_hints (dest_mem, len_rtx,
				   CALL_EXPR_TAILCALL (orig_exp)
				   ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
				   expected_align, expected_size);

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  return dest_addr;

 do_libcall:
  fndecl = get_callee_fndecl (orig_exp);
  fcode = DECL_FUNCTION_CODE (fndecl);
  if (fcode == BUILT_IN_MEMSET)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
				dest, val, len);
  else if (fcode == BUILT_IN_BZERO)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
				dest, len);
  else
    gcc_unreachable ();
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
  return expand_call (fn, target, target == const0_rtx);
}

/* Expand expression EXP, which is a call to the bzero builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  */

static rtx
expand_builtin_bzero (tree exp)
{
  tree dest, size;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dest = CALL_EXPR_ARG (exp, 0);
  size = CALL_EXPR_ARG (exp, 1);

  /* New argument list transforming bzero(ptr x, int y) to
     memset(ptr x, int 0, size_t y).   This is done this way
     so that if it isn't expanded inline, we fall back to
     calling bzero instead of memset.  */

  return expand_builtin_memset_args (dest, integer_zero_node,
				     fold_convert_loc (loc,
						       size_type_node, size),
				     const0_rtx, VOIDmode, exp);
}
/* Expand expression EXP, which is a call to the memcmp built-in function.
   Return NULL_RTX if we failed and the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in mode
   MODE, if that's convenient).  */

static rtx
expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
		       ATTRIBUTE_UNUSED enum machine_mode mode)
{
  location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
     implementing memcmp because it will stop if it encounters two
     zero bytes.  */
#if defined HAVE_cmpmemsi
  {
    rtx arg1_rtx, arg2_rtx, arg3_rtx;
    rtx result, insn;
    tree arg1 = CALL_EXPR_ARG (exp, 0);
    tree arg2 = CALL_EXPR_ARG (exp, 1);
    tree len = CALL_EXPR_ARG (exp, 2);

    unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
    unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
    enum machine_mode insn_mode;

    if (HAVE_cmpmemsi)
      insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
    else
      return NULL_RTX;

    /* If we don't have POINTER_TYPE, call the function.  */
    if (arg1_align == 0 || arg2_align == 0)
      return NULL_RTX;

    /* Make a place to write the result of the instruction.  */
    result = target;
    if (! (result != 0
	   && REG_P (result) && GET_MODE (result) == insn_mode
	   && REGNO (result) >= FIRST_PSEUDO_REGISTER))
      result = gen_reg_rtx (insn_mode);

    arg1_rtx = get_memory_rtx (arg1, len);
    arg2_rtx = get_memory_rtx (arg2, len);
    arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));

    /* Set MEM_SIZE as appropriate.  */
    if (CONST_INT_P (arg3_rtx))
      {
	set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
	set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
      }

    if (HAVE_cmpmemsi)
      insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			   GEN_INT (MIN (arg1_align, arg2_align)));
    else
      gcc_unreachable ();

    if (insn)
      emit_insn (insn);
    else
      emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
			       TYPE_MODE (integer_type_node), 3,
			       XEXP (arg1_rtx, 0), Pmode,
			       XEXP (arg2_rtx, 0), Pmode,
			       convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
						TYPE_UNSIGNED (sizetype)),
			       TYPE_MODE (sizetype));

    /* Return the value in the proper mode for this function.  */
    mode = TYPE_MODE (TREE_TYPE (exp));
    if (GET_MODE (result) == mode)
      return result;
    else if (target != 0)
      {
	convert_move (target, result, 0);
	return target;
      }
    else
      return convert_to_mode (mode, result, 0);
  }
#endif /* HAVE_cmpmemsi.  */

  return NULL_RTX;
}
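/* An illustrative sketch (editor's addition, hypothetical user code, not
   part of GCC; kept under #if 0): why a string-compare pattern cannot
   implement memcmp, as the comment above notes.  memcmp must keep
   comparing past NUL bytes, so the two calls below must give different
   answers.  */
#if 0
#include <assert.h>
#include <string.h>

static void
nul_byte_example (void)
{
  const char a[3] = { 'x', '\0', '1' };
  const char b[3] = { 'x', '\0', '2' };

  assert (memcmp (a, b, 3) != 0);   /* Compares all three bytes.  */
  assert (strncmp (a, b, 3) == 0);  /* Stops at the embedded NUL.  */
}
#endif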
/* Expand expression EXP, which is a call to the strcmp builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

#if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
  if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
      || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
    {
      rtx arg1_rtx, arg2_rtx;
      rtx result, insn = NULL_RTX;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);

      unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
      unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
	return NULL_RTX;

      /* Stabilize the arguments in case gen_cmpstr(n)si fail.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);

      arg1_rtx = get_memory_rtx (arg1, NULL);
      arg2_rtx = get_memory_rtx (arg2, NULL);

#ifdef HAVE_cmpstrsi
      /* Try to call cmpstrsi.  */
      if (HAVE_cmpstrsi)
	{
	  enum machine_mode insn_mode
	    = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;

	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && REG_P (result) && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
			       GEN_INT (MIN (arg1_align, arg2_align)));
	}
#endif
#ifdef HAVE_cmpstrnsi
      /* Try to determine at least one length and call cmpstrnsi.  */
      if (!insn && HAVE_cmpstrnsi)
	{
	  tree len;
	  rtx arg3_rtx;

	  enum machine_mode insn_mode
	    = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
	  tree len1 = c_strlen (arg1, 1);
	  tree len2 = c_strlen (arg2, 1);

	  if (len1)
	    len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
	  if (len2)
	    len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

	  /* If we don't have a constant length for the first, use the length
	     of the second, if we know it.  We don't require a constant for
	     this case; some cost analysis could be done if both are available
	     but neither is constant.  For now, assume they're equally cheap,
	     unless one has side effects.  If both strings have constant
	     lengths, use the smaller.  */

	  if (!len1)
	    len = len2;
	  else if (!len2)
	    len = len1;
	  else if (TREE_SIDE_EFFECTS (len1))
	    len = len2;
	  else if (TREE_SIDE_EFFECTS (len2))
	    len = len1;
	  else if (TREE_CODE (len1) != INTEGER_CST)
	    len = len2;
	  else if (TREE_CODE (len2) != INTEGER_CST)
	    len = len1;
	  else if (tree_int_cst_lt (len1, len2))
	    len = len1;
	  else
	    len = len2;

	  /* If both arguments have side effects, we cannot optimize.  */
	  if (!len || TREE_SIDE_EFFECTS (len))
	    goto do_libcall;

	  arg3_rtx = expand_normal (len);

	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && REG_P (result) && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
				GEN_INT (MIN (arg1_align, arg2_align)));
	}
#endif

      if (insn)
	{
	  enum machine_mode mode;
	  emit_insn (insn);

	  /* Return the value in the proper mode for this function.  */
	  mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* Expand the library call ourselves using a stabilized argument
	 list to avoid re-evaluating the function's arguments twice.  */
#ifdef HAVE_cmpstrnsi
    do_libcall:
#endif
      fndecl = get_callee_fndecl (exp);
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
#endif
  return NULL_RTX;
}
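/* An illustrative sketch (editor's addition, hypothetical user code, not
   part of GCC; kept under #if 0): why the arguments are wrapped in
   builtin_save_expr above.  Expansion may first try the cmpstr patterns
   and then fall back to a library call built from the same argument
   list, yet an argument with side effects must be evaluated exactly
   once.  */
#if 0
#include <string.h>

extern char *next_token (void);	/* Hypothetical; has side effects.  */

int
compare_next (const char *s)
{
  /* next_token () may run only once, even though the compiler
     internally prepares both an inline attempt and a fallback call.  */
  return strcmp (next_token (), s);
}
#endif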
/* Expand expression EXP, which is a call to the strncmp builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
			ATTRIBUTE_UNUSED enum machine_mode mode)
{
  location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* If c_strlen can determine an expression for one of the string
     lengths, and it doesn't have side effects, then emit cmpstrnsi
     using length MIN(strlen(string)+1, arg3).  */
#ifdef HAVE_cmpstrnsi
  if (HAVE_cmpstrnsi)
  {
    tree len, len1, len2;
    rtx arg1_rtx, arg2_rtx, arg3_rtx;
    rtx result, insn;
    tree fndecl, fn;
    tree arg1 = CALL_EXPR_ARG (exp, 0);
    tree arg2 = CALL_EXPR_ARG (exp, 1);
    tree arg3 = CALL_EXPR_ARG (exp, 2);

    unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
    unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
    enum machine_mode insn_mode
      = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;

    len1 = c_strlen (arg1, 1);
    len2 = c_strlen (arg2, 1);

    if (len1)
      len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
    if (len2)
      len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);

    /* If we don't have a constant length for the first, use the length
       of the second, if we know it.  We don't require a constant for
       this case; some cost analysis could be done if both are available
       but neither is constant.  For now, assume they're equally cheap,
       unless one has side effects.  If both strings have constant
       lengths, use the smaller.  */

    if (!len1)
      len = len2;
    else if (!len2)
      len = len1;
    else if (TREE_SIDE_EFFECTS (len1))
      len = len2;
    else if (TREE_SIDE_EFFECTS (len2))
      len = len1;
    else if (TREE_CODE (len1) != INTEGER_CST)
      len = len2;
    else if (TREE_CODE (len2) != INTEGER_CST)
      len = len1;
    else if (tree_int_cst_lt (len1, len2))
      len = len1;
    else
      len = len2;

    /* If both arguments have side effects, we cannot optimize.  */
    if (!len || TREE_SIDE_EFFECTS (len))
      return NULL_RTX;

    /* The actual new length parameter is MIN(len,arg3).  */
    len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
			   fold_convert_loc (loc, TREE_TYPE (len), arg3));

    /* If we don't have POINTER_TYPE, call the function.  */
    if (arg1_align == 0 || arg2_align == 0)
      return NULL_RTX;

    /* Make a place to write the result of the instruction.  */
    result = target;
    if (! (result != 0
	   && REG_P (result) && GET_MODE (result) == insn_mode
	   && REGNO (result) >= FIRST_PSEUDO_REGISTER))
      result = gen_reg_rtx (insn_mode);

    /* Stabilize the arguments in case gen_cmpstrnsi fails.  */
    arg1 = builtin_save_expr (arg1);
    arg2 = builtin_save_expr (arg2);
    len = builtin_save_expr (len);

    arg1_rtx = get_memory_rtx (arg1, len);
    arg2_rtx = get_memory_rtx (arg2, len);
    arg3_rtx = expand_normal (len);
    insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			  GEN_INT (MIN (arg1_align, arg2_align)));
    if (insn)
      {
	emit_insn (insn);

	/* Return the value in the proper mode for this function.  */
	mode = TYPE_MODE (TREE_TYPE (exp));
	if (GET_MODE (result) == mode)
	  return result;
	if (target == 0)
	  return convert_to_mode (mode, result, 0);
	convert_move (target, result, 0);
	return target;
      }

    /* Expand the library call ourselves using a stabilized argument
       list to avoid re-evaluating the function's arguments twice.  */
    fndecl = get_callee_fndecl (exp);
    fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
				arg1, arg2, len);
    gcc_assert (TREE_CODE (fn) == CALL_EXPR);
    CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
    return expand_call (fn, target, target == const0_rtx);
  }
#endif
  return NULL_RTX;
}
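/* An illustrative sketch (editor's addition, hypothetical user code, not
   part of GCC; kept under #if 0): the MIN(strlen(s)+1, n) bound used
   above.  Once the first NUL of either string has been compared, later
   bytes cannot change strncmp's result, so the length can safely be
   capped at strlen+1.  */
#if 0
#include <assert.h>
#include <string.h>

static void
length_cap_example (void)
{
  /* strlen ("ab") + 1 == 3, so comparing with n == 100 must give a
     result with the same sign as comparing just 3 bytes.  */
  assert ((strncmp ("ab", "abcd", 100) < 0)
	  == (strncmp ("ab", "abcd", 3) < 0));
}
#endif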
/* Expand a call to __builtin_saveregs, generating the result in TARGET,
   if that's convenient.  */

rtx
expand_builtin_saveregs (void)
{
  rtx val, seq;

  /* Don't do __builtin_saveregs more than once in a function.
     Save the result of the first call and reuse it.  */
  if (saveregs_value != 0)
    return saveregs_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the call to the
     first insn of this function.  */

  start_sequence ();

  /* Do whatever the machine needs done in this case.  */
  val = targetm.calls.expand_builtin_saveregs ();

  seq = get_insns ();
  end_sequence ();

  saveregs_value = val;

  /* Put the insns after the NOTE that starts the function.  If this
     is inside a start_sequence, make the outer-level insn chain current, so
     the code is placed at the start of the function.  */
  push_topmost_sequence ();
  emit_insn_after (seq, entry_of_function ());
  pop_topmost_sequence ();

  return val;
}
/* Expand a call to __builtin_next_arg.  */

static rtx
expand_builtin_next_arg (void)
{
  /* Checking arguments is already done in fold_builtin_next_arg
     that must be called before this function.  */
  return expand_binop (ptr_mode, add_optab,
		       crtl->args.internal_arg_pointer,
		       crtl->args.arg_offset_rtx,
		       NULL_RTX, 0, OPTAB_LIB_WIDEN);
}
/* Make it easier for the backends by protecting the valist argument
   from multiple evaluations.  */

static tree
stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
{
  tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));

  /* The current way of determining the type of valist is completely
     bogus.  We should have the information on the va builtin instead.  */
  if (!vatype)
    vatype = targetm.fn_abi_va_list (cfun->decl);

  if (TREE_CODE (vatype) == ARRAY_TYPE)
    {
      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);

      /* For this case, the backends will be expecting a pointer to
	 vatype, but it's possible we've actually been given an array
	 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
	 So fix it.  */
      if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	{
	  tree p1 = build_pointer_type (TREE_TYPE (vatype));
	  valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
	}
    }
  else
    {
      tree pt = build_pointer_type (vatype);

      if (! needs_lvalue)
	{
	  if (! TREE_SIDE_EFFECTS (valist))
	    return valist;

	  valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
	  TREE_SIDE_EFFECTS (valist) = 1;
	}

      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);
      valist = fold_build2_loc (loc, MEM_REF,
				vatype, valist, build_int_cst (pt, 0));
    }

  return valist;
}
/* The "standard" definition of va_list is void*.  */

tree
std_build_builtin_va_list (void)
{
  return ptr_type_node;
}

/* The "standard" abi va_list is va_list_type_node.  */

tree
std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
{
  return va_list_type_node;
}

/* The "standard" type of va_list is va_list_type_node.  */

tree
std_canonical_va_list_type (tree type)
{
  tree wtype, htype;

  if (INDIRECT_REF_P (type))
    type = TREE_TYPE (type);
  else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
    type = TREE_TYPE (type);
  wtype = va_list_type_node;
  htype = type;
  /* Treat structure va_list types.  */
  if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
    htype = TREE_TYPE (htype);
  else if (TREE_CODE (wtype) == ARRAY_TYPE)
    {
      /* If va_list is an array type, the argument may have decayed
	 to a pointer type, e.g. by being passed to another function.
	 In that case, unwrap both types so that we can compare the
	 underlying records.  */
      if (TREE_CODE (htype) == ARRAY_TYPE
	  || POINTER_TYPE_P (htype))
	{
	  wtype = TREE_TYPE (wtype);
	  htype = TREE_TYPE (htype);
	}
    }
  if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
    return va_list_type_node;

  return NULL_TREE;
}
/* The "standard" implementation of va_start: just assign `nextarg' to
   the variable.  */

void
std_expand_builtin_va_start (tree valist, rtx nextarg)
{
  rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
  convert_move (va_r, nextarg, 0);
}
/* Expand EXP, a call to __builtin_va_start.  */

static rtx
expand_builtin_va_start (tree exp)
{
  rtx nextarg;
  tree valist;
  location_t loc = EXPR_LOCATION (exp);

  if (call_expr_nargs (exp) < 2)
    {
      error_at (loc, "too few arguments to function %<va_start%>");
      return const0_rtx;
    }

  if (fold_builtin_next_arg (exp, true))
    return const0_rtx;

  nextarg = expand_builtin_next_arg ();
  valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);

  if (targetm.expand_builtin_va_start)
    targetm.expand_builtin_va_start (valist, nextarg);
  else
    std_expand_builtin_va_start (valist, nextarg);

  return const0_rtx;
}
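/* An illustrative sketch (editor's addition, hypothetical user code, not
   part of GCC; kept under #if 0): the user-level construct the expansion
   above implements.  va_start must name the last fixed parameter so that
   __builtin_next_arg can compute the address just past it.  */
#if 0
#include <stdarg.h>

int
sum (int count, ...)
{
  va_list ap;
  int i, total = 0;

  va_start (ap, count);		/* Expanded by expand_builtin_va_start.  */
  for (i = 0; i < count; i++)
    total += va_arg (ap, int);	/* Gimplified by gimplify_va_arg_expr.  */
  va_end (ap);			/* Expanded by expand_builtin_va_end.  */
  return total;
}
#endif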
/* The "standard" implementation of va_arg: read the value from the
   current (padded) address and increment by the (padded) size.  */

tree
std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
			  gimple_seq *post_p)
{
  tree addr, t, type_size, rounded_size, valist_tmp;
  unsigned HOST_WIDE_INT align, boundary;
  bool indirect;

#ifdef ARGS_GROW_DOWNWARD
  /* All of the alignment and movement below is for args-grow-up machines.
     As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
     implement their own specialized gimplify_va_arg_expr routines.  */
  gcc_unreachable ();
#endif

  indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
  if (indirect)
    type = build_pointer_type (type);

  align = PARM_BOUNDARY / BITS_PER_UNIT;
  boundary = targetm.calls.function_arg_boundary (TYPE_MODE (type), type);

  /* When we align parameter on stack for caller, if the parameter
     alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
     aligned at MAX_SUPPORTED_STACK_ALIGNMENT.  We will match callee
     here with caller.  */
  if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
    boundary = MAX_SUPPORTED_STACK_ALIGNMENT;

  boundary /= BITS_PER_UNIT;

  /* Hoist the valist value into a temporary for the moment.  */
  valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);

  /* va_list pointer is aligned to PARM_BOUNDARY.  If argument actually
     requires greater alignment, we must perform dynamic alignment.  */
  if (boundary > align
      && !integer_zerop (TYPE_SIZE (type)))
    {
      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
		  fold_build_pointer_plus_hwi (valist_tmp, boundary - 1));
      gimplify_and_add (t, pre_p);

      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
		  fold_build2 (BIT_AND_EXPR, TREE_TYPE (valist),
			       valist_tmp,
			       build_int_cst (TREE_TYPE (valist), -boundary)));
      gimplify_and_add (t, pre_p);
    }
  else
    boundary = align;

  /* If the actual alignment is less than the alignment of the type,
     adjust the type accordingly so that we don't assume strict alignment
     when dereferencing the pointer.  */
  boundary *= BITS_PER_UNIT;
  if (boundary < TYPE_ALIGN (type))
    {
      type = build_variant_type_copy (type);
      TYPE_ALIGN (type) = boundary;
    }

  /* Compute the rounded size of the type.  */
  type_size = size_in_bytes (type);
  rounded_size = round_up (type_size, align);

  /* Reduce rounded_size so it's sharable with the postqueue.  */
  gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);

  /* Get AP.  */
  addr = valist_tmp;
  if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
    {
      /* Small args are padded downward.  */
      t = fold_build2_loc (input_location, GT_EXPR, sizetype,
			   rounded_size, size_int (align));
      t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
		       size_binop (MINUS_EXPR, rounded_size, type_size));
      addr = fold_build_pointer_plus (addr, t);
    }

  /* Compute new value for AP.  */
  t = fold_build_pointer_plus (valist_tmp, rounded_size);
  t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
  gimplify_and_add (t, pre_p);

  addr = fold_convert (build_pointer_type (type), addr);

  if (indirect)
    addr = build_va_arg_indirect_ref (addr);

  return build_va_arg_indirect_ref (addr);
}
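/* An illustrative sketch (editor's addition, hypothetical helper, not
   part of GCC; kept under #if 0): the PAD_VARARGS_DOWN adjustment above,
   on plain integers.  A small argument occupies the high-address end of
   its slot when varargs are padded downward, so the read address is
   offset by (slot size - object size).  */
#if 0
#include <assert.h>

static unsigned
pad_down_offset (unsigned rounded_size, unsigned type_size, unsigned align)
{
  /* Matches the COND_EXPR built above: no offset for args larger than
     one slot, otherwise skip the padding bytes.  */
  return rounded_size > align ? 0 : rounded_size - type_size;
}

static void
pad_down_example (void)
{
  /* A 2-byte short in a 4-byte slot is read at offset 2.  */
  assert (pad_down_offset (4, 2, 4) == 2);
}
#endif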
/* Build an indirect-ref expression over the given TREE, which represents a
   piece of a va_arg() expansion.  */

tree
build_va_arg_indirect_ref (tree addr)
{
  addr = build_simple_mem_ref_loc (EXPR_LOCATION (addr), addr);

  if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF.  */
    mf_mark (addr);

  return addr;
}
/* Return a dummy expression of type TYPE in order to keep going after an
   error.  */

static tree
dummy_object (tree type)
{
  tree t = build_int_cst (build_pointer_type (type), 0);
  return build2 (MEM_REF, type, t, t);
}
/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
   builtin function, but a very special sort of operator.  */

enum gimplify_status
gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree promoted_type, have_va_type;
  tree valist = TREE_OPERAND (*expr_p, 0);
  tree type = TREE_TYPE (*expr_p);
  tree t;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Verify that valist is of the proper type.  */
  have_va_type = TREE_TYPE (valist);
  if (have_va_type == error_mark_node)
    return GS_ERROR;
  have_va_type = targetm.canonical_va_list_type (have_va_type);

  if (have_va_type == NULL_TREE)
    {
      error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
      return GS_ERROR;
    }

  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
  if ((promoted_type = lang_hooks.types.type_promotes_to (type))
      != type)
    {
      static bool gave_help;
      bool warned;

      /* Unfortunately, this is merely undefined, rather than a constraint
	 violation, so we cannot make this an error.  If this call is never
	 executed, the program is still strictly conforming.  */
      warned = warning_at (loc, 0,
			   "%qT is promoted to %qT when passed through %<...%>",
			   type, promoted_type);
      if (!gave_help && warned)
	{
	  gave_help = true;
	  inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
		  promoted_type, type);
	}

      /* We can, however, treat "undefined" any way we please.
	 Call abort to encourage the user to fix the program.  */
      if (warned)
	inform (loc, "if this code is reached, the program will abort");
      /* Before the abort, allow the evaluation of the va_list
	 expression to exit or longjmp.  */
      gimplify_and_add (valist, pre_p);
      t = build_call_expr_loc (loc,
			       builtin_decl_implicit (BUILT_IN_TRAP), 0);
      gimplify_and_add (t, pre_p);

      /* This is dead code, but go ahead and finish so that the
	 mode of the result comes out right.  */
      *expr_p = dummy_object (type);
      return GS_ALL_DONE;
    }

  /* Make it easier for the backends by protecting the valist argument
     from multiple evaluations.  */
  if (TREE_CODE (have_va_type) == ARRAY_TYPE)
    {
      /* For this case, the backends will be expecting a pointer to
	 TREE_TYPE (abi), but it's possible we've
	 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
	 So fix it.  */
      if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	{
	  tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
	  valist = fold_convert_loc (loc, p1,
				     build_fold_addr_expr_loc (loc, valist));
	}

      gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
    }
  else
    gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);

  if (!targetm.gimplify_va_arg_expr)
    /* FIXME: Once most targets are converted we should merely
       assert this is non-null.  */
    return GS_ALL_DONE;

  *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
  return GS_OK;
}
/* Expand EXP, a call to __builtin_va_end.  */

static rtx
expand_builtin_va_end (tree exp)
{
  tree valist = CALL_EXPR_ARG (exp, 0);

  /* Evaluate for side effects, if needed.  I hate macros that don't
     do that.  */
  if (TREE_SIDE_EFFECTS (valist))
    expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);

  return const0_rtx;
}
/* Expand EXP, a call to __builtin_va_copy.  We do this as a
   builtin rather than just as an assignment in stdarg.h because of the
   nastiness of array-type va_list types.  */

static rtx
expand_builtin_va_copy (tree exp)
{
  tree dst, src, t;
  location_t loc = EXPR_LOCATION (exp);

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  dst = stabilize_va_list_loc (loc, dst, 1);
  src = stabilize_va_list_loc (loc, src, 0);

  gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);

  if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
    {
      t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
  else
    {
      rtx dstb, srcb, size;

      /* Evaluate to pointers.  */
      dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
      srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
      size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
			  NULL_RTX, VOIDmode, EXPAND_NORMAL);

      dstb = convert_memory_address (Pmode, dstb);
      srcb = convert_memory_address (Pmode, srcb);

      /* "Dereference" to BLKmode memories.  */
      dstb = gen_rtx_MEM (BLKmode, dstb);
      set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
      set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
      srcb = gen_rtx_MEM (BLKmode, srcb);
      set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
      set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));

      /* Copy.  */
      emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
    }

  return const0_rtx;
}
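/* An illustrative sketch (editor's addition, hypothetical user code, not
   part of GCC; kept under #if 0): why va_copy needs the block-move branch
   above.  On targets where va_list is an array type (the x86-64 psABI,
   for instance, defines it as a one-element struct array), plain
   assignment of one va_list to another would be ill-formed C, so the
   builtin copies the underlying object instead.  */
#if 0
#include <stdarg.h>

void
copy_args (va_list src)
{
  va_list dst;

  va_copy (dst, src);	/* Works for both pointer and array va_lists.  */
  /* dst = src; would not compile when va_list is an array type.  */
  va_end (dst);
}
#endif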
/* Expand a call to one of the builtin functions __builtin_frame_address or
   __builtin_return_address.  */

static rtx
expand_builtin_frame_address (tree fndecl, tree exp)
{
  /* The argument must be a nonnegative integer constant.
     It counts the number of frames to scan up the stack.
     The value is the return address saved in that frame.  */
  if (call_expr_nargs (exp) == 0)
    /* Warning about missing arg was already issued.  */
    return const0_rtx;
  else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
    {
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	error ("invalid argument to %<__builtin_frame_address%>");
      else
	error ("invalid argument to %<__builtin_return_address%>");
      return const0_rtx;
    }
  else
    {
      rtx tem
	= expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
				      tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));

      /* Some ports cannot access arbitrary stack frames.  */
      if (tem == NULL)
	{
	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	    warning (0, "unsupported argument to %<__builtin_frame_address%>");
	  else
	    warning (0, "unsupported argument to %<__builtin_return_address%>");
	  return const0_rtx;
	}

      /* For __builtin_frame_address, return what we've got.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	return tem;

      if (!REG_P (tem)
	  && ! CONSTANT_P (tem))
	tem = copy_addr_to_reg (tem);
      return tem;
    }
}
/* Expand EXP, a call to the alloca builtin.  Return NULL_RTX if we
   failed and the caller should emit a normal call.  CANNOT_ACCUMULATE
   is the same as for allocate_dynamic_stack_space.  */

static rtx
expand_builtin_alloca (tree exp, bool cannot_accumulate)
{
  rtx op0;
  rtx result;
  bool valid_arglist;
  unsigned int align;
  bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
			    == BUILT_IN_ALLOCA_WITH_ALIGN);

  /* Emit normal call if we use mudflap.  */
  if (flag_mudflap)
    return NULL_RTX;

  valid_arglist
    = (alloca_with_align
       ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
       : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));

  if (!valid_arglist)
    return NULL_RTX;

  /* Compute the argument.  */
  op0 = expand_normal (CALL_EXPR_ARG (exp, 0));

  /* Compute the alignment.  */
  align = (alloca_with_align
	   ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
	   : BIGGEST_ALIGNMENT);

  /* Allocate the desired space.  */
  result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
  result = convert_memory_address (ptr_mode, result);

  return result;
}
/* Expand a call to bswap builtin in EXP.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_bswap (enum machine_mode target_mode, tree exp, rtx target,
		      rtx subtarget)
{
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  op0 = expand_expr (arg,
		     subtarget && GET_MODE (subtarget) == target_mode
		     ? subtarget : NULL_RTX,
		     target_mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != target_mode)
    op0 = convert_to_mode (target_mode, op0, 1);

  target = expand_unop (target_mode, bswap_optab, op0, target, 1);

  gcc_assert (target);

  return convert_to_mode (target_mode, target, 1);
}
/* Expand a call to a unary builtin in EXP.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
		     rtx subtarget, optab op_optab)
{
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Compute the argument.  */
  op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
		     (subtarget
		      && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
			  == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
		     VOIDmode, EXPAND_NORMAL);
  /* Compute op, into TARGET if possible.
     Set TARGET to wherever the result comes back.  */
  target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
			op_optab, op0, target, op_optab != clrsb_optab);
  gcc_assert (target);

  return convert_to_mode (target_mode, target, 0);
}
/* Expand a call to __builtin_expect.  We just return our argument
   as the builtin_expect semantic should've been already executed by
   tree branch prediction pass.  */

static rtx
expand_builtin_expect (tree exp, rtx target)
{
  tree arg;

  if (call_expr_nargs (exp) < 2)
    return const0_rtx;
  arg = CALL_EXPR_ARG (exp, 0);

  target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
  /* When guessing was done, the hints should be already stripped away.  */
  gcc_assert (!flag_guess_branch_prob
	      || optimize == 0 || seen_error ());
  return target;
}
/* Expand a call to __builtin_assume_aligned.  We just return our first
   argument as the builtin_assume_aligned semantic should've been already
   executed by CCP.  */

static rtx
expand_builtin_assume_aligned (tree exp, rtx target)
{
  if (call_expr_nargs (exp) < 2)
    return const0_rtx;
  target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
			EXPAND_NORMAL);
  gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
	      && (call_expr_nargs (exp) < 3
		  || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
  return target;
}
/* Expand a call to __builtin_trap.  */

static void
expand_builtin_trap (void)
{
#ifdef HAVE_trap
  if (HAVE_trap)
    emit_insn (gen_trap ());
  else
#endif
    emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
  emit_barrier ();
}
/* Expand a call to __builtin_unreachable.  We do nothing except emit
   a barrier saying that control flow will not pass here.

   It is the responsibility of the program being compiled to ensure
   that control flow never reaches __builtin_unreachable.  */

static void
expand_builtin_unreachable (void)
{
  emit_barrier ();
}
/* Expand EXP, a call to fabs, fabsf or fabsl.
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function inline.  If convenient, the result should be placed
   in TARGET.  SUBTARGET may be used as the target for computing
   the operand.  */

static rtx
expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
{
  enum machine_mode mode;
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
  return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
}
/* Expand EXP, a call to copysign, copysignf, or copysignl.
   Return NULL if a normal call should be emitted rather than expanding the
   function inline.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing the operand.  */

static rtx
expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
{
  rtx op0, op1;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

  arg = CALL_EXPR_ARG (exp, 1);
  op1 = expand_normal (arg);

  return expand_copysign (op0, op1, target);
}
/* Create a new constant string literal and return a char* pointer to it.
   The STRING_CST value is the LEN characters at STR.  */
tree
build_string_literal (int len, const char *str)
{
  tree t, elem, index, type;

  t = build_string (len, str);
  elem = build_type_variant (char_type_node, 1, 0);
  index = build_index_type (size_int (len - 1));
  type = build_array_type (elem, index);
  TREE_TYPE (t) = type;
  TREE_CONSTANT (t) = 1;
  TREE_READONLY (t) = 1;
  TREE_STATIC (t) = 1;

  type = build_pointer_type (elem);
  t = build1 (ADDR_EXPR, type,
	      build4 (ARRAY_REF, elem,
		      t, integer_zero_node, NULL_TREE, NULL_TREE));
  return t;
}
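/* An illustrative sketch (editor's addition, hypothetical user code, not
   part of GCC; kept under #if 0): the tree built above denotes the C
   expression &"str"[0], i.e. a char* pointing at a static, read-only
   STRING_CST.  */
#if 0
#include <stdio.h>

void
emit_greeting (void)
{
  /* A front end calling build_string_literal (6, "hello") — the length
     includes the terminating NUL — gets a tree equivalent to the
     argument expression here.  */
  fputs (&"hello"[0], stdout);
}
#endif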
/* Expand a call to __builtin___clear_cache.  */

static rtx
expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
{
#ifndef HAVE_clear_cache
#ifdef CLEAR_INSN_CACHE
  /* There is no "clear_cache" insn, and __clear_cache() in libgcc
     does something.  Just do the default expansion to a call to
     __clear_cache().  */
  return NULL_RTX;
#else
  /* There is no "clear_cache" insn, and __clear_cache() in libgcc
     does nothing.  There is no need to call it.  Do nothing.  */
  return const0_rtx;
#endif /* CLEAR_INSN_CACHE */
#else
  /* We have a "clear_cache" insn, and it will handle everything.  */
  tree begin, end;
  rtx begin_rtx, end_rtx;

  /* We must not expand to a library call.  If we did, any
     fallback library function in libgcc that might contain a call to
     __builtin___clear_cache() would recurse infinitely.  */
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    {
      error ("both arguments to %<__builtin___clear_cache%> must be pointers");
      return const0_rtx;
    }

  if (HAVE_clear_cache)
    {
      struct expand_operand ops[2];

      begin = CALL_EXPR_ARG (exp, 0);
      begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);

      end = CALL_EXPR_ARG (exp, 1);
      end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);

      create_address_operand (&ops[0], begin_rtx);
      create_address_operand (&ops[1], end_rtx);
      if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
	return const0_rtx;
    }
  return const0_rtx;
#endif /* HAVE_clear_cache */
}
/* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT.  */

static rtx
round_trampoline_addr (rtx tramp)
{
  rtx temp, addend, mask;

  /* If we don't need too much alignment, we'll have been guaranteed
     proper alignment by get_trampoline_type.  */
  if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
    return tramp;

  /* Round address up to desired boundary.  */
  temp = gen_reg_rtx (Pmode);
  addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
  mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);

  temp  = expand_simple_binop (Pmode, PLUS, tramp, addend,
			       temp, 0, OPTAB_LIB_WIDEN);
  tramp = expand_simple_binop (Pmode, AND, temp, mask,
			       temp, 0, OPTAB_LIB_WIDEN);

  return tramp;
}
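/* An illustrative sketch (editor's addition, hypothetical helper, not
   part of GCC; kept under #if 0): the PLUS/AND pair emitted above is the
   usual power-of-two round-up idiom, shown here with an assumed 16-byte
   trampoline alignment.  */
#if 0
#include <assert.h>
#include <stdint.h>

static uintptr_t
round_tramp (uintptr_t addr)
{
  const uintptr_t align = 16;	/* TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT  */
  uintptr_t addend = align - 1;	/* 0xf  */
  uintptr_t mask = -align;	/* ...fff0  */

  return (addr + addend) & mask;
}

static void
round_tramp_example (void)
{
  assert (round_tramp (0x1001) == 0x1010);
  assert (round_tramp (0x1010) == 0x1010);  /* Already aligned: unchanged.  */
}
#endif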
static rtx
expand_builtin_init_trampoline (tree exp, bool onstack)
{
  tree t_tramp, t_func, t_chain;
  rtx m_tramp, r_tramp, r_chain, tmp;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
			 POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_tramp = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  r_tramp = expand_normal (t_tramp);
  m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
  MEM_NOTRAP_P (m_tramp) = 1;

  /* If ONSTACK, the TRAMP argument should be the address of a field
     within the local function's FRAME decl.  Either way, let's see if
     we can fill in the MEM_ATTRs for this memory.  */
  if (TREE_CODE (t_tramp) == ADDR_EXPR)
    set_mem_attributes_minus_bitpos (m_tramp, TREE_OPERAND (t_tramp, 0),
				     true, 0);

  /* Creator of a heap trampoline is responsible for making sure the
     address is aligned to at least STACK_BOUNDARY.  Normally malloc
     will ensure this anyhow.  */
  tmp = round_trampoline_addr (r_tramp);
  if (tmp != r_tramp)
    {
      m_tramp = change_address (m_tramp, BLKmode, tmp);
      set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
      set_mem_size (m_tramp, TRAMPOLINE_SIZE);
    }

  /* The FUNC argument should be the address of the nested function.
     Extract the actual function decl to pass to the hook.  */
  gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
  t_func = TREE_OPERAND (t_func, 0);
  gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);

  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the trampoline.  */
  targetm.calls.trampoline_init (m_tramp, t_func, r_chain);

  if (onstack)
    {
      trampolines_created = 1;

      warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
		  "trampoline generated for nested function %qD", t_func);
    }

  return const0_rtx;
}
static rtx
expand_builtin_adjust_trampoline (tree exp)
{
  rtx tramp;

  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
  tramp = round_trampoline_addr (tramp);
  if (targetm.calls.trampoline_adjust_address)
    tramp = targetm.calls.trampoline_adjust_address (tramp);

  return tramp;
}
/* Expand the call EXP to the built-in signbit, signbitf or signbitl
   function.  The function first checks whether the back end provides
   an insn to implement signbit for the respective mode.  If not, it
   checks whether the floating point format of the value is such that
   the sign bit can be extracted.  If that is not the case, the
   function returns NULL_RTX to indicate that a normal call should be
   emitted rather than expanding the function in-line.  EXP is the
   expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  */
static rtx
expand_builtin_signbit (tree exp, rtx target)
{
  const struct real_format *fmt;
  enum machine_mode fmode, imode, rmode;
  tree arg;
  int word, bitpos;
  enum insn_code icode;
  rtx temp;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  fmode = TYPE_MODE (TREE_TYPE (arg));
  rmode = TYPE_MODE (TREE_TYPE (exp));
  fmt = REAL_MODE_FORMAT (fmode);

  arg = builtin_save_expr (arg);

  /* Expand the argument yielding a RTX expression.  */
  temp = expand_normal (arg);

  /* Check if the back end provides an insn that handles signbit for the
     argument's mode.  */
  icode = optab_handler (signbit_optab, fmode);
  if (icode != CODE_FOR_nothing)
    {
      rtx last = get_last_insn ();
      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
	return target;
      delete_insns_since (last);
    }

  /* For floating point formats without a sign bit, implement signbit
     as "ARG < 0.0".  */
  bitpos = fmt->signbit_ro;
  if (bitpos < 0)
    {
      /* But we can't do this if the format supports signed zero.  */
      if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
	return NULL_RTX;

      arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
			     build_real (TREE_TYPE (arg), dconst0));
      return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
    }

  if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
    {
      imode = int_mode_for_mode (fmode);
      if (imode == BLKmode)
	return NULL_RTX;
      temp = gen_lowpart (imode, temp);
    }
  else
    {
      imode = word_mode;
      /* Handle targets with different FP word orders.  */
      if (FLOAT_WORDS_BIG_ENDIAN)
	word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
      else
	word = bitpos / BITS_PER_WORD;
      temp = operand_subword_force (temp, word, fmode);
      bitpos = bitpos % BITS_PER_WORD;
    }

  /* Force the intermediate word_mode (or narrower) result into a
     register.  This avoids attempting to create paradoxical SUBREGs
     of floating point modes below.  */
  temp = force_reg (imode, temp);

  /* If the bitpos is within the "result mode" lowpart, the operation
     can be implemented with a single bitwise AND.  Otherwise, we need
     a right shift and an AND.  */

  if (bitpos < GET_MODE_BITSIZE (rmode))
    {
      double_int mask = double_int_zero.set_bit (bitpos);

      if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
	temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp,
			   immed_double_int_const (mask, rmode),
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }
  else
    {
      /* Perform a logical right shift to place the signbit in the least
	 significant bit, then truncate the result to the desired mode
	 and mask just this bit.  */
      temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
      temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp, const1_rtx,
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }

  return temp;
}
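/* An illustrative sketch (editor's addition, hypothetical helper, not
   part of GCC; kept under #if 0): the shift-and-mask fallback above,
   written as portable C for IEEE double, where the sign occupies bit 63
   of the 64-bit representation.  */
#if 0
#include <assert.h>
#include <stdint.h>
#include <string.h>

static int
my_signbit (double x)
{
  uint64_t rep;

  memcpy (&rep, &x, sizeof rep);   /* Reinterpret the bits, as gen_lowpart
				      does at the RTL level.  */
  return (int) (rep >> 63) & 1;	   /* Logical right shift, then mask.  */
}

static void
signbit_example (void)
{
  assert (my_signbit (-1.5) == 1);
  assert (my_signbit (1.5) == 0);
  assert (my_signbit (-0.0) == 1);  /* Distinguishes -0.0 from 0.0, which
				       "x < 0.0" cannot do.  */
}
#endif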
/* Expand fork or exec calls.  TARGET is the desired target of the
   call.  EXP is the call.  FN is the identifier of the actual
   function.  IGNORE is nonzero if the value is to be ignored.  */

static rtx
expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
{
  tree id, decl;
  tree call;

  /* If we are not profiling, just call the function.  */
  if (!profile_arc_flag)
    return NULL_RTX;

  /* Otherwise call the wrapper.  This should be equivalent for the rest of
     compiler, so the code does not diverge, and the wrapper may run the
     code necessary for keeping the profiling sane.  */

  switch (DECL_FUNCTION_CODE (fn))
    {
    case BUILT_IN_FORK:
      id = get_identifier ("__gcov_fork");
      break;

    case BUILT_IN_EXECL:
      id = get_identifier ("__gcov_execl");
      break;

    case BUILT_IN_EXECV:
      id = get_identifier ("__gcov_execv");
      break;

    case BUILT_IN_EXECLP:
      id = get_identifier ("__gcov_execlp");
      break;

    case BUILT_IN_EXECLE:
      id = get_identifier ("__gcov_execle");
      break;

    case BUILT_IN_EXECVP:
      id = get_identifier ("__gcov_execvp");
      break;

    case BUILT_IN_EXECVE:
      id = get_identifier ("__gcov_execve");
      break;

    default:
      gcc_unreachable ();
    }

  decl = build_decl (DECL_SOURCE_LOCATION (fn),
		     FUNCTION_DECL, id, TREE_TYPE (fn));
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  TREE_NOTHROW (decl) = 1;
  DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
  DECL_VISIBILITY_SPECIFIED (decl) = 1;
  call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
  return expand_call (call, target, ignore);
}
/* Reconstitute a mode for a __sync intrinsic operation.  Since the type of
   the pointer in these functions is void*, the tree optimizers may remove
   casts.  The mode computed in expand_builtin isn't reliable either, due
   to __sync_bool_compare_and_swap.

   FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
   group of builtins.  This gives us log2 of the mode size.  */

static inline enum machine_mode
get_builtin_sync_mode (int fcode_diff)
{
  /* The size is not negotiable, so ask not to get BLKmode in return
     if the target indicates that a smaller size would be better.  */
  return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
}
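/* An illustrative sketch (editor's addition, hypothetical helper, not
   part of GCC; kept under #if 0): how FCODE_DIFF encodes the access size.
   The _1/_2/_4/_8/_16 variants are consecutive function codes, so
   fcode - base is log2 of the size in bytes.  */
#if 0
#include <assert.h>

static unsigned
sync_bits_from_diff (unsigned fcode_diff)
{
  const unsigned bits_per_unit = 8;	/* BITS_PER_UNIT on most targets.  */
  return bits_per_unit << fcode_diff;
}

static void
sync_mode_example (void)
{
  assert (sync_bits_from_diff (0) == 8);    /* e.g. ..._FETCH_AND_ADD_1  */
  assert (sync_bits_from_diff (2) == 32);   /* e.g. ..._FETCH_AND_ADD_4  */
  assert (sync_bits_from_diff (4) == 128);  /* e.g. ..._FETCH_AND_ADD_16 */
}
#endif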
/* Expand the memory expression LOC and return the appropriate memory operand
   for the builtin_sync operations.  */

static rtx
get_builtin_sync_mem (tree loc, enum machine_mode mode)
{
  rtx addr, mem;

  addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
  addr = convert_memory_address (Pmode, addr);

  /* Note that we explicitly do not want any alias information for this
     memory, so that we kill all other live memories.  Otherwise we don't
     satisfy the full barrier semantics of the intrinsic.  */
  mem = validize_mem (gen_rtx_MEM (mode, addr));

  /* The alignment needs to be at least according to that of the mode.  */
  set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
			   get_pointer_alignment (loc)));
  set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
  MEM_VOLATILE_P (mem) = 1;

  return mem;
}
/* Make sure an argument is in the right mode.
   EXP is the tree argument.
   MODE is the mode it should be in.  */

static rtx
expand_expr_force_mode (tree exp, enum machine_mode mode)
{
  rtx val;
  enum machine_mode old_mode;

  val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
  /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
     of CONST_INTs, where we know the old_mode only from the call argument.  */

  old_mode = GET_MODE (val);
  if (old_mode == VOIDmode)
    old_mode = TYPE_MODE (TREE_TYPE (exp));
  val = convert_modes (mode, old_mode, val, 1);
  return val;
}
/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
   EXP is the CALL_EXPR.  CODE is the rtx code
   that corresponds to the arithmetic or logical operation from the name;
   an exception here is that NOT actually means NAND.  TARGET is an optional
   place for us to store the results; AFTER is true if this is the
   fetch_and_xxx form.  */

static rtx
expand_builtin_sync_operation (enum machine_mode mode, tree exp,
			       enum rtx_code code, bool after,
			       rtx target)
{
  rtx val, mem;
  location_t loc = EXPR_LOCATION (exp);

  if (code == NOT && warn_sync_nand)
    {
      tree fndecl = get_callee_fndecl (exp);
      enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

      static bool warned_f_a_n, warned_n_a_f;

      switch (fcode)
	{
	case BUILT_IN_SYNC_FETCH_AND_NAND_1:
	case BUILT_IN_SYNC_FETCH_AND_NAND_2:
	case BUILT_IN_SYNC_FETCH_AND_NAND_4:
	case BUILT_IN_SYNC_FETCH_AND_NAND_8:
	case BUILT_IN_SYNC_FETCH_AND_NAND_16:
	  if (warned_f_a_n)
	    break;

	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_f_a_n = true;
	  break;

	case BUILT_IN_SYNC_NAND_AND_FETCH_1:
	case BUILT_IN_SYNC_NAND_AND_FETCH_2:
	case BUILT_IN_SYNC_NAND_AND_FETCH_4:
	case BUILT_IN_SYNC_NAND_AND_FETCH_8:
	case BUILT_IN_SYNC_NAND_AND_FETCH_16:
	  if (warned_n_a_f)
	    break;

	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_n_a_f = true;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SEQ_CST,
				 after);
}
/* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
   intrinsics.  EXP is the CALL_EXPR.  IS_BOOL is
   true if this is the boolean form.  TARGET is a place for us to store the
   results; this is NOT optional if IS_BOOL is true.  */

static rtx
expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
				 bool is_bool, rtx target)
{
  rtx old_val, new_val, mem;
  rtx *pbool, *poval;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
  new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);

  pbool = poval = NULL;
  if (target != const0_rtx)
    {
      if (is_bool)
	pbool = &target;
      else
	poval = &target;
    }
  if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
				       false, MEMMODEL_SEQ_CST,
				       MEMMODEL_SEQ_CST))
    return NULL_RTX;

  return target;
}
/* Expand the __sync_lock_test_and_set intrinsic.  Note that the most
   general form is actually an atomic exchange, and some targets only
   support a reduced form with the second argument being a constant 1.
   EXP is the CALL_EXPR; TARGET is an optional place for us to store
   the results.  */

static rtx
expand_builtin_sync_lock_test_and_set (enum machine_mode mode, tree exp,
				       rtx target)
{
  rtx val, mem;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_sync_lock_test_and_set (target, mem, val);
}
/* Expand the __sync_lock_release intrinsic.  EXP is the CALL_EXPR.  */

static void
expand_builtin_sync_lock_release (enum machine_mode mode, tree exp)
{
  rtx mem;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  expand_atomic_store (mem, const0_rtx, MEMMODEL_RELEASE, true);
}
/* Given an integer representing an ``enum memmodel'', verify its
   correctness and return the memory model enum.  */

static enum memmodel
get_memmodel (tree exp)
{
  rtx op;
  unsigned HOST_WIDE_INT val;

  /* If the parameter is not a constant, it's a run time value so we'll just
     convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking.  */
  if (TREE_CODE (exp) != INTEGER_CST)
    return MEMMODEL_SEQ_CST;

  op = expand_normal (exp);

  val = INTVAL (op);
  if (targetm.memmodel_check)
    val = targetm.memmodel_check (val);
  else if (val & ~MEMMODEL_MASK)
    {
      warning (OPT_Winvalid_memory_model,
	       "unknown architecture specifier in memory model to builtin");
      return MEMMODEL_SEQ_CST;
    }

  if ((INTVAL (op) & MEMMODEL_MASK) >= MEMMODEL_LAST)
    {
      warning (OPT_Winvalid_memory_model,
	       "invalid memory model argument to builtin");
      return MEMMODEL_SEQ_CST;
    }

  return (enum memmodel) val;
}
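/* An illustrative sketch (editor's addition, hypothetical helper and
   constants, not part of GCC; kept under #if 0): the validation above on
   the C++11-style constants that the __atomic builtins accept, assuming
   the usual layout where the low bits select the base model and high
   bits are target-specific.  */
#if 0
#include <assert.h>

static int
model_is_valid (unsigned val, unsigned memmodel_mask, unsigned memmodel_last)
{
  /* The base model must be in range; anything else warns and falls back
     to seq_cst, as in get_memmodel above.  */
  return (val & memmodel_mask) < memmodel_last;
}

static void
memmodel_example (void)
{
  /* __ATOMIC_SEQ_CST is a GCC-predefined macro; 0xffff and 6 stand in
     for MEMMODEL_MASK and MEMMODEL_LAST here.  */
  assert (model_is_valid (__ATOMIC_SEQ_CST, 0xffff, 6));
  assert (!model_is_valid (42, 0xffff, 6));
}
#endif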
/* Expand the __atomic_exchange intrinsic:
   	TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_exchange (enum machine_mode mode, tree exp, rtx target)
{
  rtx val, mem;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));
  if ((model & MEMMODEL_MASK) == MEMMODEL_CONSUME)
    {
      error ("invalid memory model for %<__atomic_exchange%>");
      return NULL_RTX;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_exchange (target, mem, val, model);
}
/* Expand the __atomic_compare_exchange intrinsic:
   	bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
					TYPE desired, BOOL weak,
					enum memmodel success,
					enum memmodel failure)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_compare_exchange (enum machine_mode mode, tree exp,
					rtx target)
{
  rtx expect, desired, mem, oldval;
  enum memmodel success, failure;
  tree weak;
  bool is_weak;

  success = get_memmodel (CALL_EXPR_ARG (exp, 4));
  failure = get_memmodel (CALL_EXPR_ARG (exp, 5));

  if ((failure & MEMMODEL_MASK) == MEMMODEL_RELEASE
      || (failure & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
    {
      error ("invalid failure memory model for %<__atomic_compare_exchange%>");
      return NULL_RTX;
    }

  if (failure > success)
    {
      error ("failure memory model cannot be stronger than success "
	     "memory model for %<__atomic_compare_exchange%>");
      return NULL_RTX;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  expect = expand_normal (CALL_EXPR_ARG (exp, 1));
  expect = convert_memory_address (Pmode, expect);
  expect = gen_rtx_MEM (mode, expect);
  desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);

  weak = CALL_EXPR_ARG (exp, 3);
  is_weak = false;
  if (host_integerp (weak, 0) && tree_low_cst (weak, 0) != 0)
    is_weak = true;

  oldval = expect;
  if (!expand_atomic_compare_and_swap ((target == const0_rtx ? NULL : &target),
				       &oldval, mem, oldval, desired,
				       is_weak, success, failure))
    return NULL_RTX;

  if (oldval != expect)
    emit_move_insn (expect, oldval);

  return target;
}
/* Expand the __atomic_load intrinsic:
   	TYPE __atomic_load (TYPE *object, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_load (enum machine_mode mode, tree exp, rtx target)
{
  rtx mem;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 1));
  if ((model & MEMMODEL_MASK) == MEMMODEL_RELEASE
      || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
    {
      error ("invalid memory model for %<__atomic_load%>");
      return NULL_RTX;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operand.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  return expand_atomic_load (target, mem, model);
}
/* Expand the __atomic_store intrinsic:
   	void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_store (enum machine_mode mode, tree exp)
{
  rtx mem, val;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));
  if ((model & MEMMODEL_MASK) != MEMMODEL_RELAXED
      && (model & MEMMODEL_MASK) != MEMMODEL_SEQ_CST
      && (model & MEMMODEL_MASK) != MEMMODEL_RELEASE)
    {
      error ("invalid memory model for %<__atomic_store%>");
      return NULL_RTX;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_store (mem, val, model, false);
}
/* Expand the __atomic_fetch_XXX intrinsic:
   	TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.
   CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
   FETCH_AFTER is true if returning the result of the operation.
   FETCH_AFTER is false if returning the value before the operation.
   IGNORE is true if the result is not used.
   EXT_CALL is the correct builtin for an external call if this cannot be
   resolved to an instruction sequence.  */

static rtx
expand_builtin_atomic_fetch_op (enum machine_mode mode, tree exp, rtx target,
				enum rtx_code code, bool fetch_after,
				bool ignore, enum built_in_function ext_call)
{
  rtx val, mem, ret;
  enum memmodel model;
  tree fndecl;
  tree addr;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  /* Only try generating instructions if inlining is turned on.  */
  if (flag_inline_atomics)
    {
      ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
      if (ret)
	return ret;
    }

  /* Return if a different routine isn't needed for the library call.  */
  if (ext_call == BUILT_IN_NONE)
    return NULL_RTX;

  /* Change the call to the specified function.  */
  fndecl = get_callee_fndecl (exp);
  addr = CALL_EXPR_FN (exp);
  STRIP_NOPS (addr);

  gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
  TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);

  /* Expand the call here so we can emit trailing code.  */
  ret = expand_call (exp, target, ignore);

  /* Replace the original function just in case it matters.  */
  TREE_OPERAND (addr, 0) = fndecl;

  /* Then issue the arithmetic correction to return the right result.  */
  if (!ignore)
    {
      if (code == NOT)
	{
	  ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
				     OPTAB_LIB_WIDEN);
	  ret = expand_simple_unop (mode, NOT, ret, target, true);
	}
      else
	ret = expand_simple_binop (mode, code, ret, val, target, true,
				   OPTAB_LIB_WIDEN);
    }
  return ret;
}
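/* An illustrative sketch (editor's addition, hypothetical helpers, not
   part of GCC; kept under #if 0): the arithmetic correction above.  The
   library routine returns the value *before* the operation; when the
   builtin wants the value *after* it, the operation is simply replayed
   on the returned value, with NAND as the special case.  */
#if 0
static int
add_fetch_from_fetch_add (int fetched, int val)
{
  return fetched + val;		/* code == PLUS  */
}

static int
nand_fetch_from_fetch_nand (int fetched, int val)
{
  return ~(fetched & val);	/* code == NOT means NAND: AND, then NOT.  */
}
#endif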
#ifndef HAVE_atomic_clear
# define HAVE_atomic_clear 0
# define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
#endif

/* Expand an atomic clear operation.
	void _atomic_clear (BOOL *obj, enum memmodel)
   EXP is the call expression.  */

static rtx
expand_builtin_atomic_clear (tree exp)
{
  enum machine_mode mode;
  rtx mem, ret;
  enum memmodel model;

  mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  model = get_memmodel (CALL_EXPR_ARG (exp, 1));

  if ((model & MEMMODEL_MASK) == MEMMODEL_ACQUIRE
      || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
    {
      error ("invalid memory model for %<__atomic_store%>");
      return const0_rtx;
    }

  if (HAVE_atomic_clear)
    {
      emit_insn (gen_atomic_clear (mem, model));
      return const0_rtx;
    }

  /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
     Failing that, a store is issued by __atomic_store.  The only way this can
     fail is if the bool type is larger than a word size.  Unlikely, but
     handle it anyway for completeness.  Assume a single threaded model since
     there is no atomic support in this case, and no barriers are required.  */
  ret = expand_atomic_store (mem, const0_rtx, model, true);
  if (!ret)
    emit_move_insn (mem, const0_rtx);
  return const0_rtx;
}
/* Expand an atomic test_and_set operation.
	bool _atomic_test_and_set (BOOL *obj, enum memmodel)
   EXP is the call expression.  */

static rtx
expand_builtin_atomic_test_and_set (tree exp, rtx target)
{
  rtx mem;
  enum memmodel model;
  enum machine_mode mode;

  mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  model = get_memmodel (CALL_EXPR_ARG (exp, 1));

  return expand_atomic_test_and_set (target, mem, model);
}
/* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
   this architecture.  If ARG1 is NULL, use typical alignment for size ARG0.  */

static tree
fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
{
  int size;
  enum machine_mode mode;
  unsigned int mode_align, type_align;

  if (TREE_CODE (arg0) != INTEGER_CST)
    return NULL_TREE;

  size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
  mode = mode_for_size (size, MODE_INT, 0);
  mode_align = GET_MODE_ALIGNMENT (mode);

  if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
    type_align = mode_align;
  else
    {
      tree ttype = TREE_TYPE (arg1);

      /* This function is usually invoked and folded immediately by the front
	 end before anything else has a chance to look at it.  The pointer
	 parameter at this point is usually cast to a void *, so check for that
	 and look past the cast.  */
      if (TREE_CODE (arg1) == NOP_EXPR && POINTER_TYPE_P (ttype)
	  && VOID_TYPE_P (TREE_TYPE (ttype)))
	arg1 = TREE_OPERAND (arg1, 0);

      ttype = TREE_TYPE (arg1);
      gcc_assert (POINTER_TYPE_P (ttype));

      /* Get the underlying type of the object.  */
      ttype = TREE_TYPE (ttype);
      type_align = TYPE_ALIGN (ttype);
    }

  /* If the object has smaller alignment, the lock free routines cannot
     be used.  */
  if (type_align < mode_align)
    return boolean_false_node;

  /* Check if a compare_and_swap pattern exists for the mode which represents
     the required size.  The pattern is not allowed to fail, so the existence
     of the pattern indicates support is present.  */
  if (can_compare_and_swap_p (mode, true))
    return boolean_true_node;

  return boolean_false_node;
}
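/* An illustrative sketch (editor's addition, hypothetical user code, not
   part of GCC; kept under #if 0): the two tests above, as seen from user
   code.  Both conditions must hold — the object is at least as aligned as
   the integer mode of its size requires, and the target has a
   never-failing compare-and-swap pattern for that mode.  */
#if 0
#include <assert.h>

static void
lock_free_example (void)
{
  int x;

  /* Second argument of 0: base the answer on the typical (mode)
     alignment for the size alone.  Typically true on mainstream
     targets for int-sized objects.  */
  assert (__atomic_always_lock_free (sizeof (int), 0));

  /* With a real pointer, the declared alignment of the pointed-to type
     is checked against the mode alignment, as in the code above.  */
  assert (__atomic_always_lock_free (sizeof x, &x));
}
#endif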
/* Return true if the parameters to call EXP represent an object which will
   always generate lock free instructions.  The first argument represents the
   size of the object, and the second parameter is a pointer to the object
   itself.  If NULL is passed for the object, then the result is based on
   typical alignment for an object of the specified size.  Otherwise return
   false.  */

static rtx
expand_builtin_atomic_always_lock_free (tree exp)
{
  tree size;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);

  if (TREE_CODE (arg0) != INTEGER_CST)
    {
      error ("non-constant argument 1 to __atomic_always_lock_free");
      return const0_rtx;
    }

  size = fold_builtin_atomic_always_lock_free (arg0, arg1);
  if (size == boolean_true_node)
    return const1_rtx;
  return const0_rtx;
}
/* Return a one or zero if it can be determined that object ARG1 of size ARG
   is lock free on this architecture.  */

static tree
fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
{
  if (!flag_inline_atomics)
    return NULL_TREE;

  /* If it isn't always lock free, don't generate a result.  */
  if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
    return boolean_true_node;

  return NULL_TREE;
}
/* Return true if the parameters to call EXP represent an object which will
   always generate lock free instructions.  The first argument represents the
   size of the object, and the second parameter is a pointer to the object
   itself.  If NULL is passed for the object, then the result is based on
   typical alignment for an object of the specified size.  Otherwise return
   NULL.  */

static rtx
expand_builtin_atomic_is_lock_free (tree exp)
{
  tree size;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);

  if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
    {
      error ("non-integer argument 1 to __atomic_is_lock_free");
      return NULL_RTX;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* If the value is known at compile time, return the RTX for it.  */
  size = fold_builtin_atomic_is_lock_free (arg0, arg1);
  if (size == boolean_true_node)
    return const1_rtx;

  return NULL_RTX;
}
/* Expand the __atomic_thread_fence intrinsic:
   	void __atomic_thread_fence (enum memmodel)
   EXP is the CALL_EXPR.  */

static void
expand_builtin_atomic_thread_fence (tree exp)
{
  enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
  expand_mem_thread_fence (model);
}

/* Expand the __atomic_signal_fence intrinsic:
   	void __atomic_signal_fence (enum memmodel)
   EXP is the CALL_EXPR.  */

static void
expand_builtin_atomic_signal_fence (tree exp)
{
  enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
  expand_mem_signal_fence (model);
}

/* Expand the __sync_synchronize intrinsic.  */

static void
expand_builtin_sync_synchronize (void)
{
  expand_mem_thread_fence (MEMMODEL_SEQ_CST);
}
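/* Editorial example (assuming the standard __ATOMIC_* constants): a user
   fence such as

     __atomic_thread_fence (__ATOMIC_ACQUIRE);

   reaches expand_builtin_atomic_thread_fence, get_memmodel maps the
   constant to MEMMODEL_ACQUIRE, and expand_mem_thread_fence emits the
   target's fence insn; the legacy __sync_synchronize () is simply the
   sequentially consistent special case above.  */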
static rtx
expand_builtin_thread_pointer (tree exp, rtx target)
{
  enum insn_code icode;
  if (!validate_arglist (exp, VOID_TYPE))
    return const0_rtx;
  icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand op;
      if (!REG_P (target) || GET_MODE (target) != Pmode)
	target = gen_reg_rtx (Pmode);
      create_output_operand (&op, target, Pmode);
      expand_insn (icode, 1, &op);
      return target;
    }
  error ("__builtin_thread_pointer is not supported on this target");
  return const0_rtx;
}

static void
expand_builtin_set_thread_pointer (tree exp)
{
  enum insn_code icode;
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return;
  icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand op;
      rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
			     Pmode, EXPAND_NORMAL);
      create_input_operand (&op, val, Pmode);
      expand_insn (icode, 1, &op);
      return;
    }
  error ("__builtin_set_thread_pointer is not supported on this target");
}
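/* Editorial note: both expanders above go through a direct optab
   (get_thread_pointer_optab / set_thread_pointer_optab), so
   __builtin_thread_pointer () typically compiles to a single read of the
   TLS base register on targets that define the pattern and is a hard error
   elsewhere; there is no library fallback.  */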
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

rtx
expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
		int ignore)
{
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
  int flags;

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return targetm.expand_builtin (exp, target, subtarget, mode, ignore);

  /* When not optimizing, generate calls to library functions for a certain
     set of builtins.  */
  if (!optimize
      && !called_as_built_in (fndecl)
      && fcode != BUILT_IN_ALLOCA
      && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
      && fcode != BUILT_IN_FREE)
    return expand_call (exp, target, ignore);
  /* The built-in function expanders test for target == const0_rtx
     to determine whether the function's result will be ignored.  */
  if (ignore)
    target = const0_rtx;

  /* If the result of a pure or const built-in function is ignored, and
     none of its arguments are volatile, we can avoid expanding the
     built-in call and just evaluate the arguments for side-effects.  */
  if (target == const0_rtx
      && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
      && !(flags & ECF_LOOPING_CONST_OR_PURE))
    {
      bool volatilep = false;
      tree arg;
      call_expr_arg_iterator iter;

      FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
	if (TREE_THIS_VOLATILE (arg))
	  {
	    volatilep = true;
	    break;
	  }

      if (!volatilep)
	{
	  FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
	    expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return const0_rtx;
	}
    }
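  /* Illustrative example (editorial): given a const function

       int sq (int) __attribute__((const));
       sq (x++);	/* result unused */

     the block above skips expanding the call entirely and only evaluates
     x++ for its side effect; a volatile argument inhibits this and falls
     through to the normal expansion below.  */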
  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_FABS):
      target = expand_builtin_fabs (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_COPYSIGN):
      target = expand_builtin_copysign (exp, target, subtarget);
      if (target)
	return target;
      break;

      /* Just do a normal library call if we were unable to fold
	 the values.  */
    CASE_FLT_FN (BUILT_IN_CABS):
      break;

    CASE_FLT_FN (BUILT_IN_EXP):
    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
    CASE_FLT_FN (BUILT_IN_EXP2):
    CASE_FLT_FN (BUILT_IN_EXPM1):
    CASE_FLT_FN (BUILT_IN_LOGB):
    CASE_FLT_FN (BUILT_IN_LOG):
    CASE_FLT_FN (BUILT_IN_LOG10):
    CASE_FLT_FN (BUILT_IN_LOG2):
    CASE_FLT_FN (BUILT_IN_LOG1P):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ACOS):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
      /* Treat these like sqrt only if unsafe math optimizations are allowed,
	 because of possible accuracy problems.  */
      if (! flag_unsafe_math_optimizations)
	break;

    CASE_FLT_FN (BUILT_IN_SQRT):
    CASE_FLT_FN (BUILT_IN_FLOOR):
    CASE_FLT_FN (BUILT_IN_CEIL):
    CASE_FLT_FN (BUILT_IN_TRUNC):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      target = expand_builtin_mathfn (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_FMA):
      target = expand_builtin_mathfn_ternary (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_ILOGB):
      if (! flag_unsafe_math_optimizations)
	break;
    CASE_FLT_FN (BUILT_IN_ISINF):
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_ISFINITE:
    case BUILT_IN_ISNORMAL:
      target = expand_builtin_interclass_mathfn (exp, target);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      target = expand_builtin_int_roundingfn (exp, target);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_IRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_IROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      target = expand_builtin_int_roundingfn_2 (exp, target);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_POWI):
      target = expand_builtin_powi (exp, target);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_ATAN2):
    CASE_FLT_FN (BUILT_IN_LDEXP):
    CASE_FLT_FN (BUILT_IN_SCALB):
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      if (! flag_unsafe_math_optimizations)
	break;

    CASE_FLT_FN (BUILT_IN_FMOD):
    CASE_FLT_FN (BUILT_IN_REMAINDER):
    CASE_FLT_FN (BUILT_IN_DREM):
    CASE_FLT_FN (BUILT_IN_POW):
      target = expand_builtin_mathfn_2 (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_CEXPI):
      target = expand_builtin_cexpi (exp, target);
      gcc_assert (target);
      return target;

    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      if (! flag_unsafe_math_optimizations)
	break;
      target = expand_builtin_mathfn_3 (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_SINCOS):
      if (! flag_unsafe_math_optimizations)
	break;
      target = expand_builtin_sincos (exp);
      if (target)
	return target;
      break;
    case BUILT_IN_APPLY_ARGS:
      return expand_builtin_apply_args ();

      /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
	 FUNCTION with a copy of the parameters described by
	 ARGUMENTS, and ARGSIZE.  It returns a block of memory
	 allocated on the stack into which is stored all the registers
	 that might possibly be used for returning the result of a
	 function.  ARGUMENTS is the value returned by
	 __builtin_apply_args.  ARGSIZE is the number of bytes of
	 arguments that must be copied.  ??? How should this value be
	 computed?  We'll also need a safe worst case value for varargs
	 functions.  */
    case BUILT_IN_APPLY:
      if (!validate_arglist (exp, POINTER_TYPE,
			     POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
	  && !validate_arglist (exp, REFERENCE_TYPE,
				POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
	return const0_rtx;
      else
	{
	  rtx ops[3];

	  ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
	  ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
	  ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));

	  return expand_builtin_apply (ops[0], ops[1], ops[2]);
	}
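      /* Editorial sketch: the pair above underlies old-style call
	 forwarding, roughly

	   void *args = __builtin_apply_args ();
	   void *res  = __builtin_apply ((void (*)()) fn, args, 64);
	   __builtin_return (res);

	 where the 64-byte ARGSIZE is a caller-supplied guess, which is
	 exactly the ??? concern raised in the comment above.  */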
      /* __builtin_return (RESULT) causes the function to return the
	 value described by RESULT.  RESULT is address of the block of
	 memory returned by __builtin_apply.  */
    case BUILT_IN_RETURN:
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
	expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
      return const0_rtx;

    case BUILT_IN_SAVEREGS:
      return expand_builtin_saveregs ();

    case BUILT_IN_VA_ARG_PACK:
      /* All valid uses of __builtin_va_arg_pack () are removed during
	 inlining.  */
      error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
      return const0_rtx;

    case BUILT_IN_VA_ARG_PACK_LEN:
      /* All valid uses of __builtin_va_arg_pack_len () are removed during
	 inlining.  */
      error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
      return const0_rtx;

      /* Return the address of the first anonymous stack arg.  */
    case BUILT_IN_NEXT_ARG:
      if (fold_builtin_next_arg (exp, false))
	return const0_rtx;
      return expand_builtin_next_arg ();
    case BUILT_IN_CLEAR_CACHE:
      target = expand_builtin___clear_cache (exp);
      if (target)
	return target;
      break;

    case BUILT_IN_CLASSIFY_TYPE:
      return expand_builtin_classify_type (exp);

    case BUILT_IN_CONSTANT_P:
      return const0_rtx;

    case BUILT_IN_FRAME_ADDRESS:
    case BUILT_IN_RETURN_ADDRESS:
      return expand_builtin_frame_address (fndecl, exp);

      /* Returns the address of the area where the structure is returned.
	 0 otherwise.  */
    case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      if (call_expr_nargs (exp) != 0
	  || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
	  || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
	return const0_rtx;
      else
	return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);

    case BUILT_IN_ALLOCA:
    case BUILT_IN_ALLOCA_WITH_ALIGN:
      /* If the allocation stems from the declaration of a variable-sized
	 object, it cannot accumulate.  */
      target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
      if (target)
	return target;
      break;

    case BUILT_IN_STACK_SAVE:
      return expand_stack_save ();

    case BUILT_IN_STACK_RESTORE:
      expand_stack_restore (CALL_EXPR_ARG (exp, 0));
      return const0_rtx;

    case BUILT_IN_BSWAP16:
    case BUILT_IN_BSWAP32:
    case BUILT_IN_BSWAP64:
      target = expand_builtin_bswap (target_mode, exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_FFS):
    case BUILT_IN_FFSIMAX:
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, ffs_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_CLZ):
    case BUILT_IN_CLZIMAX:
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, clz_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_CTZ):
    case BUILT_IN_CTZIMAX:
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, ctz_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_CLRSB):
    case BUILT_IN_CLRSBIMAX:
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, clrsb_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_POPCOUNT):
    case BUILT_IN_POPCOUNTIMAX:
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, popcount_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_PARITY):
    case BUILT_IN_PARITYIMAX:
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, parity_optab);
      if (target)
	return target;
      break;
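      /* Editorial note: the six bit-query groups above share one expander;
	 expand_builtin_unop just selects a different optab (ffs_optab,
	 clz_optab, ctz_optab, clrsb_optab, popcount_optab, parity_optab),
	 so e.g. __builtin_popcountll (x) becomes a popcount pattern in
	 DImode when the target provides one and a libgcc call otherwise.  */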
    case BUILT_IN_STRLEN:
      target = expand_builtin_strlen (exp, target, target_mode);
      if (target)
	return target;
      break;

    case BUILT_IN_STRCPY:
      target = expand_builtin_strcpy (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_STRNCPY:
      target = expand_builtin_strncpy (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_STPCPY:
      target = expand_builtin_stpcpy (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMCPY:
      target = expand_builtin_memcpy (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMPCPY:
      target = expand_builtin_mempcpy (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMSET:
      target = expand_builtin_memset (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_BZERO:
      target = expand_builtin_bzero (exp);
      if (target)
	return target;
      break;

    case BUILT_IN_STRCMP:
      target = expand_builtin_strcmp (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_STRNCMP:
      target = expand_builtin_strncmp (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMCMP:
      target = expand_builtin_memcmp (exp, target, mode);
      if (target)
	return target;
      break;
    case BUILT_IN_SETJMP:
      /* This should have been lowered to the builtins below.  */
      gcc_unreachable ();

    case BUILT_IN_SETJMP_SETUP:
      /* __builtin_setjmp_setup is passed a pointer to an array of five words
	 and the receiver label.  */
      if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
	{
	  rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
				      VOIDmode, EXPAND_NORMAL);
	  tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
	  rtx label_r = label_rtx (label);

	  /* This is copied from the handling of non-local gotos.  */
	  expand_builtin_setjmp_setup (buf_addr, label_r);
	  nonlocal_goto_handler_labels
	    = gen_rtx_EXPR_LIST (VOIDmode, label_r,
				 nonlocal_goto_handler_labels);
	  /* ??? Do not let expand_label treat us as such since we would
	     not want to be both on the list of non-local labels and on
	     the list of forced labels.  */
	  FORCED_LABEL (label) = 0;
	  return const0_rtx;
	}
      break;

    case BUILT_IN_SETJMP_DISPATCHER:
      /* __builtin_setjmp_dispatcher is passed the dispatcher label.  */
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
	{
	  tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
	  rtx label_r = label_rtx (label);

	  /* Remove the dispatcher label from the list of non-local labels
	     since the receiver labels have been added to it above.  */
	  remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
	  return const0_rtx;
	}
      break;

    case BUILT_IN_SETJMP_RECEIVER:
      /* __builtin_setjmp_receiver is passed the receiver label.  */
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
	{
	  tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
	  rtx label_r = label_rtx (label);

	  expand_builtin_setjmp_receiver (label_r);
	  return const0_rtx;
	}
      break;

      /* __builtin_longjmp is passed a pointer to an array of five words.
	 It's similar to the C library longjmp function but works with
	 __builtin_setjmp above.  */
    case BUILT_IN_LONGJMP:
      if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
	{
	  rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
				      VOIDmode, EXPAND_NORMAL);
	  rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));

	  if (value != const1_rtx)
	    {
	      error ("%<__builtin_longjmp%> second argument must be 1");
	      return const0_rtx;
	    }

	  expand_builtin_longjmp (buf_addr, value);
	  return const0_rtx;
	}
      break;

    case BUILT_IN_NONLOCAL_GOTO:
      target = expand_builtin_nonlocal_goto (exp);
      if (target)
	return target;
      break;

      /* This updates the setjmp buffer that is its argument with the value
	 of the current stack pointer.  */
    case BUILT_IN_UPDATE_SETJMP_BUF:
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
	{
	  rtx buf_addr
	    = expand_normal (CALL_EXPR_ARG (exp, 0));

	  expand_builtin_update_setjmp_buf (buf_addr);
	  return const0_rtx;
	}
      break;
    case BUILT_IN_TRAP:
      expand_builtin_trap ();
      return const0_rtx;

    case BUILT_IN_UNREACHABLE:
      expand_builtin_unreachable ();
      return const0_rtx;

    CASE_FLT_FN (BUILT_IN_SIGNBIT):
    case BUILT_IN_SIGNBITD32:
    case BUILT_IN_SIGNBITD64:
    case BUILT_IN_SIGNBITD128:
      target = expand_builtin_signbit (exp, target);
      if (target)
	return target;
      break;

      /* Various hooks for the DWARF 2 __throw routine.  */
    case BUILT_IN_UNWIND_INIT:
      expand_builtin_unwind_init ();
      return const0_rtx;
    case BUILT_IN_DWARF_CFA:
      return virtual_cfa_rtx;
#ifdef DWARF2_UNWIND_INFO
    case BUILT_IN_DWARF_SP_COLUMN:
      return expand_builtin_dwarf_sp_column ();
    case BUILT_IN_INIT_DWARF_REG_SIZES:
      expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
      return const0_rtx;
#endif
    case BUILT_IN_FROB_RETURN_ADDR:
      return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
    case BUILT_IN_EXTRACT_RETURN_ADDR:
      return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
    case BUILT_IN_EH_RETURN:
      expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
				CALL_EXPR_ARG (exp, 1));
      return const0_rtx;
#ifdef EH_RETURN_DATA_REGNO
    case BUILT_IN_EH_RETURN_DATA_REGNO:
      return expand_builtin_eh_return_data_regno (exp);
#endif
    case BUILT_IN_EXTEND_POINTER:
      return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
    case BUILT_IN_EH_POINTER:
      return expand_builtin_eh_pointer (exp);
    case BUILT_IN_EH_FILTER:
      return expand_builtin_eh_filter (exp);
    case BUILT_IN_EH_COPY_VALUES:
      return expand_builtin_eh_copy_values (exp);

    case BUILT_IN_VA_START:
      return expand_builtin_va_start (exp);
    case BUILT_IN_VA_END:
      return expand_builtin_va_end (exp);
    case BUILT_IN_VA_COPY:
      return expand_builtin_va_copy (exp);
    case BUILT_IN_EXPECT:
      return expand_builtin_expect (exp, target);
    case BUILT_IN_ASSUME_ALIGNED:
      return expand_builtin_assume_aligned (exp, target);
    case BUILT_IN_PREFETCH:
      expand_builtin_prefetch (exp);
      return const0_rtx;

    case BUILT_IN_INIT_TRAMPOLINE:
      return expand_builtin_init_trampoline (exp, true);
    case BUILT_IN_INIT_HEAP_TRAMPOLINE:
      return expand_builtin_init_trampoline (exp, false);
    case BUILT_IN_ADJUST_TRAMPOLINE:
      return expand_builtin_adjust_trampoline (exp);

    case BUILT_IN_FORK:
    case BUILT_IN_EXECL:
    case BUILT_IN_EXECV:
    case BUILT_IN_EXECLP:
    case BUILT_IN_EXECLE:
    case BUILT_IN_EXECVP:
    case BUILT_IN_EXECVE:
      target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_ADD_1:
    case BUILT_IN_SYNC_FETCH_AND_ADD_2:
    case BUILT_IN_SYNC_FETCH_AND_ADD_4:
    case BUILT_IN_SYNC_FETCH_AND_ADD_8:
    case BUILT_IN_SYNC_FETCH_AND_ADD_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
      target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
      if (target)
	return target;
      break;
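      /* Editorial note on the pattern repeated below: the _1/_2/_4/_8/_16
	 variants are consecutive enum values, so subtracting the _1 code
	 yields an index 0..4 that get_builtin_sync_mode maps to the
	 QI/HI/SI/DI/TImode integer mode of that width; e.g. for
	 BUILT_IN_SYNC_FETCH_AND_SUB_4 the index is 2 and the mode SImode.  */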
    case BUILT_IN_SYNC_FETCH_AND_SUB_1:
    case BUILT_IN_SYNC_FETCH_AND_SUB_2:
    case BUILT_IN_SYNC_FETCH_AND_SUB_4:
    case BUILT_IN_SYNC_FETCH_AND_SUB_8:
    case BUILT_IN_SYNC_FETCH_AND_SUB_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
      target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_OR_1:
    case BUILT_IN_SYNC_FETCH_AND_OR_2:
    case BUILT_IN_SYNC_FETCH_AND_OR_4:
    case BUILT_IN_SYNC_FETCH_AND_OR_8:
    case BUILT_IN_SYNC_FETCH_AND_OR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
      target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_AND_1:
    case BUILT_IN_SYNC_FETCH_AND_AND_2:
    case BUILT_IN_SYNC_FETCH_AND_AND_4:
    case BUILT_IN_SYNC_FETCH_AND_AND_8:
    case BUILT_IN_SYNC_FETCH_AND_AND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
      target = expand_builtin_sync_operation (mode, exp, AND, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_XOR_1:
    case BUILT_IN_SYNC_FETCH_AND_XOR_2:
    case BUILT_IN_SYNC_FETCH_AND_XOR_4:
    case BUILT_IN_SYNC_FETCH_AND_XOR_8:
    case BUILT_IN_SYNC_FETCH_AND_XOR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
      target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_NAND_1:
    case BUILT_IN_SYNC_FETCH_AND_NAND_2:
    case BUILT_IN_SYNC_FETCH_AND_NAND_4:
    case BUILT_IN_SYNC_FETCH_AND_NAND_8:
    case BUILT_IN_SYNC_FETCH_AND_NAND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
      target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_ADD_AND_FETCH_1:
    case BUILT_IN_SYNC_ADD_AND_FETCH_2:
    case BUILT_IN_SYNC_ADD_AND_FETCH_4:
    case BUILT_IN_SYNC_ADD_AND_FETCH_8:
    case BUILT_IN_SYNC_ADD_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_SUB_AND_FETCH_1:
    case BUILT_IN_SYNC_SUB_AND_FETCH_2:
    case BUILT_IN_SYNC_SUB_AND_FETCH_4:
    case BUILT_IN_SYNC_SUB_AND_FETCH_8:
    case BUILT_IN_SYNC_SUB_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_OR_AND_FETCH_1:
    case BUILT_IN_SYNC_OR_AND_FETCH_2:
    case BUILT_IN_SYNC_OR_AND_FETCH_4:
    case BUILT_IN_SYNC_OR_AND_FETCH_8:
    case BUILT_IN_SYNC_OR_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_AND_AND_FETCH_1:
    case BUILT_IN_SYNC_AND_AND_FETCH_2:
    case BUILT_IN_SYNC_AND_AND_FETCH_4:
    case BUILT_IN_SYNC_AND_AND_FETCH_8:
    case BUILT_IN_SYNC_AND_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, AND, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_XOR_AND_FETCH_1:
    case BUILT_IN_SYNC_XOR_AND_FETCH_2:
    case BUILT_IN_SYNC_XOR_AND_FETCH_4:
    case BUILT_IN_SYNC_XOR_AND_FETCH_8:
    case BUILT_IN_SYNC_XOR_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_NAND_AND_FETCH_1:
    case BUILT_IN_SYNC_NAND_AND_FETCH_2:
    case BUILT_IN_SYNC_NAND_AND_FETCH_4:
    case BUILT_IN_SYNC_NAND_AND_FETCH_8:
    case BUILT_IN_SYNC_NAND_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
      if (mode == VOIDmode)
	mode = TYPE_MODE (boolean_type_node);
      if (!target || !register_operand (target, mode))
	target = gen_reg_rtx (mode);

      mode = get_builtin_sync_mode
		(fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
      target = expand_builtin_compare_and_swap (mode, exp, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
      mode = get_builtin_sync_mode
		(fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
      target = expand_builtin_compare_and_swap (mode, exp, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
      target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_LOCK_RELEASE_1:
    case BUILT_IN_SYNC_LOCK_RELEASE_2:
    case BUILT_IN_SYNC_LOCK_RELEASE_4:
    case BUILT_IN_SYNC_LOCK_RELEASE_8:
    case BUILT_IN_SYNC_LOCK_RELEASE_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
      expand_builtin_sync_lock_release (mode, exp);
      return const0_rtx;

    case BUILT_IN_SYNC_SYNCHRONIZE:
      expand_builtin_sync_synchronize ();
      return const0_rtx;

    case BUILT_IN_ATOMIC_EXCHANGE_1:
    case BUILT_IN_ATOMIC_EXCHANGE_2:
    case BUILT_IN_ATOMIC_EXCHANGE_4:
    case BUILT_IN_ATOMIC_EXCHANGE_8:
    case BUILT_IN_ATOMIC_EXCHANGE_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
      target = expand_builtin_atomic_exchange (mode, exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
      {
	unsigned int nargs, z;
	VEC(tree,gc) *vec;

	mode =
	    get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
	target = expand_builtin_atomic_compare_exchange (mode, exp, target);
	if (target)
	  return target;

	/* If this is turned into an external library call, the weak parameter
	   must be dropped to match the expected parameter list.  */
	nargs = call_expr_nargs (exp);
	vec = VEC_alloc (tree, gc, nargs - 1);
	for (z = 0; z < 3; z++)
	  VEC_quick_push (tree, vec, CALL_EXPR_ARG (exp, z));
	/* Skip the boolean weak parameter.  */
	for (z = 4; z < 6; z++)
	  VEC_quick_push (tree, vec, CALL_EXPR_ARG (exp, z));
	exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
	break;
      }
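      /* Editorial sketch: the rewrite above turns the six-argument builtin

	   __atomic_compare_exchange_N (ptr, expected, desired,
					weak, success, failure)

	 into a five-argument call with WEAK removed, matching the
	 corresponding library entry point (shown here for illustration; the
	 exact libatomic signature is the _N variant without the weak flag),
	 and then falls through to the generic library-call path at the end
	 of this function.  */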
    case BUILT_IN_ATOMIC_LOAD_1:
    case BUILT_IN_ATOMIC_LOAD_2:
    case BUILT_IN_ATOMIC_LOAD_4:
    case BUILT_IN_ATOMIC_LOAD_8:
    case BUILT_IN_ATOMIC_LOAD_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
      target = expand_builtin_atomic_load (mode, exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_STORE_1:
    case BUILT_IN_ATOMIC_STORE_2:
    case BUILT_IN_ATOMIC_STORE_4:
    case BUILT_IN_ATOMIC_STORE_8:
    case BUILT_IN_ATOMIC_STORE_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
      target = expand_builtin_atomic_store (mode, exp);
      if (target)
	return const0_rtx;
      break;

    case BUILT_IN_ATOMIC_ADD_FETCH_1:
    case BUILT_IN_ATOMIC_ADD_FETCH_2:
    case BUILT_IN_ATOMIC_ADD_FETCH_4:
    case BUILT_IN_ATOMIC_ADD_FETCH_8:
    case BUILT_IN_ATOMIC_ADD_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
				       (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }

    case BUILT_IN_ATOMIC_SUB_FETCH_1:
    case BUILT_IN_ATOMIC_SUB_FETCH_2:
    case BUILT_IN_ATOMIC_SUB_FETCH_4:
    case BUILT_IN_ATOMIC_SUB_FETCH_8:
    case BUILT_IN_ATOMIC_SUB_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
				       (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }

    case BUILT_IN_ATOMIC_AND_FETCH_1:
    case BUILT_IN_ATOMIC_AND_FETCH_2:
    case BUILT_IN_ATOMIC_AND_FETCH_4:
    case BUILT_IN_ATOMIC_AND_FETCH_8:
    case BUILT_IN_ATOMIC_AND_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
				       (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }

    case BUILT_IN_ATOMIC_NAND_FETCH_1:
    case BUILT_IN_ATOMIC_NAND_FETCH_2:
    case BUILT_IN_ATOMIC_NAND_FETCH_4:
    case BUILT_IN_ATOMIC_NAND_FETCH_8:
    case BUILT_IN_ATOMIC_NAND_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
				       (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }

    case BUILT_IN_ATOMIC_XOR_FETCH_1:
    case BUILT_IN_ATOMIC_XOR_FETCH_2:
    case BUILT_IN_ATOMIC_XOR_FETCH_4:
    case BUILT_IN_ATOMIC_XOR_FETCH_8:
    case BUILT_IN_ATOMIC_XOR_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
				       (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }

    case BUILT_IN_ATOMIC_OR_FETCH_1:
    case BUILT_IN_ATOMIC_OR_FETCH_2:
    case BUILT_IN_ATOMIC_OR_FETCH_4:
    case BUILT_IN_ATOMIC_OR_FETCH_8:
    case BUILT_IN_ATOMIC_OR_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
				       (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }

    case BUILT_IN_ATOMIC_FETCH_ADD_1:
    case BUILT_IN_ATOMIC_FETCH_ADD_2:
    case BUILT_IN_ATOMIC_FETCH_ADD_4:
    case BUILT_IN_ATOMIC_FETCH_ADD_8:
    case BUILT_IN_ATOMIC_FETCH_ADD_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_SUB_1:
    case BUILT_IN_ATOMIC_FETCH_SUB_2:
    case BUILT_IN_ATOMIC_FETCH_SUB_4:
    case BUILT_IN_ATOMIC_FETCH_SUB_8:
    case BUILT_IN_ATOMIC_FETCH_SUB_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_AND_1:
    case BUILT_IN_ATOMIC_FETCH_AND_2:
    case BUILT_IN_ATOMIC_FETCH_AND_4:
    case BUILT_IN_ATOMIC_FETCH_AND_8:
    case BUILT_IN_ATOMIC_FETCH_AND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_NAND_1:
    case BUILT_IN_ATOMIC_FETCH_NAND_2:
    case BUILT_IN_ATOMIC_FETCH_NAND_4:
    case BUILT_IN_ATOMIC_FETCH_NAND_8:
    case BUILT_IN_ATOMIC_FETCH_NAND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_XOR_1:
    case BUILT_IN_ATOMIC_FETCH_XOR_2:
    case BUILT_IN_ATOMIC_FETCH_XOR_4:
    case BUILT_IN_ATOMIC_FETCH_XOR_8:
    case BUILT_IN_ATOMIC_FETCH_XOR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_OR_1:
    case BUILT_IN_ATOMIC_FETCH_OR_2:
    case BUILT_IN_ATOMIC_FETCH_OR_4:
    case BUILT_IN_ATOMIC_FETCH_OR_8:
    case BUILT_IN_ATOMIC_FETCH_OR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_TEST_AND_SET:
      return expand_builtin_atomic_test_and_set (exp, target);

    case BUILT_IN_ATOMIC_CLEAR:
      return expand_builtin_atomic_clear (exp);

    case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
      return expand_builtin_atomic_always_lock_free (exp);

    case BUILT_IN_ATOMIC_IS_LOCK_FREE:
      target = expand_builtin_atomic_is_lock_free (exp);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_THREAD_FENCE:
      expand_builtin_atomic_thread_fence (exp);
      return const0_rtx;

    case BUILT_IN_ATOMIC_SIGNAL_FENCE:
      expand_builtin_atomic_signal_fence (exp);
      return const0_rtx;

    case BUILT_IN_OBJECT_SIZE:
      return expand_builtin_object_size (exp);

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      target = expand_builtin_memory_chk (exp, target, mode, fcode);
      if (target)
	return target;
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
    case BUILT_IN_STRCAT_CHK:
    case BUILT_IN_STRNCAT_CHK:
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      maybe_emit_chk_warning (exp, fcode);
      break;

    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      maybe_emit_sprintf_chk_warning (exp, fcode);
      break;

    case BUILT_IN_FREE:
      if (warn_free_nonheap_object)
	maybe_emit_free_warning (exp);
      break;

    case BUILT_IN_THREAD_POINTER:
      return expand_builtin_thread_pointer (exp, target);

    case BUILT_IN_SET_THREAD_POINTER:
      expand_builtin_set_thread_pointer (exp);
      return const0_rtx;

    default:	/* just do library call, if unknown builtin */
      break;
    }

  /* The switch statement above can drop through to cause the function
     to be called normally.  */
  return expand_call (exp, target, ignore);
}
/* Determine whether a tree node represents a call to a built-in
   function.  If the tree T is a call to a built-in function with
   the right number of arguments of the appropriate types, return
   the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
   Otherwise the return value is END_BUILTINS.  */

enum built_in_function
builtin_mathfn_code (const_tree t)
{
  const_tree fndecl, arg, parmlist;
  const_tree argtype, parmtype;
  const_call_expr_arg_iterator iter;

  if (TREE_CODE (t) != CALL_EXPR
      || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
    return END_BUILTINS;

  fndecl = get_callee_fndecl (t);
  if (fndecl == NULL_TREE
      || TREE_CODE (fndecl) != FUNCTION_DECL
      || ! DECL_BUILT_IN (fndecl)
      || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return END_BUILTINS;

  parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  init_const_call_expr_arg_iterator (t, &iter);
  for (; parmlist; parmlist = TREE_CHAIN (parmlist))
    {
      /* If a function doesn't take a variable number of arguments,
	 the last element in the list will have type `void'.  */
      parmtype = TREE_VALUE (parmlist);
      if (VOID_TYPE_P (parmtype))
	{
	  if (more_const_call_expr_args_p (&iter))
	    return END_BUILTINS;
	  return DECL_FUNCTION_CODE (fndecl);
	}

      if (! more_const_call_expr_args_p (&iter))
	return END_BUILTINS;

      arg = next_const_call_expr_arg (&iter);
      argtype = TREE_TYPE (arg);

      if (SCALAR_FLOAT_TYPE_P (parmtype))
	{
	  if (! SCALAR_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (COMPLEX_FLOAT_TYPE_P (parmtype))
	{
	  if (! COMPLEX_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (POINTER_TYPE_P (parmtype))
	{
	  if (! POINTER_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (INTEGRAL_TYPE_P (parmtype))
	{
	  if (! INTEGRAL_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else
	return END_BUILTINS;
    }

  /* Variable-length argument list.  */
  return DECL_FUNCTION_CODE (fndecl);
}
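/* Editorial example: for a tree T representing sqrt (x) with double X,
   builtin_mathfn_code (t) returns BUILT_IN_SQRT, so callers can write
   mode-generic checks such as

     if (builtin_mathfn_code (t) == BUILT_IN_SQRT)
       ...;

   while a call whose argument type does not match the parameter list
   (say a pointer passed where a float is expected) yields END_BUILTINS
   and is left alone.  */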
/* Fold a call to __builtin_constant_p, if we know its argument ARG will
   evaluate to a constant.  */

static tree
fold_builtin_constant_p (tree arg)
{
  /* We return 1 for a numeric type that's known to be a constant
     value at compile-time or for an aggregate type that's a
     literal constant.  */
  STRIP_NOPS (arg);

  /* If we know this is a constant, emit the constant of one.  */
  if (CONSTANT_CLASS_P (arg)
      || (TREE_CODE (arg) == CONSTRUCTOR
	  && TREE_CONSTANT (arg)))
    return integer_one_node;
  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (arg, 0);
      if (TREE_CODE (op) == STRING_CST
	  || (TREE_CODE (op) == ARRAY_REF
	      && integer_zerop (TREE_OPERAND (op, 1))
	      && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
	return integer_one_node;
    }

  /* If this expression has side effects, show we don't know it to be a
     constant.  Likewise if it's a pointer or aggregate type since in
     those cases we only want literals, since those are only optimized
     when generating RTL, not later.
     And finally, if we are compiling an initializer, not code, we
     need to return a definite result now; there's not going to be any
     more optimization done.  */
  if (TREE_SIDE_EFFECTS (arg)
      || AGGREGATE_TYPE_P (TREE_TYPE (arg))
      || POINTER_TYPE_P (TREE_TYPE (arg))
      || cfun == 0
      || folding_initializer)
    return integer_zero_node;

  return NULL_TREE;
}
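/* Editorial examples of the fold above:

     __builtin_constant_p (42)     -> integer_one_node
     __builtin_constant_p ("abc")  -> integer_one_node (ADDR_EXPR of a
				      STRING_CST)
     __builtin_constant_p (x++)    -> integer_zero_node (side effects)

   A plain integer variable stays NULL_TREE so a later pass can still
   answer 1 once optimization has propagated constants.  */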
/* Create builtin_expect with PRED and EXPECTED as its arguments and
   return it as a truthvalue.  */

static tree
build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
{
  tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;

  fn = builtin_decl_explicit (BUILT_IN_EXPECT);
  arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
  ret_type = TREE_TYPE (TREE_TYPE (fn));
  pred_type = TREE_VALUE (arg_types);
  expected_type = TREE_VALUE (TREE_CHAIN (arg_types));

  pred = fold_convert_loc (loc, pred_type, pred);
  expected = fold_convert_loc (loc, expected_type, expected);
  call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);

  return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
		 build_int_cst (ret_type, 0));
}
/* Fold a call to builtin_expect with arguments ARG0 and ARG1.  Return
   NULL_TREE if no simplification is possible.  */

static tree
fold_builtin_expect (location_t loc, tree arg0, tree arg1)
{
  tree inner, fndecl, inner_arg0;
  enum tree_code code;

  /* Distribute the expected value over short-circuiting operators.
     See through the cast from truthvalue_type_node to long.  */
  inner_arg0 = arg0;
  while (TREE_CODE (inner_arg0) == NOP_EXPR
	 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
    inner_arg0 = TREE_OPERAND (inner_arg0, 0);

  /* If this is a builtin_expect within a builtin_expect keep the
     inner one.  See through a comparison against a constant.  It
     might have been added to create a truthvalue.  */
  inner = inner_arg0;

  if (COMPARISON_CLASS_P (inner)
      && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
    inner = TREE_OPERAND (inner, 0);

  if (TREE_CODE (inner) == CALL_EXPR
      && (fndecl = get_callee_fndecl (inner))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
    return arg0;

  inner = inner_arg0;
  code = TREE_CODE (inner);
  if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
    {
      tree op0 = TREE_OPERAND (inner, 0);
      tree op1 = TREE_OPERAND (inner, 1);

      op0 = build_builtin_expect_predicate (loc, op0, arg1);
      op1 = build_builtin_expect_predicate (loc, op1, arg1);
      inner = build2 (code, TREE_TYPE (inner), op0, op1);

      return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
    }

  /* If the argument isn't invariant then there's nothing else we can do.  */
  if (!TREE_CONSTANT (inner_arg0))
    return NULL_TREE;

  /* If we expect that a comparison against the argument will fold to
     a constant return the constant.  In practice, this means a true
     constant or the address of a non-weak symbol.  */
  inner = inner_arg0;
  STRIP_NOPS (inner);
  if (TREE_CODE (inner) == ADDR_EXPR)
    {
      do
	{
	  inner = TREE_OPERAND (inner, 0);
	}
      while (TREE_CODE (inner) == COMPONENT_REF
	     || TREE_CODE (inner) == ARRAY_REF);
      if ((TREE_CODE (inner) == VAR_DECL
	   || TREE_CODE (inner) == FUNCTION_DECL)
	  && DECL_WEAK (inner))
	return NULL_TREE;
    }

  /* Otherwise, ARG0 already has the proper type for the return value.  */
  return arg0;
}
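/* Editorial example of the distribution above:

     __builtin_expect (a && b, 1)

   becomes roughly

     __builtin_expect (a, 1) != 0 && __builtin_expect (b, 1) != 0

   via build_builtin_expect_predicate, so each short-circuit arm carries
   the branch hint instead of only the combined truth value.  */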
/* Fold a call to __builtin_classify_type with argument ARG.  */

static tree
fold_builtin_classify_type (tree arg)
{
  if (arg == 0)
    return build_int_cst (integer_type_node, no_type_class);

  return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
}

/* Fold a call to __builtin_strlen with argument ARG.  */

static tree
fold_builtin_strlen (location_t loc, tree type, tree arg)
{
  if (!validate_arg (arg, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree len = c_strlen (arg, 0);

      if (len)
	return fold_convert_loc (loc, type, len);

      return NULL_TREE;
    }
}
/* Fold a call to __builtin_inf or __builtin_huge_val.  */

static tree
fold_builtin_inf (location_t loc, tree type, int warn)
{
  REAL_VALUE_TYPE real;

  /* __builtin_inff is intended to be usable to define INFINITY on all
     targets.  If an infinity is not available, INFINITY expands "to a
     positive constant of type float that overflows at translation
     time", footnote "In this case, using INFINITY will violate the
     constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
     Thus we pedwarn to ensure this constraint violation is
     diagnosed.  */
  if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
    pedwarn (loc, 0, "target format does not support infinity");

  real_inf (&real);
  return build_real (type, real);
}

/* Fold a call to __builtin_nan or __builtin_nans with argument ARG.  */

static tree
fold_builtin_nan (tree arg, tree type, int quiet)
{
  REAL_VALUE_TYPE real;
  const char *str;

  if (!validate_arg (arg, POINTER_TYPE))
    return NULL_TREE;
  str = c_getstr (arg);
  if (!str)
    return NULL_TREE;

  if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
    return NULL_TREE;

  return build_real (type, real);
}
/* Return true if the floating point expression T has an integer value.
   We also allow +Inf, -Inf and NaN to be considered integer values.  */

static bool
integer_valued_real_p (tree t)
{
  switch (TREE_CODE (t))
    {
    case FLOAT_EXPR:
      return true;

    case ABS_EXPR:
    case SAVE_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 0));

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 1));

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 0))
	     && integer_valued_real_p (TREE_OPERAND (t, 1));

    case COND_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 1))
	     && integer_valued_real_p (TREE_OPERAND (t, 2));

    case REAL_CST:
      return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));

    case NOP_EXPR:
      {
	tree type = TREE_TYPE (TREE_OPERAND (t, 0));
	if (TREE_CODE (type) == INTEGER_TYPE)
	  return true;
	if (TREE_CODE (type) == REAL_TYPE)
	  return integer_valued_real_p (TREE_OPERAND (t, 0));
	break;
      }

    case CALL_EXPR:
      switch (builtin_mathfn_code (t))
	{
	CASE_FLT_FN (BUILT_IN_CEIL):
	CASE_FLT_FN (BUILT_IN_FLOOR):
	CASE_FLT_FN (BUILT_IN_NEARBYINT):
	CASE_FLT_FN (BUILT_IN_RINT):
	CASE_FLT_FN (BUILT_IN_ROUND):
	CASE_FLT_FN (BUILT_IN_TRUNC):
	  return true;

	CASE_FLT_FN (BUILT_IN_FMIN):
	CASE_FLT_FN (BUILT_IN_FMAX):
	  return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
	    && integer_valued_real_p (CALL_EXPR_ARG (t, 1));

	default:
	  break;
	}
      break;

    default:
      break;
    }
  return false;
}
/* FNDECL is assumed to be a builtin where truncation can be propagated
   across (for instance floor((double)f) == (double)floorf (f)).
   Do the transformation for a call with argument ARG.  */

static tree
fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Integer rounding functions are idempotent.  */
  if (fcode == builtin_mathfn_code (arg))
    return arg;

  /* If argument is already integer valued, and we don't need to worry
     about setting errno, there's no need to perform rounding.  */
  if (! flag_errno_math && integer_valued_real_p (arg))
    return arg;

  if (optimize)
    {
      tree arg0 = strip_float_extensions (arg);
      tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
      tree newtype = TREE_TYPE (arg0);
      tree decl;

      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
	  && (decl = mathfn_built_in (newtype, fcode)))
	return fold_convert_loc (loc, ftype,
				 build_call_expr_loc (loc, decl, 1,
						      fold_convert_loc (loc,
									newtype,
									arg0)));
    }
  return NULL_TREE;
}
/* FNDECL is assumed to be builtin which can narrow the FP type of
   the argument, for instance lround((double)f) -> lroundf (f).
   Do the transformation for a call with argument ARG.  */

static tree
fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* If argument is already integer valued, and we don't need to worry
     about setting errno, there's no need to perform rounding.  */
  if (! flag_errno_math && integer_valued_real_p (arg))
    return fold_build1_loc (loc, FIX_TRUNC_EXPR,
			    TREE_TYPE (TREE_TYPE (fndecl)), arg);

  if (optimize)
    {
      tree ftype = TREE_TYPE (arg);
      tree arg0 = strip_float_extensions (arg);
      tree newtype = TREE_TYPE (arg0);
      tree decl;

      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
	  && (decl = mathfn_built_in (newtype, fcode)))
	return build_call_expr_loc (loc, decl, 1,
				    fold_convert_loc (loc, newtype, arg0));
    }

  /* Canonicalize iround (x) to lround (x) on ILP32 targets where
     sizeof (int) == sizeof (long).  */
  if (TYPE_PRECISION (integer_type_node)
      == TYPE_PRECISION (long_integer_type_node))
    {
      tree newfn = NULL_TREE;
      switch (fcode)
	{
	CASE_FLT_FN (BUILT_IN_ICEIL):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
	  break;

	CASE_FLT_FN (BUILT_IN_IFLOOR):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
	  break;

	CASE_FLT_FN (BUILT_IN_IROUND):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
	  break;

	CASE_FLT_FN (BUILT_IN_IRINT):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
	  break;

	default:
	  break;
	}

      if (newfn)
	{
	  tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
	  return fold_convert_loc (loc,
				   TREE_TYPE (TREE_TYPE (fndecl)), newcall);
	}
    }

  /* Canonicalize llround (x) to lround (x) on LP64 targets where
     sizeof (long long) == sizeof (long).  */
  if (TYPE_PRECISION (long_long_integer_type_node)
      == TYPE_PRECISION (long_integer_type_node))
    {
      tree newfn = NULL_TREE;
      switch (fcode)
	{
	CASE_FLT_FN (BUILT_IN_LLCEIL):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
	  break;

	CASE_FLT_FN (BUILT_IN_LLFLOOR):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
	  break;

	CASE_FLT_FN (BUILT_IN_LLROUND):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
	  break;

	CASE_FLT_FN (BUILT_IN_LLRINT):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
	  break;

	default:
	  break;
	}

      if (newfn)
	{
	  tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
	  return fold_convert_loc (loc,
				   TREE_TYPE (TREE_TYPE (fndecl)), newcall);
	}
    }

  return NULL_TREE;
}
/* Fold call to builtin cabs, cabsf or cabsl with argument ARG.  TYPE is the
   return type.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
{
  tree res;

  if (!validate_arg (arg, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if (TREE_CODE (arg) == COMPLEX_CST
      && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
			      type, mpfr_hypot)))
    return res;

  if (TREE_CODE (arg) == COMPLEX_EXPR)
    {
      tree real = TREE_OPERAND (arg, 0);
      tree imag = TREE_OPERAND (arg, 1);

      /* If either part is zero, cabs is fabs of the other.  */
      if (real_zerop (real))
	return fold_build1_loc (loc, ABS_EXPR, type, imag);
      if (real_zerop (imag))
	return fold_build1_loc (loc, ABS_EXPR, type, real);

      /* cabs(x+xi) -> fabs(x)*sqrt(2).  */
      if (flag_unsafe_math_optimizations
	  && operand_equal_p (real, imag, OEP_PURE_SAME))
	{
	  const REAL_VALUE_TYPE sqrt2_trunc
	    = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());

	  return fold_build2_loc (loc, MULT_EXPR, type,
				  fold_build1_loc (loc, ABS_EXPR, type, real),
				  build_real (type, sqrt2_trunc));
	}
    }

  /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z).  */
  if (TREE_CODE (arg) == NEGATE_EXPR
      || TREE_CODE (arg) == CONJ_EXPR)
    return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));

  /* Don't do this when optimizing for size.  */
  if (flag_unsafe_math_optimizations
      && optimize && optimize_function_for_speed_p (cfun))
    {
      tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

      if (sqrtfn != NULL_TREE)
	{
	  tree rpart, ipart, result;

	  arg = builtin_save_expr (arg);

	  rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
	  ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);

	  rpart = builtin_save_expr (rpart);
	  ipart = builtin_save_expr (ipart);

	  result = fold_build2_loc (loc, PLUS_EXPR, type,
				    fold_build2_loc (loc, MULT_EXPR, type,
						     rpart, rpart),
				    fold_build2_loc (loc, MULT_EXPR, type,
						     ipart, ipart));

	  return build_call_expr_loc (loc, sqrtfn, 1, result);
	}
    }

  return NULL_TREE;
}
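/* Editorial note: with -ffast-math and when optimizing for speed, the last
   transformation above open-codes

     cabs (z)  ->  sqrt (creal (z)*creal (z) + cimag (z)*cimag (z))

   which skips the overflow-safe scaling a library hypot/cabs performs;
   that is why it is guarded by flag_unsafe_math_optimizations.  */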
/* Build a complex (inf +- 0i) for the result of cproj.  TYPE is the
   complex tree type of the result.  If NEG is true, the imaginary
   zero is negative.  */

static tree
build_complex_cproj (tree type, bool neg)
{
  REAL_VALUE_TYPE rinf, rzero = dconst0;

  real_inf (&rinf);
  rzero.sign = neg;
  return build_complex (type, build_real (TREE_TYPE (type), rinf),
			build_real (TREE_TYPE (type), rzero));
}

/* Fold call to builtin cproj, cprojf or cprojl with argument ARG.  TYPE is the
   return type.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cproj (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
    return NULL_TREE;

  /* If there are no infinities, return arg.  */
  if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
    return non_lvalue_loc (loc, arg);

  /* Calculate the result when the argument is a constant.  */
  if (TREE_CODE (arg) == COMPLEX_CST)
    {
      const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      if (real_isinf (real) || real_isinf (imag))
	return build_complex_cproj (type, imag->sign);
      else
	return arg;
    }
  else if (TREE_CODE (arg) == COMPLEX_EXPR)
    {
      tree real = TREE_OPERAND (arg, 0);
      tree imag = TREE_OPERAND (arg, 1);

      STRIP_NOPS (real);
      STRIP_NOPS (imag);

      /* If the real part is inf and the imag part is known to be
	 nonnegative, return (inf + 0i).  Remember side-effects are
	 possible in the imag part.  */
      if (TREE_CODE (real) == REAL_CST
	  && real_isinf (TREE_REAL_CST_PTR (real))
	  && tree_expr_nonnegative_p (imag))
	return omit_one_operand_loc (loc, type,
				     build_complex_cproj (type, false),
				     arg);

      /* If the imag part is inf, return (inf+I*copysign(0,imag)).
	 Remember side-effects are possible in the real part.  */
      if (TREE_CODE (imag) == REAL_CST
	  && real_isinf (TREE_REAL_CST_PTR (imag)))
	return
	  omit_one_operand_loc (loc, type,
				build_complex_cproj (type, TREE_REAL_CST_PTR
						     (imag)->sign), arg);
    }

  return NULL_TREE;
}
/* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_sqrt (location_t loc, tree arg, tree type)
{
  enum built_in_function fcode;
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
    return res;

  /* Optimize sqrt(expN(x)) = expN(x*0.5).  */
  fcode = builtin_mathfn_code (arg);
  if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
    {
      tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      arg = fold_build2_loc (loc, MULT_EXPR, type,
			     CALL_EXPR_ARG (arg, 0),
			     build_real (type, dconsthalf));
      return build_call_expr_loc (loc, expfn, 1, arg);
    }

  /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)).  */
  if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
    {
      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

      if (powfn)
	{
	  tree arg0 = CALL_EXPR_ARG (arg, 0);
	  tree tree_root;
	  /* The inner root was either sqrt or cbrt.  */
	  /* This was a conditional expression but it triggered a bug
	     in Sun C 5.5.  */
	  REAL_VALUE_TYPE dconstroot;
	  if (BUILTIN_SQRT_P (fcode))
	    dconstroot = dconsthalf;
	  else
	    dconstroot = dconst_third ();

	  /* Adjust for the outer root.  */
	  SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
	  dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
	  tree_root = build_real (type, dconstroot);
	  return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
	}
    }

  /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5).  */
  if (flag_unsafe_math_optimizations
      && (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL))
    {
      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      tree arg0 = CALL_EXPR_ARG (arg, 0);
      tree arg1 = CALL_EXPR_ARG (arg, 1);
      tree narg1;
      if (!tree_expr_nonnegative_p (arg0))
	arg0 = build1 (ABS_EXPR, type, arg0);
      narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
			       build_real (type, dconsthalf));
      return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
    }

  return NULL_TREE;
}
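/* Editorial examples of the unsafe-math folds above:

     sqrt (exp (x))     ->  exp (x * 0.5)
     sqrt (cbrt (x))    ->  pow (x, 1.0/6.0)
     sqrt (pow (x, y))  ->  pow (fabs (x), y * 0.5)

   The fabs in the last form keeps the rewritten expression defined
   wherever the original was: pow of a negative base with a fractional
   exponent would otherwise be NaN.  */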
/* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cbrt (location_t loc, tree arg, tree type)
{
  const enum built_in_function fcode = builtin_mathfn_code (arg);
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
    return res;

  if (flag_unsafe_math_optimizations)
    {
      /* Optimize cbrt(expN(x)) -> expN(x/3).  */
      if (BUILTIN_EXPONENT_P (fcode))
	{
	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
	  const REAL_VALUE_TYPE third_trunc =
	    real_value_truncate (TYPE_MODE (type), dconst_third ());
	  arg = fold_build2_loc (loc, MULT_EXPR, type,
				 CALL_EXPR_ARG (arg, 0),
				 build_real (type, third_trunc));
	  return build_call_expr_loc (loc, expfn, 1, arg);
	}

      /* Optimize cbrt(sqrt(x)) -> pow(x,1/6).  */
      if (BUILTIN_SQRT_P (fcode))
	{
	  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

	  if (powfn)
	    {
	      tree arg0 = CALL_EXPR_ARG (arg, 0);
	      tree tree_root;
	      REAL_VALUE_TYPE dconstroot = dconst_third ();

	      SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
	      dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
	      tree_root = build_real (type, dconstroot);
	      return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
	    }
	}

      /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
	{
	  tree arg0 = CALL_EXPR_ARG (arg, 0);
	  if (tree_expr_nonnegative_p (arg0))
	    {
	      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

	      if (powfn)
		{
		  tree tree_root;
		  REAL_VALUE_TYPE dconstroot;

		  real_arithmetic (&dconstroot, MULT_EXPR,
				   dconst_third_ptr (), dconst_third_ptr ());
		  dconstroot = real_value_truncate (TYPE_MODE (type),
						    dconstroot);
		  tree_root = build_real (type, dconstroot);
		  return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
		}
	    }
	}

      /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL)
	{
	  tree arg00 = CALL_EXPR_ARG (arg, 0);
	  tree arg01 = CALL_EXPR_ARG (arg, 1);
	  if (tree_expr_nonnegative_p (arg00))
	    {
	      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
	      const REAL_VALUE_TYPE dconstroot
		= real_value_truncate (TYPE_MODE (type), dconst_third ());
	      tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
					     build_real (type, dconstroot));
	      return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
	    }
	}
    }
  return NULL_TREE;
}
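/* Editorial examples of the cbrt folds above (all under
   flag_unsafe_math_optimizations):

     cbrt (exp (x))   ->  exp (x / 3)
     cbrt (sqrt (x))  ->  pow (x, 1.0/6.0)
     cbrt (cbrt (x))  ->  pow (x, 1.0/9.0)   iff x is known nonnegative

   The nonnegativity check matters because cbrt is defined for negative
   arguments while pow with a fractional exponent is not.  */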
/* Fold function call to builtin cos, cosf, or cosl with argument ARG.
   TYPE is the type of the return value.  Return NULL_TREE if no
   simplification can be made.  */

static tree
fold_builtin_cos (location_t loc,
		  tree arg, tree type, tree fndecl)
{
  tree res, narg;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
    return res;

  /* Optimize cos(-x) into cos (x).  */
  if ((narg = fold_strip_sign_ops (arg)))
    return build_call_expr_loc (loc, fndecl, 1, narg);

  return NULL_TREE;
}

/* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree res, narg;

      /* Calculate the result when the argument is a constant.  */
      if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
	return res;

      /* Optimize cosh(-x) into cosh (x).  */
      if ((narg = fold_strip_sign_ops (arg)))
	return build_call_expr_loc (loc, fndecl, 1, narg);
    }

  return NULL_TREE;
}

/* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
   argument ARG.  TYPE is the type of the return value.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
		   bool hyper)
{
  if (validate_arg (arg, COMPLEX_TYPE)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
    {
      tree tmp;

      /* Calculate the result when the argument is a constant.  */
      if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
	return tmp;

      /* Optimize fn(-x) into fn(x).  */
      if ((tmp = fold_strip_sign_ops (arg)))
	return build_call_expr_loc (loc, fndecl, 1, tmp);
    }

  return NULL_TREE;
}

/* Fold function call to builtin tan, tanf, or tanl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_tan (tree arg, tree type)
{
  enum built_in_function fcode;
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
    return res;

  /* Optimize tan(atan(x)) = x.  */
  fcode = builtin_mathfn_code (arg);
  if (flag_unsafe_math_optimizations
      && (fcode == BUILT_IN_ATAN
	  || fcode == BUILT_IN_ATANF
	  || fcode == BUILT_IN_ATANL))
    return CALL_EXPR_ARG (arg, 0);

  return NULL_TREE;
}
/* Fold function call to builtin sincos, sincosf, or sincosl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_sincos (location_t loc,
		     tree arg0, tree arg1, tree arg2)
{
  tree type;
  tree res, fn, call;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  type = TREE_TYPE (arg0);

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
    return res;

  /* Canonicalize sincos to cexpi.  */
  if (!TARGET_C99_FUNCTIONS)
    return NULL_TREE;
  fn = mathfn_built_in (type, BUILT_IN_CEXPI);
  if (!fn)
    return NULL_TREE;

  call = build_call_expr_loc (loc, fn, 1, arg0);
  call = builtin_save_expr (call);

  return build2 (COMPOUND_EXPR, void_type_node,
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg1),
			 build1 (IMAGPART_EXPR, type, call)),
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg2),
			 build1 (REALPART_EXPR, type, call)));
}
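
/* Illustrative note (added commentary, not part of the original source):
   on targets with C99 runtime support, a call

       sincos (x, &s, &c);

   is canonicalized to the equivalent of

       t = __builtin_cexpi (x);
       s = __imag__ t;     (this is sin (x))
       c = __real__ t;     (this is cos (x))

   which later expansion can map back to a single combined
   sine-and-cosine instruction or library call.  */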
/* Fold function call to builtin cexp, cexpf, or cexpl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cexp (location_t loc, tree arg0, tree type)
{
  tree rtype;
  tree realp, imagp, ifn;
  tree res;

  if (!validate_arg (arg0, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
    return res;

  rtype = TREE_TYPE (TREE_TYPE (arg0));

  /* In case we can figure out the real part of arg0 and it is constant zero
     fold to cexpi.  */
  if (!TARGET_C99_FUNCTIONS)
    return NULL_TREE;
  ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
  if (!ifn)
    return NULL_TREE;

  if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
      && real_zerop (realp))
    {
      tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
      return build_call_expr_loc (loc, ifn, 1, narg);
    }

  /* In case we can easily decompose real and imaginary parts split cexp
     to exp (r) * cexpi (i).  */
  if (flag_unsafe_math_optimizations
      && realp)
    {
      tree rfn, rcall, icall;

      rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
      if (!rfn)
	return NULL_TREE;

      imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
      if (!imagp)
	return NULL_TREE;

      icall = build_call_expr_loc (loc, ifn, 1, imagp);
      icall = builtin_save_expr (icall);
      rcall = build_call_expr_loc (loc, rfn, 1, realp);
      rcall = builtin_save_expr (rcall);
      return fold_build2_loc (loc, COMPLEX_EXPR, type,
			      fold_build2_loc (loc, MULT_EXPR, rtype,
					       rcall,
					       fold_build1_loc (loc, REALPART_EXPR,
								rtype, icall)),
			      fold_build2_loc (loc, MULT_EXPR, rtype,
					       rcall,
					       fold_build1_loc (loc, IMAGPART_EXPR,
								rtype, icall)));
    }

  return NULL_TREE;
}
/* Fold function call to builtin trunc, truncf or truncl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize trunc of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE r, x;
      tree type = TREE_TYPE (TREE_TYPE (fndecl));

      x = TREE_REAL_CST (arg);
      real_trunc (&r, TYPE_MODE (type), &x);
      return build_real (type, r);
    }

  return fold_trunc_transparent_mathfn (loc, fndecl, arg);
}
/* Fold function call to builtin floor, floorf or floorl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_floor (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize floor of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE x;

      x = TREE_REAL_CST (arg);
      if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
	{
	  tree type = TREE_TYPE (TREE_TYPE (fndecl));
	  REAL_VALUE_TYPE r;

	  real_floor (&r, TYPE_MODE (type), &x);
	  return build_real (type, r);
	}
    }

  /* Fold floor (x) where x is nonnegative to trunc (x).  */
  if (tree_expr_nonnegative_p (arg))
    {
      tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
      if (truncfn)
	return build_call_expr_loc (loc, truncfn, 1, arg);
    }

  return fold_trunc_transparent_mathfn (loc, fndecl, arg);
}
/* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize ceil of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE x;

      x = TREE_REAL_CST (arg);
      if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
	{
	  tree type = TREE_TYPE (TREE_TYPE (fndecl));
	  REAL_VALUE_TYPE r;

	  real_ceil (&r, TYPE_MODE (type), &x);
	  return build_real (type, r);
	}
    }

  return fold_trunc_transparent_mathfn (loc, fndecl, arg);
}
/* Fold function call to builtin round, roundf or roundl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_round (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize round of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE x;

      x = TREE_REAL_CST (arg);
      if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
	{
	  tree type = TREE_TYPE (TREE_TYPE (fndecl));
	  REAL_VALUE_TYPE r;

	  real_round (&r, TYPE_MODE (type), &x);
	  return build_real (type, r);
	}
    }

  return fold_trunc_transparent_mathfn (loc, fndecl, arg);
}
/* Fold function call to builtin lround, lroundf or lroundl (or the
   corresponding long long versions) and other rounding functions.  ARG
   is the argument to the call.  Return NULL_TREE if no simplification
   can be made.  */

static tree
fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize lround of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);

      if (real_isfinite (&x))
	{
	  tree itype = TREE_TYPE (TREE_TYPE (fndecl));
	  tree ftype = TREE_TYPE (arg);
	  double_int val;
	  REAL_VALUE_TYPE r;

	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_IFLOOR):
	    CASE_FLT_FN (BUILT_IN_LFLOOR):
	    CASE_FLT_FN (BUILT_IN_LLFLOOR):
	      real_floor (&r, TYPE_MODE (ftype), &x);
	      break;

	    CASE_FLT_FN (BUILT_IN_ICEIL):
	    CASE_FLT_FN (BUILT_IN_LCEIL):
	    CASE_FLT_FN (BUILT_IN_LLCEIL):
	      real_ceil (&r, TYPE_MODE (ftype), &x);
	      break;

	    CASE_FLT_FN (BUILT_IN_IROUND):
	    CASE_FLT_FN (BUILT_IN_LROUND):
	    CASE_FLT_FN (BUILT_IN_LLROUND):
	      real_round (&r, TYPE_MODE (ftype), &x);
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  real_to_integer2 ((HOST_WIDE_INT *)&val.low, &val.high, &r);
	  if (double_int_fits_to_tree_p (itype, val))
	    return double_int_to_tree (itype, val);
	}
    }

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x).  */
      if (tree_expr_nonnegative_p (arg))
	return fold_build1_loc (loc, FIX_TRUNC_EXPR,
				TREE_TYPE (TREE_TYPE (fndecl)), arg);
      break;
    default:;
    }

  return fold_fixed_mathfn (loc, fndecl, arg);
}
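
/* Illustrative note (added commentary, not part of the original source):
   the constant folding above evaluates, for example,

       lround (2.5)   -> 3
       lround (-2.5)  -> -3    (real_round rounds ties away from zero)

   and the result is only substituted when double_int_fits_to_tree_p
   confirms it fits the integer return type; otherwise the call is kept
   so the runtime behavior (errno, exceptions) is preserved.  */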
/* Fold function call to builtin ffs, clz, ctz, popcount and parity
   and their long and long long variants (i.e. ffsl and ffsll).  ARG is
   the argument to the call.  Return NULL_TREE if no simplification can
   be made.  */

static tree
fold_builtin_bitop (tree fndecl, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize for constant argument.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      HOST_WIDE_INT hi, width, result;
      unsigned HOST_WIDE_INT lo;
      tree type;

      type = TREE_TYPE (arg);
      width = TYPE_PRECISION (type);
      lo = TREE_INT_CST_LOW (arg);

      /* Clear all the bits that are beyond the type's precision.  */
      if (width > HOST_BITS_PER_WIDE_INT)
	{
	  hi = TREE_INT_CST_HIGH (arg);
	  if (width < HOST_BITS_PER_DOUBLE_INT)
	    hi &= ~((unsigned HOST_WIDE_INT) (-1)
		    << (width - HOST_BITS_PER_WIDE_INT));
	}
      else
	{
	  hi = 0;
	  if (width < HOST_BITS_PER_WIDE_INT)
	    lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
	}

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	CASE_INT_FN (BUILT_IN_FFS):
	  if (lo != 0)
	    result = ffs_hwi (lo);
	  else if (hi != 0)
	    result = HOST_BITS_PER_WIDE_INT + ffs_hwi (hi);
	  else
	    result = 0;
	  break;

	CASE_INT_FN (BUILT_IN_CLZ):
	  if (hi != 0)
	    result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
	  else if (lo != 0)
	    result = width - floor_log2 (lo) - 1;
	  else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
	    result = width;
	  break;

	CASE_INT_FN (BUILT_IN_CTZ):
	  if (lo != 0)
	    result = ctz_hwi (lo);
	  else if (hi != 0)
	    result = HOST_BITS_PER_WIDE_INT + ctz_hwi (hi);
	  else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
	    result = width;
	  break;

	CASE_INT_FN (BUILT_IN_CLRSB):
	  if (width > HOST_BITS_PER_WIDE_INT
	      && (hi & ((unsigned HOST_WIDE_INT) 1
			<< (width - HOST_BITS_PER_WIDE_INT - 1))) != 0)
	    {
	      hi = ~hi & ~((unsigned HOST_WIDE_INT) (-1)
			   << (width - HOST_BITS_PER_WIDE_INT - 1));
	      lo = ~lo;
	    }
	  else if (width <= HOST_BITS_PER_WIDE_INT
		   && (lo & ((unsigned HOST_WIDE_INT) 1 << (width - 1))) != 0)
	    lo = ~lo & ~((unsigned HOST_WIDE_INT) (-1) << (width - 1));
	  if (hi != 0)
	    result = width - floor_log2 (hi) - 2 - HOST_BITS_PER_WIDE_INT;
	  else if (lo != 0)
	    result = width - floor_log2 (lo) - 2;
	  else
	    result = width - 1;
	  break;

	CASE_INT_FN (BUILT_IN_POPCOUNT):
	  result = 0;
	  while (lo)
	    result++, lo &= lo - 1;
	  while (hi)
	    result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
	  break;

	CASE_INT_FN (BUILT_IN_PARITY):
	  result = 0;
	  while (lo)
	    result++, lo &= lo - 1;
	  while (hi)
	    result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
	  result &= 1;
	  break;

	default:
	  gcc_unreachable ();
	}

      return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
    }

  return NULL_TREE;
}
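
/* Illustrative note (added commentary, not part of the original source):
   with a constant argument the switch above computes the value directly,
   e.g.

       __builtin_popcount (0xf0) -> 4
       __builtin_parity (7)      -> 1
       __builtin_ffs (8)         -> 4   (1-based index of the lowest set bit)

   Arguments wider than one host word are handled via the LO/HI pair.  */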
/* Fold function call to builtin_bswap and the short, long and long long
   variants.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_bswap (tree fndecl, tree arg)
{
  if (! validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize constant value.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      HOST_WIDE_INT hi, width, r_hi = 0;
      unsigned HOST_WIDE_INT lo, r_lo = 0;
      tree type = TREE_TYPE (TREE_TYPE (fndecl));

      width = TYPE_PRECISION (type);
      lo = TREE_INT_CST_LOW (arg);
      hi = TREE_INT_CST_HIGH (arg);

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_BSWAP16:
	case BUILT_IN_BSWAP32:
	case BUILT_IN_BSWAP64:
	  {
	    int s;

	    for (s = 0; s < width; s += 8)
	      {
		int d = width - s - 8;
		unsigned HOST_WIDE_INT byte;

		if (s < HOST_BITS_PER_WIDE_INT)
		  byte = (lo >> s) & 0xff;
		else
		  byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;

		if (d < HOST_BITS_PER_WIDE_INT)
		  r_lo |= byte << d;
		else
		  r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
	      }
	  }
	  break;

	default:
	  gcc_unreachable ();
	}

      if (width < HOST_BITS_PER_WIDE_INT)
	return build_int_cst (type, r_lo);
      else
	return build_int_cst_wide (type, r_lo, r_hi);
    }

  return NULL_TREE;
}
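
/* Illustrative note (added commentary, not part of the original source):
   the byte loop above reverses the value one octet at a time, so

       __builtin_bswap32 (0x12345678) -> 0x78563412
       __builtin_bswap16 (0xabcd)     -> 0xcdab

   is computed entirely at compile time for constant operands.  */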
/* A subroutine of fold_builtin to fold the various logarithmic
   functions.  Return NULL_TREE if no simplification can be made.
   FUNC is the corresponding MPFR logarithm function.  */

static tree
fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
			int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));
      tree res;
      const enum built_in_function fcode = builtin_mathfn_code (arg);

      /* Calculate the result when the argument is a constant.  */
      if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
	return res;

      /* Special case, optimize logN(expN(x)) = x.  */
      if (flag_unsafe_math_optimizations
	  && ((func == mpfr_log
	       && (fcode == BUILT_IN_EXP
		   || fcode == BUILT_IN_EXPF
		   || fcode == BUILT_IN_EXPL))
	      || (func == mpfr_log2
		  && (fcode == BUILT_IN_EXP2
		      || fcode == BUILT_IN_EXP2F
		      || fcode == BUILT_IN_EXP2L))
	      || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
	return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));

      /* Optimize logN(func()) for various exponential functions.  We
	 want to determine the value "x" and the power "exponent" in
	 order to transform logN(x**exponent) into exponent*logN(x).  */
      if (flag_unsafe_math_optimizations)
	{
	  tree exponent = 0, x = 0;

	  switch (fcode)
	    {
	    CASE_FLT_FN (BUILT_IN_EXP):
	      /* Prepare to do logN(exp(exponent) -> exponent*logN(e).  */
	      x = build_real (type, real_value_truncate (TYPE_MODE (type),
							 dconst_e ()));
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_EXP2):
	      /* Prepare to do logN(exp2(exponent) -> exponent*logN(2).  */
	      x = build_real (type, dconst2);
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_EXP10):
	    CASE_FLT_FN (BUILT_IN_POW10):
	      /* Prepare to do logN(exp10(exponent) -> exponent*logN(10).  */
	      {
		REAL_VALUE_TYPE dconst10;
		real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
		x = build_real (type, dconst10);
	      }
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_SQRT):
	      /* Prepare to do logN(sqrt(x) -> 0.5*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = build_real (type, dconsthalf);
	      break;
	    CASE_FLT_FN (BUILT_IN_CBRT):
	      /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
								dconst_third ()));
	      break;
	    CASE_FLT_FN (BUILT_IN_POW):
	      /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = CALL_EXPR_ARG (arg, 1);
	      break;
	    default:
	      break;
	    }

	  /* Now perform the optimization.  */
	  if (x && exponent)
	    {
	      tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
	      return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
	    }
	}
    }

  return NULL_TREE;
}
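
/* Illustrative note (added commentary, not part of the original source):
   under -funsafe-math-optimizations the x/exponent extraction above
   rewrites, for example,

       log (pow (x, 3.0))  ->  3.0 * log (x)
       log2 (sqrt (x))     ->  0.5 * log2 (x)
       log (exp (x))       ->  x

   trading a call for a multiply.  This is unsafe because it can change
   rounding and the domain of intermediate results.  */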
/* Fold a builtin function call to hypot, hypotf, or hypotl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_hypot (location_t loc, tree fndecl,
		    tree arg0, tree arg1, tree type)
{
  tree res, narg0, narg1;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
    return res;

  /* If either argument to hypot has a negate or abs, strip that off.
     E.g. hypot(-x,fabs(y)) -> hypot(x,y).  */
  narg0 = fold_strip_sign_ops (arg0);
  narg1 = fold_strip_sign_ops (arg1);
  if (narg0 || narg1)
    return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
				narg1 ? narg1 : arg1);

  /* If either argument is zero, hypot is fabs of the other.  */
  if (real_zerop (arg0))
    return fold_build1_loc (loc, ABS_EXPR, type, arg1);
  else if (real_zerop (arg1))
    return fold_build1_loc (loc, ABS_EXPR, type, arg0);

  /* hypot(x,x) -> fabs(x)*sqrt(2).  */
  if (flag_unsafe_math_optimizations
      && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
    {
      const REAL_VALUE_TYPE sqrt2_trunc
	= real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
      return fold_build2_loc (loc, MULT_EXPR, type,
			      fold_build1_loc (loc, ABS_EXPR, type, arg0),
			      build_real (type, sqrt2_trunc));
    }

  return NULL_TREE;
}
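
/* Illustrative note (added commentary, not part of the original source):
   the sign-stripping above means hypot (-x, fabs (y)) becomes
   hypot (x, y), and with -funsafe-math-optimizations

       hypot (x, x)  ->  fabs (x) * sqrt (2)

   where sqrt(2) is truncated to the precision of the argument type.  */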
/* Fold a builtin function call to pow, powf, or powl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
{
  tree res;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
    return res;

  /* Optimize pow(1.0,y) = 1.0.  */
  if (real_onep (arg0))
    return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);

  if (TREE_CODE (arg1) == REAL_CST
      && !TREE_OVERFLOW (arg1))
    {
      REAL_VALUE_TYPE cint;
      REAL_VALUE_TYPE c;
      HOST_WIDE_INT n;

      c = TREE_REAL_CST (arg1);

      /* Optimize pow(x,0.0) = 1.0.  */
      if (REAL_VALUES_EQUAL (c, dconst0))
	return omit_one_operand_loc (loc, type, build_real (type, dconst1),
				     arg0);

      /* Optimize pow(x,1.0) = x.  */
      if (REAL_VALUES_EQUAL (c, dconst1))
	return arg0;

      /* Optimize pow(x,-1.0) = 1.0/x.  */
      if (REAL_VALUES_EQUAL (c, dconstm1))
	return fold_build2_loc (loc, RDIV_EXPR, type,
				build_real (type, dconst1), arg0);

      /* Optimize pow(x,0.5) = sqrt(x).  */
      if (flag_unsafe_math_optimizations
	  && REAL_VALUES_EQUAL (c, dconsthalf))
	{
	  tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

	  if (sqrtfn != NULL_TREE)
	    return build_call_expr_loc (loc, sqrtfn, 1, arg0);
	}

      /* Optimize pow(x,1.0/3.0) = cbrt(x).  */
      if (flag_unsafe_math_optimizations)
	{
	  const REAL_VALUE_TYPE dconstroot
	    = real_value_truncate (TYPE_MODE (type), dconst_third ());

	  if (REAL_VALUES_EQUAL (c, dconstroot))
	    {
	      tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
	      if (cbrtfn != NULL_TREE)
		return build_call_expr_loc (loc, cbrtfn, 1, arg0);
	    }
	}

      /* Check for an integer exponent.  */
      n = real_to_integer (&c);
      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
      if (real_identical (&c, &cint))
	{
	  /* Attempt to evaluate pow at compile-time, unless this should
	     raise an exception.  */
	  if (TREE_CODE (arg0) == REAL_CST
	      && !TREE_OVERFLOW (arg0)
	      && (n > 0
		  || (!flag_trapping_math && !flag_errno_math)
		  || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
	    {
	      REAL_VALUE_TYPE x;
	      bool inexact;

	      x = TREE_REAL_CST (arg0);
	      inexact = real_powi (&x, TYPE_MODE (type), &x, n);
	      if (flag_unsafe_math_optimizations || !inexact)
		return build_real (type, x);
	    }

	  /* Strip sign ops from even integer powers.  */
	  if ((n & 1) == 0 && flag_unsafe_math_optimizations)
	    {
	      tree narg0 = fold_strip_sign_ops (arg0);
	      if (narg0)
		return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
	    }
	}
    }

  if (flag_unsafe_math_optimizations)
    {
      const enum built_in_function fcode = builtin_mathfn_code (arg0);

      /* Optimize pow(expN(x),y) = expN(x*y).  */
      if (BUILTIN_EXPONENT_P (fcode))
	{
	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
	  return build_call_expr_loc (loc, expfn, 1, arg);
	}

      /* Optimize pow(sqrt(x),y) = pow(x,y*0.5).  */
      if (BUILTIN_SQRT_P (fcode))
	{
	  tree narg0 = CALL_EXPR_ARG (arg0, 0);
	  tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
					build_real (type, dconsthalf));
	  return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
	}

      /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
	{
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  if (tree_expr_nonnegative_p (arg))
	    {
	      const REAL_VALUE_TYPE dconstroot
		= real_value_truncate (TYPE_MODE (type), dconst_third ());
	      tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
					    build_real (type, dconstroot));
	      return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
	    }
	}

      /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL)
	{
	  tree arg00 = CALL_EXPR_ARG (arg0, 0);
	  if (tree_expr_nonnegative_p (arg00))
	    {
	      tree arg01 = CALL_EXPR_ARG (arg0, 1);
	      tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
	      return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
	    }
	}
    }

  return NULL_TREE;
}
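
/* Illustrative note (added commentary, not part of the original source):
   a few of the rewrites above, shown on user code:

       pow (x, 1.0)      -> x                  (always)
       pow (x, -1.0)     -> 1.0 / x            (always)
       pow (x, 0.5)      -> sqrt (x)           (-funsafe-math-optimizations)
       pow (sqrt (x), y) -> pow (x, y * 0.5)   (-funsafe-math-optimizations)
       pow (2.0, 10.0)   -> 1024.0             (compile time, via real_powi)

   The nonnegativity checks on nested pow/cbrt arguments keep the
   rewrites from widening the domain of the expression.  */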
/* Fold a builtin function call to powi, powif, or powil with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
		   tree arg0, tree arg1, tree type)
{
  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize pow(1.0,y) = 1.0.  */
  if (real_onep (arg0))
    return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);

  if (host_integerp (arg1, 0))
    {
      HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);

      /* Evaluate powi at compile-time.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && !TREE_OVERFLOW (arg0))
	{
	  REAL_VALUE_TYPE x;
	  x = TREE_REAL_CST (arg0);
	  real_powi (&x, TYPE_MODE (type), &x, c);
	  return build_real (type, x);
	}

      /* Optimize pow(x,0) = 1.0.  */
      if (c == 0)
	return omit_one_operand_loc (loc, type, build_real (type, dconst1),
				     arg0);

      /* Optimize pow(x,1) = x.  */
      if (c == 1)
	return arg0;

      /* Optimize pow(x,-1) = 1.0/x.  */
      if (c == -1)
	return fold_build2_loc (loc, RDIV_EXPR, type,
				build_real (type, dconst1), arg0);
    }

  return NULL_TREE;
}
/* A subroutine of fold_builtin to fold the various exponent
   functions.  Return NULL_TREE if no simplification can be made.
   FUNC is the corresponding MPFR exponent function.  */

static tree
fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
		       int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));
      tree res;

      /* Calculate the result when the argument is a constant.  */
      if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
	return res;

      /* Optimize expN(logN(x)) = x.  */
      if (flag_unsafe_math_optimizations)
	{
	  const enum built_in_function fcode = builtin_mathfn_code (arg);

	  if ((func == mpfr_exp
	       && (fcode == BUILT_IN_LOG
		   || fcode == BUILT_IN_LOGF
		   || fcode == BUILT_IN_LOGL))
	      || (func == mpfr_exp2
		  && (fcode == BUILT_IN_LOG2
		      || fcode == BUILT_IN_LOG2F
		      || fcode == BUILT_IN_LOG2L))
	      || (func == mpfr_exp10
		  && (fcode == BUILT_IN_LOG10
		      || fcode == BUILT_IN_LOG10F
		      || fcode == BUILT_IN_LOG10L)))
	    return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
	}
    }

  return NULL_TREE;
}
/* Return true if VAR is a VAR_DECL or a component thereof.  */

static bool
var_decl_component_p (tree var)
{
  tree inner = var;
  while (handled_component_p (inner))
    inner = TREE_OPERAND (inner, 0);
  return SSA_VAR_P (inner);
}
/* Fold function call to builtin memset.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
		     tree type, bool ignore)
{
  tree var, ret, etype;
  unsigned HOST_WIDE_INT length, cval;

  if (! validate_arg (dest, POINTER_TYPE)
      || ! validate_arg (c, INTEGER_TYPE)
      || ! validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  if (! host_integerp (len, 1))
    return NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    return omit_one_operand_loc (loc, type, dest, c);

  if (TREE_CODE (c) != INTEGER_CST || TREE_SIDE_EFFECTS (dest))
    return NULL_TREE;

  var = dest;
  STRIP_NOPS (var);
  if (TREE_CODE (var) != ADDR_EXPR)
    return NULL_TREE;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return NULL_TREE;

  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  if (!INTEGRAL_TYPE_P (etype)
      && !POINTER_TYPE_P (etype))
    return NULL_TREE;

  if (! var_decl_component_p (var))
    return NULL_TREE;

  length = tree_low_cst (len, 1);
  if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
      || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
    return NULL_TREE;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return NULL_TREE;

  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
	return NULL_TREE;

      cval = TREE_INT_CST_LOW (c);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }

  ret = build_int_cst_type (etype, cval);
  var = build_fold_indirect_ref_loc (loc,
				     fold_convert_loc (loc,
						       build_pointer_type (etype),
						       dest));
  ret = build2 (MODIFY_EXPR, etype, var, ret);
  if (ignore)
    return ret;

  return omit_one_operand_loc (loc, type, dest, ret);
}
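
/* Illustrative note (added commentary, not part of the original source):
   when the length matches the mode size of the addressed object and the
   pointer is sufficiently aligned, the folding above turns

       int i;
       memset (&i, 0, sizeof i);

   into the single store "i = 0", and memset (&i, 0xab, sizeof i) into a
   store of the replicated byte pattern 0xabababab.  */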
/* Fold function call to builtin bzero.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
{
  if (! validate_arg (dest, POINTER_TYPE)
      || ! validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;

  if (!ignore)
    return NULL_TREE;

  /* New argument list transforming bzero(ptr x, int y) to
     memset(ptr x, int 0, size_t y).  This is done this way
     so that if it isn't expanded inline, we fall back to
     calling bzero instead of memset.  */

  return fold_builtin_memset (loc, dest, integer_zero_node,
			      fold_convert_loc (loc, size_type_node, size),
			      void_type_node, ignore);
}
/* Fold function call to builtin mem{{,p}cpy,move}.  Return
   NULL_TREE if no simplification can be made.
   If ENDP is 0, return DEST (like memcpy).
   If ENDP is 1, return DEST+LEN (like mempcpy).
   If ENDP is 2, return DEST+LEN-1 (like stpcpy).
   If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
   (memmove).  */

static tree
fold_builtin_memory_op (location_t loc, tree dest, tree src,
			tree len, tree type, bool ignore, int endp)
{
  tree destvar, srcvar, expr;

  if (! validate_arg (dest, POINTER_TYPE)
      || ! validate_arg (src, POINTER_TYPE)
      || ! validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    return omit_one_operand_loc (loc, type, dest, src);

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    expr = len;
  else
    {
      tree srctype, desttype;
      unsigned int src_align, dest_align;
      tree off0;

      if (endp == 3)
	{
	  src_align = get_pointer_alignment (src);
	  dest_align = get_pointer_alignment (dest);

	  /* Both DEST and SRC must be pointer types.
	     ??? This is what old code did.  Is the testing for pointer types
	     really mandatory?

	     If either SRC is readonly or length is 1, we can use memcpy.  */
	  if (!dest_align || !src_align)
	    return NULL_TREE;
	  if (readonly_data_expr (src)
	      || (host_integerp (len, 1)
		  && (MIN (src_align, dest_align) / BITS_PER_UNIT
		      >= (unsigned HOST_WIDE_INT) tree_low_cst (len, 1))))
	    {
	      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return NULL_TREE;
	      return build_call_expr_loc (loc, fn, 3, dest, src, len);
	    }

	  /* If *src and *dest can't overlap, optimize into memcpy as well.  */
	  if (TREE_CODE (src) == ADDR_EXPR
	      && TREE_CODE (dest) == ADDR_EXPR)
	    {
	      tree src_base, dest_base, fn;
	      HOST_WIDE_INT src_offset = 0, dest_offset = 0;
	      HOST_WIDE_INT size = -1;
	      HOST_WIDE_INT maxsize = -1;

	      srcvar = TREE_OPERAND (src, 0);
	      src_base = get_ref_base_and_extent (srcvar, &src_offset,
						  &size, &maxsize);
	      destvar = TREE_OPERAND (dest, 0);
	      dest_base = get_ref_base_and_extent (destvar, &dest_offset,
						   &size, &maxsize);
	      if (host_integerp (len, 1))
		maxsize = tree_low_cst (len, 1);
	      else
		maxsize = -1;
	      src_offset /= BITS_PER_UNIT;
	      dest_offset /= BITS_PER_UNIT;
	      if (SSA_VAR_P (src_base)
		  && SSA_VAR_P (dest_base))
		{
		  if (operand_equal_p (src_base, dest_base, 0)
		      && ranges_overlap_p (src_offset, maxsize,
					   dest_offset, maxsize))
		    return NULL_TREE;
		}
	      else if (TREE_CODE (src_base) == MEM_REF
		       && TREE_CODE (dest_base) == MEM_REF)
		{
		  double_int off;
		  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
					 TREE_OPERAND (dest_base, 0), 0))
		    return NULL_TREE;
		  off = mem_ref_offset (src_base) +
					double_int::from_shwi (src_offset);
		  if (!off.fits_shwi ())
		    return NULL_TREE;
		  src_offset = off.low;
		  off = mem_ref_offset (dest_base) +
					double_int::from_shwi (dest_offset);
		  if (!off.fits_shwi ())
		    return NULL_TREE;
		  dest_offset = off.low;
		  if (ranges_overlap_p (src_offset, maxsize,
					dest_offset, maxsize))
		    return NULL_TREE;
		}
	      else
		return NULL_TREE;

	      fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return NULL_TREE;
	      return build_call_expr_loc (loc, fn, 3, dest, src, len);
	    }

	  /* If the destination and source do not alias optimize into
	     memcpy as well.  */
	  if ((is_gimple_min_invariant (dest)
	       || TREE_CODE (dest) == SSA_NAME)
	      && (is_gimple_min_invariant (src)
		  || TREE_CODE (src) == SSA_NAME))
	    {
	      ao_ref destr, srcr;
	      ao_ref_init_from_ptr_and_size (&destr, dest, len);
	      ao_ref_init_from_ptr_and_size (&srcr, src, len);
	      if (!refs_may_alias_p_1 (&destr, &srcr, false))
		{
		  tree fn;
		  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
		  if (!fn)
		    return NULL_TREE;
		  return build_call_expr_loc (loc, fn, 3, dest, src, len);
		}
	    }

	  return NULL_TREE;
	}

      if (!host_integerp (len, 0))
	return NULL_TREE;
      /* FIXME:
	 This logic loses for arguments like (type *)malloc (sizeof (type)),
	 since we strip the casts of up to VOID return value from malloc.
	 Perhaps we ought to inherit type from non-VOID argument here?  */
      STRIP_NOPS (src);
      STRIP_NOPS (dest);
      if (!POINTER_TYPE_P (TREE_TYPE (src))
	  || !POINTER_TYPE_P (TREE_TYPE (dest)))
	return NULL_TREE;
      /* As we fold (void *)(p + CST) to (void *)p + CST undo this here.  */
      if (TREE_CODE (src) == POINTER_PLUS_EXPR)
	{
	  tree tem = TREE_OPERAND (src, 0);
	  STRIP_NOPS (tem);
	  if (tem != TREE_OPERAND (src, 0))
	    src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
	}
      if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
	{
	  tree tem = TREE_OPERAND (dest, 0);
	  STRIP_NOPS (tem);
	  if (tem != TREE_OPERAND (dest, 0))
	    dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
	}
      srctype = TREE_TYPE (TREE_TYPE (src));
      if (TREE_CODE (srctype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	{
	  srctype = TREE_TYPE (srctype);
	  STRIP_NOPS (src);
	  src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
	}
      desttype = TREE_TYPE (TREE_TYPE (dest));
      if (TREE_CODE (desttype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	{
	  desttype = TREE_TYPE (desttype);
	  STRIP_NOPS (dest);
	  dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
	}
      if (TREE_ADDRESSABLE (srctype)
	  || TREE_ADDRESSABLE (desttype))
	return NULL_TREE;

      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (dest_align < TYPE_ALIGN (desttype)
	  || src_align < TYPE_ALIGN (srctype))
	return NULL_TREE;

      if (!ignore)
	dest = builtin_save_expr (dest);

      /* Build accesses at offset zero with a ref-all character type.  */
      off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
							 ptr_mode, true), 0);

      destvar = dest;
      STRIP_NOPS (destvar);
      if (TREE_CODE (destvar) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (destvar, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
      else
	destvar = NULL_TREE;

      srcvar = src;
      STRIP_NOPS (srcvar);
      if (TREE_CODE (srcvar) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (srcvar, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	{
	  if (!destvar
	      || src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
				  srcvar, off0);
	  else if (!STRICT_ALIGNMENT)
	    {
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
	    }
	  else
	    srcvar = NULL_TREE;
	}
      else
	srcvar = NULL_TREE;

      if (srcvar == NULL_TREE && destvar == NULL_TREE)
	return NULL_TREE;

      if (srcvar == NULL_TREE)
	{
	  STRIP_NOPS (src);
	  if (src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, desttype, src, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return NULL_TREE;
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	    }
	}
      else if (destvar == NULL_TREE)
	{
	  STRIP_NOPS (dest);
	  if (dest_align >= TYPE_ALIGN (srctype))
	    destvar = fold_build2 (MEM_REF, srctype, dest, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return NULL_TREE;
	      desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
					     dest_align);
	      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
	    }
	}

      expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, srcvar);
    }

  if (ignore)
    return expr;

  if (endp == 0 || endp == 3)
    return omit_one_operand_loc (loc, type, dest, expr);

  if (expr == len)
    expr = NULL_TREE;

  if (endp == 2)
    len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
			   ssize_int (1));

  dest = fold_build_pointer_plus_loc (loc, dest, len);
  dest = fold_convert_loc (loc, type, dest);
  if (expr)
    dest = omit_one_operand_loc (loc, type, dest, expr);
  return dest;
}
/* Fold function call to builtin strcpy with arguments DEST and SRC.
   If LEN is not NULL, it represents the length of the string to be
   copied.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
{
  tree fn;

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE))
    return NULL_TREE;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);

  if (optimize_function_for_size_p (cfun))
    return NULL_TREE;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return NULL_TREE;

  if (!len)
    {
      len = c_strlen (src, 1);
      if (! len || TREE_SIDE_EFFECTS (len))
	return NULL_TREE;
    }

  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
  return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
			   build_call_expr_loc (loc, fn, 3, dest, src, len));
}
/* Fold function call to builtin stpcpy with arguments DEST and SRC.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
{
  tree fn, len, lenp1, call, type;

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE))
    return NULL_TREE;

  len = c_strlen (src, 1);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return NULL_TREE;

  if (optimize_function_for_size_p (cfun)
      /* If length is zero it's small enough.  */
      && !integer_zerop (len))
    return NULL_TREE;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return NULL_TREE;

  lenp1 = size_binop_loc (loc, PLUS_EXPR,
			  fold_convert_loc (loc, size_type_node, len),
			  build_int_cst (size_type_node, 1));
  /* We use dest twice in building our expression.  Save it from
     multiple expansions.  */
  dest = builtin_save_expr (dest);
  call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);

  type = TREE_TYPE (TREE_TYPE (fndecl));
  dest = fold_build_pointer_plus_loc (loc, dest, len);
  dest = fold_convert_loc (loc, type, dest);
  dest = omit_one_operand_loc (loc, type, dest, call);
  return dest;
}
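
/* Illustrative note (added commentary, not part of the original source):
   when the source length is a known constant, the folding above gives

       stpcpy (d, "abc")  ->  (memcpy (d, "abc", 4), d + 3)

   i.e. a memcpy of length+1 bytes whose value is DEST plus the string
   length, with DEST wrapped in a SAVE_EXPR because it is used twice.  */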
/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
   If SLEN is not NULL, it represents the length of the source string.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
		      tree src, tree len, tree slen)
{
  tree fn;

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (len == 0 || TREE_CODE (len) != INTEGER_CST)
    return NULL_TREE;

  if (!slen)
    slen = c_strlen (src, 1);

  /* Now, we must be passed a constant src ptr parameter.  */
  if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
    return NULL_TREE;

  slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (slen, len))
    return NULL_TREE;

  /* OK transform into builtin memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return NULL_TREE;

  len = fold_convert_loc (loc, size_type_node, len);
  return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
			   build_call_expr_loc (loc, fn, 3, dest, src, len));
}
/* Fold function call to builtin memchr.  ARG1, ARG2 and LEN are the
   arguments to the call, and TYPE is its return type.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
{
  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, INTEGER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1;

      if (TREE_CODE (arg2) != INTEGER_CST
	  || !host_integerp (len, 1))
	return NULL_TREE;

      p1 = c_getstr (arg1);
      if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
	{
	  char c;
	  const char *r;
	  tree tem;

	  if (target_char_cast (arg2, &c))
	    return NULL_TREE;

	  r = (const char *) memchr (p1, c, tree_low_cst (len, 1));

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (arg1), 0);

	  tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
	  return fold_convert_loc (loc, type, tem);
	}
      return NULL_TREE;
    }
}
/* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
				  arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* If all arguments are constant, and the value of len is not greater
     than the lengths of arg1 and arg2, evaluate at compile-time.  */
  if (host_integerp (len, 1) && p1 && p2
      && compare_tree_int (len, strlen (p1) + 1) <= 0
      && compare_tree_int (len, strlen (p2) + 1) <= 0)
    {
      const int r = memcmp (p1, p2, tree_low_cst (len, 1));

      if (r > 0)
	return integer_one_node;
      else if (r < 0)
	return integer_minus_one_node;
      else
	return integer_zero_node;
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg1)));
      tree ind2
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
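
/* Illustrative note (added commentary, not part of the original source):
   the LEN == 1 case above emits a subtraction of the first bytes, so

       memcmp (p, q, 1)
	 ->  *(const unsigned char *) p - *(const unsigned char *) q

   matching the standard's requirement that the comparison be done on
   unsigned characters.  */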
/* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return integer_zero_node;

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  if (p1 && p2)
    {
      const int i = strcmp (p1, p2);
      if (i < 0)
	return integer_minus_one_node;
      else if (i > 0)
	return integer_one_node;
      else
	return integer_zero_node;
    }

  /* If the second arg is "", return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0')
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      return fold_convert_loc (loc, integer_type_node,
			       build1 (INDIRECT_REF, cst_uchar_node,
				       fold_convert_loc (loc,
							 cst_uchar_ptr_node,
							 arg1)));
    }

  /* If the first arg is "", return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0')
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree temp
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg2)));
      return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
    }

  return NULL_TREE;
}
/* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
				  arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  if (host_integerp (len, 1) && p1 && p2)
    {
      const int i = strncmp (p1, p2, tree_low_cst (len, 1));
      if (i > 0)
	return integer_one_node;
      else if (i < 0)
	return integer_minus_one_node;
      else
	return integer_zero_node;
    }

  /* If the second arg is "", and the length is greater than zero,
     return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      return fold_convert_loc (loc, integer_type_node,
			       build1 (INDIRECT_REF, cst_uchar_node,
				       fold_convert_loc (loc,
							 cst_uchar_ptr_node,
							 arg1)));
    }

  /* If the first arg is "", and the length is greater than zero,
     return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree temp = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg2)));
      return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1 = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg1)));
      tree ind2 = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
/* Fold function call to builtin signbit, signbitf or signbitl with argument
   ARG.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_signbit (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* If ARG is a compile-time constant, determine the result.  */
  if (TREE_CODE (arg) == REAL_CST
      && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE c;

      c = TREE_REAL_CST (arg);
      return (REAL_VALUE_NEGATIVE (c)
	      ? build_one_cst (type)
	      : build_zero_cst (type));
    }

  /* If ARG is non-negative, the result is always zero.  */
  if (tree_expr_nonnegative_p (arg))
    return omit_one_operand_loc (loc, type, integer_zero_node, arg);

  /* If ARG's format doesn't have signed zeros, return "arg < 0.0".  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
    return fold_convert (type,
			 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
					  build_real (TREE_TYPE (arg), dconst0)));

  return NULL_TREE;
}
/* Fold function call to builtin copysign, copysignf or copysignl with
   arguments ARG1 and ARG2.  Return NULL_TREE if no simplification can
   be made.  */

static tree
fold_builtin_copysign (location_t loc, tree fndecl,
		       tree arg1, tree arg2, tree type)
{
  tree tem;

  if (!validate_arg (arg1, REAL_TYPE)
      || !validate_arg (arg2, REAL_TYPE))
    return NULL_TREE;

  /* copysign(X,X) is X.  */
  if (operand_equal_p (arg1, arg2, 0))
    return fold_convert_loc (loc, type, arg1);

  /* If ARG1 and ARG2 are compile-time constants, determine the result.  */
  if (TREE_CODE (arg1) == REAL_CST
      && TREE_CODE (arg2) == REAL_CST
      && !TREE_OVERFLOW (arg1)
      && !TREE_OVERFLOW (arg2))
    {
      REAL_VALUE_TYPE c1, c2;

      c1 = TREE_REAL_CST (arg1);
      c2 = TREE_REAL_CST (arg2);
      /* c1.sign := c2.sign.  */
      real_copysign (&c1, &c2);
      return build_real (type, c1);
    }

  /* copysign(X, Y) is fabs(X) when Y is always non-negative.
     Remember to evaluate Y for side-effects.  */
  if (tree_expr_nonnegative_p (arg2))
    return omit_one_operand_loc (loc, type,
				 fold_build1_loc (loc, ABS_EXPR, type, arg1),
				 arg2);

  /* Strip sign changing operations for the first argument.  */
  tem = fold_strip_sign_ops (arg1);
  if (tem)
    return build_call_expr_loc (loc, fndecl, 2, tem, arg2);

  return NULL_TREE;
}
/* Fold a call to builtin isascii with argument ARG.  */

static tree
fold_builtin_isascii (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      /* Transform isascii(c) -> ((c & ~0x7f) == 0).  */
      arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
			 build_int_cst (integer_type_node,
					~ (unsigned HOST_WIDE_INT) 0x7f));
      return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
			      arg, integer_zero_node);
    }
}
/* Fold a call to builtin toascii with argument ARG.  */

static tree
fold_builtin_toascii (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Transform toascii(c) -> (c & 0x7f).  */
  return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
			  build_int_cst (integer_type_node, 0x7f));
}
/* Fold a call to builtin isdigit with argument ARG.  */

static tree
fold_builtin_isdigit (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9.  */
      /* According to the C standard, isdigit is unaffected by locale.
	 However, it definitely is affected by the target character set.  */
      unsigned HOST_WIDE_INT target_digit0
	= lang_hooks.to_target_charset ('0');

      if (target_digit0 == 0)
	return NULL_TREE;

      arg = fold_convert_loc (loc, unsigned_type_node, arg);
      arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
			 build_int_cst (unsigned_type_node, target_digit0));
      return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
			      build_int_cst (unsigned_type_node, 9));
    }
}
/* Fold a call to fabs, fabsf or fabsl with argument ARG.  */

static tree
fold_builtin_fabs (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  arg = fold_convert_loc (loc, type, arg);
  if (TREE_CODE (arg) == REAL_CST)
    return fold_abs_const (arg, type);
  return fold_build1_loc (loc, ABS_EXPR, type, arg);
}
/* Fold a call to abs, labs, llabs or imaxabs with argument ARG.  */

static tree
fold_builtin_abs (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  arg = fold_convert_loc (loc, type, arg);
  if (TREE_CODE (arg) == INTEGER_CST)
    return fold_abs_const (arg, type);
  return fold_build1_loc (loc, ABS_EXPR, type, arg);
}
/* Fold a fma operation with arguments ARG[012].  */

tree
fold_fma (location_t loc ATTRIBUTE_UNUSED,
	  tree type, tree arg0, tree arg1, tree arg2)
{
  if (TREE_CODE (arg0) == REAL_CST
      && TREE_CODE (arg1) == REAL_CST
      && TREE_CODE (arg2) == REAL_CST)
    return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);

  return NULL_TREE;
}
/* Fold a call to fma, fmaf, or fmal with arguments ARG[012].  */

static tree
fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
{
  if (validate_arg (arg0, REAL_TYPE)
      && validate_arg (arg1, REAL_TYPE)
      && validate_arg (arg2, REAL_TYPE))
    {
      tree tem = fold_fma (loc, type, arg0, arg1, arg2);
      if (tem)
	return tem;

      /* ??? Only expand to FMA_EXPR if it's directly supported.  */
      if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
	return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
    }
  return NULL_TREE;
}
/* Fold a call to builtin fmin or fmax.  */

static tree
fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
			tree type, bool max)
{
  if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
    {
      /* Calculate the result when the argument is a constant.  */
      tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));

      if (res)
	return res;

      /* If either argument is NaN, return the other one.  Avoid the
	 transformation if we get (and honor) a signalling NaN.  Using
	 omit_one_operand() ensures we create a non-lvalue.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && real_isnan (&TREE_REAL_CST (arg0))
	  && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      || ! TREE_REAL_CST (arg0).signalling))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      if (TREE_CODE (arg1) == REAL_CST
	  && real_isnan (&TREE_REAL_CST (arg1))
	  && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
	      || ! TREE_REAL_CST (arg1).signalling))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Transform fmin/fmax(x,x) -> x.  */
      if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR.  C99 requires these
	 functions to return the numeric arg if the other one is NaN.
	 These tree codes don't honor that, so only transform if
	 -ffinite-math-only is set.  C99 doesn't require -0.0 to be
	 handled, so we don't have to worry about it either.  */
      if (flag_finite_math_only)
	return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type, arg1));
    }
  return NULL_TREE;
}
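
/* Illustrative note (added commentary, not part of the original source):
   the NaN handling above implements the C99 rule that fmin/fmax return
   the numeric operand when the other is a quiet NaN:

       fmin (x, __builtin_nan (""))  ->  x

   while the MIN_EXPR/MAX_EXPR form is used only under
   -ffinite-math-only, because those tree codes do not honor NaNs.  */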
/* Fold a call to builtin carg(a+bi) -> atan2(b,a).  */

static tree
fold_builtin_carg (location_t loc, tree arg, tree type)
{
  if (validate_arg (arg, COMPLEX_TYPE)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
    {
      tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);

      if (atan2_fn)
	{
	  tree new_arg = builtin_save_expr (arg);
	  tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
	  tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
	  return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
	}
    }

  return NULL_TREE;
}
/* Fold a call to builtin logb/ilogb.  */

static tree
fold_builtin_logb (location_t loc, tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
	{
	case rvc_nan:
	case rvc_inf:
	  /* If arg is Inf or NaN and we're logb, return it.  */
	  if (TREE_CODE (rettype) == REAL_TYPE)
	    return fold_convert_loc (loc, rettype, arg);
	  /* Fall through... */
	case rvc_zero:
	  /* Zero may set errno and/or raise an exception for logb, also
	     for ilogb we don't know FP_ILOGB0.  */
	  return NULL_TREE;
	case rvc_normal:
	  /* For normal numbers, proceed iff radix == 2.  In GCC,
	     normalized significands are in the range [0.5, 1.0).  We
	     want the exponent as if they were [1.0, 2.0) so get the
	     exponent and subtract 1.  */
	  if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
	    return fold_convert_loc (loc, rettype,
				     build_int_cst (integer_type_node,
						    REAL_EXP (value)-1));
	  break;
	}
    }

  return NULL_TREE;
}
/* Fold a call to builtin significand, if radix == 2.  */

static tree
fold_builtin_significand (location_t loc, tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
	{
	case rvc_zero:
	case rvc_nan:
	case rvc_inf:
	  /* If arg is +-0, +-Inf or +-NaN, then return it.  */
	  return fold_convert_loc (loc, rettype, arg);
	case rvc_normal:
	  /* For normal numbers, proceed iff radix == 2.  */
	  if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
	    {
	      REAL_VALUE_TYPE result = *value;
	      /* In GCC, normalized significands are in the range [0.5,
		 1.0).  We want them to be [1.0, 2.0) so set the
		 exponent to 1.  */
	      SET_REAL_EXP (&result, 1);
	      return build_real (rettype, result);
	    }
	  break;
	}
    }

  return NULL_TREE;
}
/* Fold a call to builtin frexp, we can assume the base is 2.  */

static tree
fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      tree frac, exp;

      switch (value->cl)
	{
	case rvc_zero:
	  /* For +-0, return (*exp = 0, +-0).  */
	  exp = integer_zero_node;
	  frac = arg0;
	  break;
	case rvc_nan:
	case rvc_inf:
	  /* For +-NaN or +-Inf, *exp is unspecified, return arg0.  */
	  return omit_one_operand_loc (loc, rettype, arg0, arg1);
	case rvc_normal:
	  {
	    /* Since the frexp function always expects base 2, and in
	       GCC normalized significands are already in the range
	       [0.5, 1.0), we have exactly what frexp wants.  */
	    REAL_VALUE_TYPE frac_rvt = *value;
	    SET_REAL_EXP (&frac_rvt, 0);
	    frac = build_real (rettype, frac_rvt);
	    exp = build_int_cst (integer_type_node, REAL_EXP (value));
	  }
	  break;
	default:
	  gcc_unreachable ();
	}

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
    }

  return NULL_TREE;
}
/* Fold a call to builtin ldexp or scalbn/scalbln.  If LDEXP is true
   then we can assume the base is two.  If it's false, then we have to
   check the mode of the TYPE parameter in certain cases.  */

static tree
fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
			    tree type, bool ldexp)
{
  if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0.  */
      if (real_zerop (arg0) || integer_zerop (arg1)
	  || (TREE_CODE (arg0) == REAL_CST
	      && !real_isfinite (&TREE_REAL_CST (arg0))))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* If both arguments are constant, then try to evaluate it.  */
      if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
	  && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
	  && host_integerp (arg1, 0))
	{
	  /* Bound the maximum adjustment to twice the range of the
	     mode's valid exponents.  Use abs to ensure the range is
	     positive as a sanity check.  */
	  const long max_exp_adj = 2 *
	    labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
		  - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);

	  /* Get the user-requested adjustment.  */
	  const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);

	  /* The requested adjustment must be inside this range.  This
	     is a preliminary cap to avoid things like overflow, we
	     may still fail to compute the result for other reasons.  */
	  if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
	    {
	      REAL_VALUE_TYPE initial_result;

	      real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);

	      /* Ensure we didn't overflow.  */
	      if (! real_isinf (&initial_result))
		{
		  const REAL_VALUE_TYPE trunc_result
		    = real_value_truncate (TYPE_MODE (type), initial_result);

		  /* Only proceed if the target mode can hold the
		     resulting value.  */
		  if (REAL_VALUES_EQUAL (initial_result, trunc_result))
		    return build_real (type, trunc_result);
		}
	    }
	}
    }

  return NULL_TREE;
}
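
/* Illustrative note (added commentary, not part of the original source):
   with both operands constant the code above evaluates the scaling at
   compile time, e.g.

       ldexp (1.5, 4)  ->  24.0

   after first rejecting exponent adjustments outside +-max_exp_adj and
   results that overflow to infinity or fail to round-trip through the
   target mode.  */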
/* Fold a call to builtin modf.  */

static tree
fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      REAL_VALUE_TYPE trunc, frac;

      switch (value->cl)
	{
	case rvc_nan:
	case rvc_zero:
	  /* For +-NaN or +-0, return (*arg1 = arg0, arg0).  */
	  trunc = frac = *value;
	  break;
	case rvc_inf:
	  /* For +-Inf, return (*arg1 = arg0, +-0).  */
	  frac = dconst0;
	  frac.sign = value->sign;
	  trunc = *value;
	  break;
	case rvc_normal:
	  /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)).  */
	  real_trunc (&trunc, VOIDmode, value);
	  real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
	  /* If the original number was negative and already
	     integral, then the fractional part is -0.0.  */
	  if (value->sign && frac.cl == rvc_zero)
	    frac.sign = value->sign;
	  break;
	}

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
			      build_real (rettype, trunc));
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
			      build_real (rettype, frac));
    }

  return NULL_TREE;
}
/* Given a location LOC, an interclass builtin function decl FNDECL
   and its single argument ARG, return a folded expression computing
   the same, or NULL_TREE if we either couldn't or didn't want to fold
   (the latter happens if there's an RTL instruction available).  */

static tree
fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum machine_mode mode;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
    return NULL_TREE;

  mode = TYPE_MODE (TREE_TYPE (arg));

  /* If there is no optab, try generic code.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
      tree result;

    CASE_FLT_FN (BUILT_IN_ISINF):
      {
	/* isinf(x) -> isgreater(fabs(x),DBL_MAX).  */
	tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&r, buf);
	result = build_call_expr (isgr_fn, 2,
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	return result;
      }
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_ISFINITE:
      {
	/* isfinite(x) -> islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&r, buf);
	result = build_call_expr (isle_fn, 2,
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	/*result = fold_build2_loc (loc, UNGT_EXPR,
				    TREE_TYPE (TREE_TYPE (fndecl)),
				    fold_build1_loc (loc, ABS_EXPR, type, arg),
				    build_real (type, r));
	result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
				  TREE_TYPE (TREE_TYPE (fndecl)),
				  result);*/
	return result;
      }
    case BUILT_IN_ISNORMAL:
      {
	/* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
	   islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
	tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE rmax, rmin;
	char buf[128];

	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&rmax, buf);
	sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
	real_from_string (&rmin, buf);
	arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
	result = build_call_expr (isle_fn, 2, arg,
				  build_real (type, rmax));
	result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
			      build_call_expr (isge_fn, 2, arg,
					       build_real (type, rmin)));
	return result;
      }
    default:
      break;
    }

  return NULL_TREE;
}
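
/* Illustrative example (commentary only): on a target without a
   dedicated instruction, isnormal (x) for double folds along the
   lines of

     islessequal (fabs (x), DBL_MAX) & isgreaterequal (fabs (x), DBL_MIN)

   where DBL_MIN comes from the "0x1p-1022"-style string built with
   REAL_MODE_FORMAT (mode)->emin - 1 above.  */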
/* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
   ARG is the argument for the call.  */

static tree
fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  REAL_VALUE_TYPE r;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  switch (builtin_index)
    {
    case BUILT_IN_ISINF:
      if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  if (real_isinf (&r))
	    return real_compare (GT_EXPR, &r, &dconst0)
		   ? integer_one_node : integer_minus_one_node;
	  else
	    return integer_zero_node;
	}

      return NULL_TREE;

    case BUILT_IN_ISINF_SIGN:
      {
	/* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
	/* In a boolean context, GCC will fold the inner COND_EXPR to
	   1.  So e.g. "if (isinf_sign(x))" would be folded to just
	   "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
	tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
	tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
	tree tmp = NULL_TREE;

	arg = builtin_save_expr (arg);

	if (signbit_fn && isinf_fn)
	  {
	    tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
	    tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);

	    signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
					    signbit_call, integer_zero_node);
	    isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
					  isinf_call, integer_zero_node);

	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
				   integer_minus_one_node, integer_one_node);
	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
				   isinf_call, tmp,
				   integer_zero_node);
	  }

	return tmp;
      }

    case BUILT_IN_ISFINITE:
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
	  && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand_loc (loc, type, integer_one_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  return real_isfinite (&r) ? integer_one_node : integer_zero_node;
	}

      return NULL_TREE;

    case BUILT_IN_ISNAN:
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  return real_isnan (&r) ? integer_one_node : integer_zero_node;
	}

      arg = builtin_save_expr (arg);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);

    default:
      gcc_unreachable ();
    }
}
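
/* Illustrative example (commentary only): __builtin_isinf_sign (x)
   folds as sketched above into roughly

     isinf (x) ? (signbit (x) ? -1 : 1) : 0

   with X wrapped in a SAVE_EXPR so it is evaluated only once.  */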
/* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
   This builtin will generate code to return the appropriate floating
   point classification depending on the value of the floating point
   number passed in.  The possible return values must be supplied as
   int arguments to the call in the following order: FP_NAN, FP_INFINITE,
   FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipsis is for exactly
   one floating point argument which is "type generic".  */

static tree
fold_builtin_fpclassify (location_t loc, tree exp)
{
  tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
    arg, type, res, tmp;
  enum machine_mode mode;
  REAL_VALUE_TYPE r;
  char buf[128];

  /* Verify the required arguments in the original call.  */
  if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE,
			 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_TREE;

  fp_nan = CALL_EXPR_ARG (exp, 0);
  fp_infinite = CALL_EXPR_ARG (exp, 1);
  fp_normal = CALL_EXPR_ARG (exp, 2);
  fp_subnormal = CALL_EXPR_ARG (exp, 3);
  fp_zero = CALL_EXPR_ARG (exp, 4);
  arg = CALL_EXPR_ARG (exp, 5);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (type);
  arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));

  /* fpclassify(x) ->
       isnan(x) ? FP_NAN :
	 (fabs(x) == Inf ? FP_INFINITE :
	   (fabs(x) >= DBL_MIN ? FP_NORMAL :
	     (x == 0 ? FP_ZERO : FP_SUBNORMAL))).  */

  tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
			 build_real (type, dconst0));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
			 tmp, fp_zero, fp_subnormal);

  sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
  real_from_string (&r, buf);
  tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
			 arg, build_real (type, r));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);

  if (HONOR_INFINITIES (mode))
    {
      real_inf (&r);
      tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
			     build_real (type, r));
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
			     fp_infinite, res);
    }

  if (HONOR_NANS (mode))
    {
      tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
    }

  return res;
}
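
/* Illustrative example (commentary only): for a double argument the
   COND_EXPR chain built above is equivalent to

     x != x              ? FP_NAN
     : fabs (x) == Inf   ? FP_INFINITE
     : fabs (x) >= DBL_MIN ? FP_NORMAL
     : x == 0            ? FP_ZERO
     :                     FP_SUBNORMAL

   modulo the nesting order in which the conditions are assembled.  */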
/* Fold a call to an unordered comparison function such as
   __builtin_isgreater().  FNDECL is the FUNCTION_DECL for the function
   being called and ARG0 and ARG1 are the arguments for the call.
   UNORDERED_CODE and ORDERED_CODE are comparison codes that give
   the opposite of the desired result.  UNORDERED_CODE is used
   for modes that can hold NaNs and ORDERED_CODE is used for
   the rest.  */

static tree
fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
			    enum tree_code unordered_code,
			    enum tree_code ordered_code)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum tree_code code;
  tree type0, type1;
  enum tree_code code0, code1;
  tree cmp_type = NULL_TREE;

  type0 = TREE_TYPE (arg0);
  type1 = TREE_TYPE (arg1);

  code0 = TREE_CODE (type0);
  code1 = TREE_CODE (type1);

  if (code0 == REAL_TYPE && code1 == REAL_TYPE)
    /* Choose the wider of two real types.  */
    cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
	       ? type0 : type1;
  else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
    cmp_type = type0;
  else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
    cmp_type = type1;

  arg0 = fold_convert_loc (loc, cmp_type, arg0);
  arg1 = fold_convert_loc (loc, cmp_type, arg1);

  if (unordered_code == UNORDERED_EXPR)
    {
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
    }

  code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
						   : ordered_code;
  return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
			  fold_build2_loc (loc, code, type, arg0, arg1));
}
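
/* Illustrative example (commentary only): __builtin_isgreater (x, y)
   passes UNLE_EXPR/LE_EXPR here, so it folds to !(x unle y) when the
   mode honors NaNs (the unordered code is the inverse of the desired
   result) and to the plain !(x <= y) otherwise.  */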
/* Fold a call to built-in function FNDECL with 0 arguments.
   IGNORE is true if the result of the function call is ignored.  This
   function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_INF):
    case BUILT_IN_INFD32:
    case BUILT_IN_INFD64:
    case BUILT_IN_INFD128:
      return fold_builtin_inf (loc, type, true);

    CASE_FLT_FN (BUILT_IN_HUGE_VAL):
      return fold_builtin_inf (loc, type, false);

    case BUILT_IN_CLASSIFY_TYPE:
      return fold_builtin_classify_type (NULL_TREE);

    default:
      break;
    }
  return NULL_TREE;
}
/* Fold a call to built-in function FNDECL with 1 argument, ARG0.
   IGNORE is true if the result of the function call is ignored.  This
   function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    case BUILT_IN_CONSTANT_P:
      {
	tree val = fold_builtin_constant_p (arg0);

	/* Gimplification will pull the CALL_EXPR for the builtin out of
	   an if condition.  When not optimizing, we'll not CSE it back.
	   To avoid link error types of regressions, return false now.  */
	if (!val && !optimize)
	  val = integer_zero_node;

	return val;
      }

    case BUILT_IN_CLASSIFY_TYPE:
      return fold_builtin_classify_type (arg0);

    case BUILT_IN_STRLEN:
      return fold_builtin_strlen (loc, type, arg0);

    CASE_FLT_FN (BUILT_IN_FABS):
      return fold_builtin_fabs (loc, arg0, type);

    case BUILT_IN_ABS:
    case BUILT_IN_LABS:
    case BUILT_IN_LLABS:
    case BUILT_IN_IMAXABS:
      return fold_builtin_abs (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_CONJ):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
      break;

    CASE_FLT_FN (BUILT_IN_CREAL):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return non_lvalue_loc (loc,
			       fold_build1_loc (loc, REALPART_EXPR, type, arg0));
      break;

    CASE_FLT_FN (BUILT_IN_CIMAG):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return non_lvalue_loc (loc,
			       fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
      break;

    CASE_FLT_FN (BUILT_IN_CCOS):
      return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);

    CASE_FLT_FN (BUILT_IN_CCOSH):
      return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);

    CASE_FLT_FN (BUILT_IN_CPROJ):
      return fold_builtin_cproj (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_CSIN):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_sin);
      break;

    CASE_FLT_FN (BUILT_IN_CSINH):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_sinh);
      break;

    CASE_FLT_FN (BUILT_IN_CTAN):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_tan);
      break;

    CASE_FLT_FN (BUILT_IN_CTANH):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_tanh);
      break;

    CASE_FLT_FN (BUILT_IN_CLOG):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_log);
      break;

    CASE_FLT_FN (BUILT_IN_CSQRT):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_sqrt);
      break;

    CASE_FLT_FN (BUILT_IN_CASIN):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_asin);
      break;

    CASE_FLT_FN (BUILT_IN_CACOS):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_acos);
      break;

    CASE_FLT_FN (BUILT_IN_CATAN):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_atan);
      break;

    CASE_FLT_FN (BUILT_IN_CASINH):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_asinh);
      break;

    CASE_FLT_FN (BUILT_IN_CACOSH):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_acosh);
      break;

    CASE_FLT_FN (BUILT_IN_CATANH):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_atanh);
      break;

    CASE_FLT_FN (BUILT_IN_CABS):
      return fold_builtin_cabs (loc, arg0, type, fndecl);

    CASE_FLT_FN (BUILT_IN_CARG):
      return fold_builtin_carg (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_SQRT):
      return fold_builtin_sqrt (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_CBRT):
      return fold_builtin_cbrt (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_ASIN):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_asin,
			     &dconstm1, &dconst1, true);
      break;

    CASE_FLT_FN (BUILT_IN_ACOS):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_acos,
			     &dconstm1, &dconst1, true);
      break;

    CASE_FLT_FN (BUILT_IN_ATAN):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_ASINH):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_ACOSH):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_acosh,
			     &dconst1, NULL, true);
      break;

    CASE_FLT_FN (BUILT_IN_ATANH):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_atanh,
			     &dconstm1, &dconst1, false);
      break;

    CASE_FLT_FN (BUILT_IN_SIN):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_COS):
      return fold_builtin_cos (loc, arg0, type, fndecl);

    CASE_FLT_FN (BUILT_IN_TAN):
      return fold_builtin_tan (arg0, type);

    CASE_FLT_FN (BUILT_IN_CEXP):
      return fold_builtin_cexp (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_CEXPI):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
      break;

    CASE_FLT_FN (BUILT_IN_SINH):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_COSH):
      return fold_builtin_cosh (loc, arg0, type, fndecl);

    CASE_FLT_FN (BUILT_IN_TANH):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_ERF):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_ERFC):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_TGAMMA):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_EXP):
      return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);

    CASE_FLT_FN (BUILT_IN_EXP2):
      return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);

    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
      return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);

    CASE_FLT_FN (BUILT_IN_EXPM1):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_LOG):
      return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);

    CASE_FLT_FN (BUILT_IN_LOG2):
      return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);

    CASE_FLT_FN (BUILT_IN_LOG10):
      return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);

    CASE_FLT_FN (BUILT_IN_LOG1P):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_log1p,
			     &dconstm1, NULL, false);
      break;

    CASE_FLT_FN (BUILT_IN_J0):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_j0,
			     NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_J1):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_j1,
			     NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_Y0):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_y0,
			     &dconst0, NULL, false);
      break;

    CASE_FLT_FN (BUILT_IN_Y1):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_y1,
			     &dconst0, NULL, false);
      break;

    CASE_FLT_FN (BUILT_IN_NAN):
    case BUILT_IN_NAND32:
    case BUILT_IN_NAND64:
    case BUILT_IN_NAND128:
      return fold_builtin_nan (arg0, type, true);

    CASE_FLT_FN (BUILT_IN_NANS):
      return fold_builtin_nan (arg0, type, false);

    CASE_FLT_FN (BUILT_IN_FLOOR):
      return fold_builtin_floor (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_CEIL):
      return fold_builtin_ceil (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_TRUNC):
      return fold_builtin_trunc (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_ROUND):
      return fold_builtin_round (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return fold_trunc_transparent_mathfn (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
    CASE_FLT_FN (BUILT_IN_IROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      return fold_builtin_int_roundingfn (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_IRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      return fold_fixed_mathfn (loc, fndecl, arg0);

    case BUILT_IN_BSWAP16:
    case BUILT_IN_BSWAP32:
    case BUILT_IN_BSWAP64:
      return fold_builtin_bswap (fndecl, arg0);

    CASE_INT_FN (BUILT_IN_FFS):
    CASE_INT_FN (BUILT_IN_CLZ):
    CASE_INT_FN (BUILT_IN_CTZ):
    CASE_INT_FN (BUILT_IN_CLRSB):
    CASE_INT_FN (BUILT_IN_POPCOUNT):
    CASE_INT_FN (BUILT_IN_PARITY):
      return fold_builtin_bitop (fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_SIGNBIT):
      return fold_builtin_signbit (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
      return fold_builtin_significand (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_ILOGB):
    CASE_FLT_FN (BUILT_IN_LOGB):
      return fold_builtin_logb (loc, arg0, type);

    case BUILT_IN_ISASCII:
      return fold_builtin_isascii (loc, arg0);

    case BUILT_IN_TOASCII:
      return fold_builtin_toascii (loc, arg0);

    case BUILT_IN_ISDIGIT:
      return fold_builtin_isdigit (loc, arg0);

    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISFINITE:
      {
	tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
	if (ret)
	  return ret;
	return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
      }

    CASE_FLT_FN (BUILT_IN_ISINF):
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      {
	tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
	if (ret)
	  return ret;
	return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
      }

    case BUILT_IN_ISNORMAL:
      return fold_builtin_interclass_mathfn (loc, fndecl, arg0);

    case BUILT_IN_ISINF_SIGN:
      return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);

    CASE_FLT_FN (BUILT_IN_ISNAN):
    case BUILT_IN_ISNAND32:
    case BUILT_IN_ISNAND64:
    case BUILT_IN_ISNAND128:
      return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);

    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);

    case BUILT_IN_FREE:
      if (integer_zerop (arg0))
	return build_empty_stmt (loc);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
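
/* Illustrative example (commentary only): a constant one-argument
   call such as __builtin_strlen ("abc") is reduced by
   fold_builtin_strlen to the integer constant 3, and
   __builtin_fabs (-2.0) folds to 2.0.  */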
/* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
   IGNORE is true if the result of the function call is ignored.  This
   function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_JN):
      if (validate_arg (arg0, INTEGER_TYPE)
	  && validate_arg (arg1, REAL_TYPE))
	return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_YN):
      if (validate_arg (arg0, INTEGER_TYPE)
	  && validate_arg (arg1, REAL_TYPE))
	return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
				 &dconst0, false);
      break;

    CASE_FLT_FN (BUILT_IN_DREM):
    CASE_FLT_FN (BUILT_IN_REMAINDER):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, REAL_TYPE))
	return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
      break;

    CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
    CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, POINTER_TYPE))
	return do_mpfr_lgamma_r (arg0, arg1, type);
      break;

    CASE_FLT_FN (BUILT_IN_ATAN2):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, REAL_TYPE))
	return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
      break;

    CASE_FLT_FN (BUILT_IN_FDIM):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, REAL_TYPE))
	return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
      break;

    CASE_FLT_FN (BUILT_IN_HYPOT):
      return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_CPOW):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
	  && validate_arg (arg1, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
	return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
      break;

    CASE_FLT_FN (BUILT_IN_LDEXP):
      return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      return fold_builtin_load_exponent (loc, arg0, arg1,
					 type, /*ldexp=*/false);

    CASE_FLT_FN (BUILT_IN_FREXP):
      return fold_builtin_frexp (loc, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_MODF):
      return fold_builtin_modf (loc, arg0, arg1, type);

    case BUILT_IN_BZERO:
      return fold_builtin_bzero (loc, arg0, arg1, ignore);

    case BUILT_IN_FPUTS:
      return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);

    case BUILT_IN_FPUTS_UNLOCKED:
      return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);

    case BUILT_IN_STRSTR:
      return fold_builtin_strstr (loc, arg0, arg1, type);

    case BUILT_IN_STRCAT:
      return fold_builtin_strcat (loc, arg0, arg1);

    case BUILT_IN_STRSPN:
      return fold_builtin_strspn (loc, arg0, arg1);

    case BUILT_IN_STRCSPN:
      return fold_builtin_strcspn (loc, arg0, arg1);

    case BUILT_IN_STRCHR:
    case BUILT_IN_INDEX:
      return fold_builtin_strchr (loc, arg0, arg1, type);

    case BUILT_IN_STRRCHR:
    case BUILT_IN_RINDEX:
      return fold_builtin_strrchr (loc, arg0, arg1, type);

    case BUILT_IN_STRCPY:
      return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);

    case BUILT_IN_STPCPY:
      if (ignore)
	{
	  tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
	  if (!fn)
	    break;

	  return build_call_expr_loc (loc, fn, 2, arg0, arg1);
	}
      else
	return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
      break;

    case BUILT_IN_STRCMP:
      return fold_builtin_strcmp (loc, arg0, arg1);

    case BUILT_IN_STRPBRK:
      return fold_builtin_strpbrk (loc, arg0, arg1, type);

    case BUILT_IN_EXPECT:
      return fold_builtin_expect (loc, arg0, arg1);

    CASE_FLT_FN (BUILT_IN_POW):
      return fold_builtin_pow (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_POWI):
      return fold_builtin_powi (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_COPYSIGN):
      return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_FMIN):
      return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);

    CASE_FLT_FN (BUILT_IN_FMAX):
      return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);

    case BUILT_IN_ISGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNLE_EXPR, LE_EXPR);
    case BUILT_IN_ISGREATEREQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNLT_EXPR, LT_EXPR);
    case BUILT_IN_ISLESS:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNGE_EXPR, GE_EXPR);
    case BUILT_IN_ISLESSEQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNGT_EXPR, GT_EXPR);
    case BUILT_IN_ISLESSGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
    case BUILT_IN_ISUNORDERED:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNORDERED_EXPR,
					 NOP_EXPR);

    /* We do the folding for va_start in the expander.  */
    case BUILT_IN_VA_START:
      break;

    case BUILT_IN_SPRINTF:
      return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);

    case BUILT_IN_OBJECT_SIZE:
      return fold_builtin_object_size (arg0, arg1);

    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);

    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      if (!validate_arg (arg0, INTEGER_TYPE)
	  || TREE_SIDE_EFFECTS (arg0))
	return NULL_TREE;
      else
	return fold_builtin_printf (loc, fndecl,
				    arg1, NULL_TREE, ignore, fcode);
      break;

    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
				   ignore, fcode);

    case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
      return fold_builtin_atomic_always_lock_free (arg0, arg1);

    case BUILT_IN_ATOMIC_IS_LOCK_FREE:
      return fold_builtin_atomic_is_lock_free (arg0, arg1);

    default:
      break;
    }
  return NULL_TREE;
}
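
/* Illustrative example (commentary only): the two-argument folds
   dispatched above include algebraic rewrites as well as constant
   evaluation; e.g. fold_builtin_pow turns pow (x, 1.0) into x and
   pow (x, 2.0) into x * x, and the LDEXP case feeds
   fold_builtin_load_exponent, which evaluates ldexp (3.0, 2) to
   12.0 at compile time.  */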
/* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
   and ARG2.  IGNORE is true if the result of the function call is ignored.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_3 (location_t loc, tree fndecl,
		tree arg0, tree arg1, tree arg2, bool ignore)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_SINCOS):
      return fold_builtin_sincos (loc, arg0, arg1, arg2);

    CASE_FLT_FN (BUILT_IN_FMA):
      return fold_builtin_fma (loc, arg0, arg1, arg2, type);

    CASE_FLT_FN (BUILT_IN_REMQUO):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, REAL_TYPE)
	  && validate_arg (arg2, POINTER_TYPE))
	return do_mpfr_remquo (arg0, arg1, arg2);
      break;

    case BUILT_IN_MEMSET:
      return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);

    case BUILT_IN_BCOPY:
      return fold_builtin_memory_op (loc, arg1, arg0, arg2,
				     void_type_node, true, /*endp=*/3);

    case BUILT_IN_MEMCPY:
      return fold_builtin_memory_op (loc, arg0, arg1, arg2,
				     type, ignore, /*endp=*/0);

    case BUILT_IN_MEMPCPY:
      return fold_builtin_memory_op (loc, arg0, arg1, arg2,
				     type, ignore, /*endp=*/1);

    case BUILT_IN_MEMMOVE:
      return fold_builtin_memory_op (loc, arg0, arg1, arg2,
				     type, ignore, /*endp=*/3);

    case BUILT_IN_STRNCAT:
      return fold_builtin_strncat (loc, arg0, arg1, arg2);

    case BUILT_IN_STRNCPY:
      return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);

    case BUILT_IN_STRNCMP:
      return fold_builtin_strncmp (loc, arg0, arg1, arg2);

    case BUILT_IN_MEMCHR:
      return fold_builtin_memchr (loc, arg0, arg1, arg2, type);

    case BUILT_IN_BCMP:
    case BUILT_IN_MEMCMP:
      return fold_builtin_memcmp (loc, arg0, arg1, arg2);

    case BUILT_IN_SPRINTF:
      return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);

    case BUILT_IN_SNPRINTF:
      return fold_builtin_snprintf (loc, arg0, arg1, arg2, NULL_TREE, ignore);

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
				      ignore, fcode);

    case BUILT_IN_STRCAT_CHK:
      return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);

    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      if (!validate_arg (arg0, INTEGER_TYPE)
	  || TREE_SIDE_EFFECTS (arg0))
	return NULL_TREE;
      else
	return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
      break;

    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
				   ignore, fcode);

    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      if (!validate_arg (arg1, INTEGER_TYPE)
	  || TREE_SIDE_EFFECTS (arg1))
	return NULL_TREE;
      else
	return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
				     ignore, fcode);

    default:
      break;
    }
  return NULL_TREE;
}
/* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
   ARG2, and ARG3.  IGNORE is true if the result of the function call is
   ignored.  This function returns NULL_TREE if no simplification was
   possible.  */

static tree
fold_builtin_4 (location_t loc, tree fndecl,
		tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
				      NULL_TREE, ignore,
				      DECL_FUNCTION_CODE (fndecl));

    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      return fold_builtin_stxncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE,
				       ignore, fcode);

    case BUILT_IN_STRNCAT_CHK:
      return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);

    case BUILT_IN_SNPRINTF:
      return fold_builtin_snprintf (loc, arg0, arg1, arg2, arg3, ignore);

    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      if (!validate_arg (arg1, INTEGER_TYPE)
	  || TREE_SIDE_EFFECTS (arg1))
	return NULL_TREE;
      else
	return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
				     ignore, fcode);
      break;

    default:
      break;
    }
  return NULL_TREE;
}
/* Fold a call to built-in function FNDECL.  ARGS is an array of NARGS
   arguments, where NARGS <= 4.  IGNORE is true if the result of the
   function call is ignored.  This function returns NULL_TREE if no
   simplification was possible.  Note that this only folds builtins with
   fixed argument patterns.  Foldings that do varargs-to-varargs
   transformations, or that match calls with more than 4 arguments,
   need to be handled with fold_builtin_varargs instead.  */

#define MAX_ARGS_TO_FOLD_BUILTIN 4

static tree
fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
{
  tree ret = NULL_TREE;

  switch (nargs)
    {
    case 0:
      ret = fold_builtin_0 (loc, fndecl, ignore);
      break;
    case 1:
      ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
      break;
    case 2:
      ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
      break;
    case 3:
      ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
      break;
    case 4:
      ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
			    ignore);
      break;
    default:
      break;
    }
  if (ret)
    {
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}
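
/* Illustrative example (commentary only): fold_call_expr below
   extracts the argument array from a CALL_EXPR and dispatches here,
   so a two-argument call such as __builtin_hypot (3.0, 4.0) reaches
   fold_builtin_2 and constant folds to 5.0 via the MPFR-based
   helpers.  */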
/* Builtins with folding operations that operate on "..." arguments
   need special handling; we need to store the arguments in a convenient
   data structure before attempting any folding.  Fortunately there are
   only a few builtins that fall into this category.  FNDECL is the
   function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
   result of the function call is ignored.  */

static tree
fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
		      bool ignore ATTRIBUTE_UNUSED)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  tree ret = NULL_TREE;

  switch (fcode)
    {
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      ret = fold_builtin_sprintf_chk (loc, exp, fcode);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
      break;

    case BUILT_IN_FPCLASSIFY:
      ret = fold_builtin_fpclassify (loc, exp);
      break;

    default:
      break;
    }
  if (ret)
    {
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}
/* Return true if FNDECL shouldn't be folded right now.
   If a built-in function has an inline attribute always_inline
   wrapper, defer folding it after always_inline functions have
   been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
   might not be performed.  */

bool
avoid_folding_inline_builtin (tree fndecl)
{
  return (DECL_DECLARED_INLINE_P (fndecl)
	  && DECL_DISREGARD_INLINE_LIMITS (fndecl)
	  && cfun
	  && !cfun->always_inline_functions_inlined
	  && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
}
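
/* Illustrative example (commentary only): glibc's fortified headers
   wrap e.g. memcpy in an extern always_inline function that calls
   __builtin___memcpy_chk.  Folding such a wrapper before it has been
   inlined would bypass the object-size check, which is why folding
   is deferred here.  */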
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_expr (location_t loc, tree exp, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = get_callee_fndecl (exp);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
	 yet.  Defer folding until we see all the arguments
	 (after inlining).  */
      && !CALL_EXPR_VA_ARG_PACK (exp))
    {
      int nargs = call_expr_nargs (exp);

      /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
	 instead last argument is __builtin_va_arg_pack ().  Defer folding
	 even in that case, until arguments are finalized.  */
      if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
	{
	  tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
	  if (fndecl2
	      && TREE_CODE (fndecl2) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
	    return NULL_TREE;
	}

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;

      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
				     CALL_EXPR_ARGP (exp), ignore);
      else
	{
	  if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
	    {
	      tree *args = CALL_EXPR_ARGP (exp);
	      ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	    }
	  if (!ret)
	    ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
	  if (ret)
	    return ret;
	}
    }
  return NULL_TREE;
}
/* Conveniently construct a function call expression.  FNDECL names the
   function to be called and N arguments are passed in the array
   ARGARRAY.  */

tree
build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
{
  tree fntype = TREE_TYPE (fndecl);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);

  return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
}

/* Conveniently construct a function call expression.  FNDECL names the
   function to be called and the arguments are passed in the vector
   VEC.  */

tree
build_call_expr_loc_vec (location_t loc, tree fndecl, VEC(tree,gc) *vec)
{
  return build_call_expr_loc_array (loc, fndecl, VEC_length (tree, vec),
				    VEC_address (tree, vec));
}

/* Conveniently construct a function call expression.  FNDECL names the
   function to be called, N is the number of arguments, and the "..."
   parameters are the argument expressions.  */

tree
build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
{
  va_list ap;
  tree *argarray = XALLOCAVEC (tree, n);
  int i;

  va_start (ap, n);
  for (i = 0; i < n; i++)
    argarray[i] = va_arg (ap, tree);
  va_end (ap);
  return build_call_expr_loc_array (loc, fndecl, n, argarray);
}

/* Like build_call_expr_loc (UNKNOWN_LOCATION, ...).  Duplicated because
   varargs macros aren't supported by all bootstrap compilers.  */

tree
build_call_expr (tree fndecl, int n, ...)
{
  va_list ap;
  tree *argarray = XALLOCAVEC (tree, n);
  int i;

  va_start (ap, n);
  for (i = 0; i < n; i++)
    argarray[i] = va_arg (ap, tree);
  va_end (ap);
  return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
}
/* Construct a CALL_EXPR with type TYPE with FN as the function expression.
   N arguments are passed in the array ARGARRAY.  */

tree
fold_builtin_call_array (location_t loc, tree type,
			 tree fn, int n, tree *argarray)
{
  tree ret = NULL_TREE;
  tree exp;

  if (TREE_CODE (fn) == ADDR_EXPR)
    {
      tree fndecl = TREE_OPERAND (fn, 0);
      if (TREE_CODE (fndecl) == FUNCTION_DECL
	  && DECL_BUILT_IN (fndecl))
	{
	  /* If last argument is __builtin_va_arg_pack (), arguments to this
	     function are not finalized yet.  Defer folding until they are.  */
	  if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
	    {
	      tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
	      if (fndecl2
		  && TREE_CODE (fndecl2) == FUNCTION_DECL
		  && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
		  && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
		return build_call_array_loc (loc, type, fn, n, argarray);
	    }
	  if (avoid_folding_inline_builtin (fndecl))
	    return build_call_array_loc (loc, type, fn, n, argarray);
	  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	    {
	      ret = targetm.fold_builtin (fndecl, n, argarray, false);
	      if (ret)
		return ret;

	      return build_call_array_loc (loc, type, fn, n, argarray);
	    }
	  else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
	    {
	      /* First try the transformations that don't require consing up
		 an exp.  */
	      ret = fold_builtin_n (loc, fndecl, argarray, n, false);
	      if (ret)
		return ret;
	    }

	  /* If we got this far, we need to build an exp.  */
	  exp = build_call_array_loc (loc, type, fn, n, argarray);
	  ret = fold_builtin_varargs (loc, fndecl, exp, false);
	  return ret ? ret : exp;
	}
    }

  return build_call_array_loc (loc, type, fn, n, argarray);
}
/* Construct a new CALL_EXPR to FNDECL using the tail of the argument
   list ARGS along with N new arguments in NEWARGS.  SKIP is the number
   of arguments in ARGS to be omitted.  OLDNARGS is the number of
   elements in ARGS.  */

static tree
rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
			  int skip, tree fndecl, int n, va_list newargs)
{
  int nargs = oldnargs - skip + n;
  tree *buffer;

  if (n > 0)
    {
      int i, j;

      buffer = XALLOCAVEC (tree, nargs);
      for (i = 0; i < n; i++)
	buffer[i] = va_arg (newargs, tree);
      for (j = skip; j < oldnargs; j++, i++)
	buffer[i] = args[j];
    }
  else
    buffer = args + skip;

  return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
}

/* Construct a new CALL_EXPR to FNDECL using the tail of the argument
   list ARGS along with N new arguments specified as the "..."
   parameters.  SKIP is the number of arguments in ARGS to be omitted.
   OLDNARGS is the number of elements in ARGS.  */

static tree
rewrite_call_expr_array (location_t loc, int oldnargs, tree *args,
			 int skip, tree fndecl, int n, ...)
{
  va_list ap;
  tree t;

  va_start (ap, n);
  t = rewrite_call_expr_valist (loc, oldnargs, args, skip, fndecl, n, ap);
  va_end (ap);

  return t;
}

/* Construct a new CALL_EXPR using the tail of the argument list of EXP
   along with N new arguments specified as the "..." parameters.  SKIP
   is the number of arguments in EXP to be omitted.  This function is used
   to do varargs-to-varargs transformations.  */

static tree
rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
{
  va_list ap;
  tree t;

  va_start (ap, n);
  t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
				CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
  va_end (ap);

  return t;
}
/* Validate a single argument ARG against a tree code CODE representing
   a type.  */

static bool
validate_arg (const_tree arg, enum tree_code code)
{
  if (!arg)
    return false;
  else if (code == POINTER_TYPE)
    return POINTER_TYPE_P (TREE_TYPE (arg));
  else if (code == INTEGER_TYPE)
    return INTEGRAL_TYPE_P (TREE_TYPE (arg));
  return code == TREE_CODE (TREE_TYPE (arg));
}
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.

   This is the GIMPLE version of validate_arglist.  Eventually we want to
   completely convert builtins.c to work from GIMPLEs and the tree based
   validate_arglist will then be removed.  */

bool
validate_gimple_arglist (const_gimple call, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_tree arg;
  size_t i;

  va_start (ap, call);
  i = 0;

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = (i == gimple_call_num_args (call));
	  goto end;
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = gimple_call_arg (call, i++);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}
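
/* Illustrative usage (commentary only): a strncpy-style call is
   checked with

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE,
		       VOID_TYPE)

   where the trailing VOID_TYPE demands that no further arguments
   remain, and a trailing 0 instead would accept any extras.  */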
/* Default target-specific builtin expander that does nothing.  */

rtx
default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
			rtx target ATTRIBUTE_UNUSED,
			rtx subtarget ATTRIBUTE_UNUSED,
			enum machine_mode mode ATTRIBUTE_UNUSED,
			int ignore ATTRIBUTE_UNUSED)
{
  return NULL_RTX;
}
/* Returns true if EXP represents data that would potentially reside
   in a readonly section.  */

bool
readonly_data_expr (tree exp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) != ADDR_EXPR)
    return false;

  exp = get_base_address (TREE_OPERAND (exp, 0));
  if (!exp)
    return false;

  /* Make sure we call decl_readonly_section only for trees it
     can handle (since it returns true for everything it doesn't
     understand).  */
  if (TREE_CODE (exp) == STRING_CST
      || TREE_CODE (exp) == CONSTRUCTOR
      || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
    return decl_readonly_section (exp, 0);
  else
    return false;
}
/* Simplify a call to the strstr builtin.  S1 and S2 are the arguments
   to the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1, *p2;

      p2 = c_getstr (s2);
      if (p2 == NULL)
	return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
	{
	  const char *r = strstr (p1, p2);
	  tree tem;

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (s1), 0);

	  /* Return an offset into the constant string argument.  */
	  tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
	  return fold_convert_loc (loc, type, tem);
	}

      /* The argument is const char *, and the result is char *, so we need
	 a type conversion here to avoid a warning.  */
      if (p2[0] == '\0')
	return fold_convert_loc (loc, type, s1);

      if (p2[1] != '\0')
	return NULL_TREE;

      fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (!fn)
	return NULL_TREE;

      /* New argument list transforming strstr(s1, s2) to
	 strchr(s1, s2[0]).  */
      return build_call_expr_loc (loc, fn, 2, s1,
				  build_int_cst (integer_type_node, p2[0]));
    }
}
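
/* Illustrative example (commentary only): strstr (s, "") folds to s,
   a fully constant search such as strstr ("hello", "ll") folds to an
   offset into the string literal, and a single-character needle like
   strstr (s, "c") becomes strchr (s, 'c').  */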
/* Simplify a call to the strchr builtin.  S1 and S2 are the arguments to
   the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1;

      if (TREE_CODE (s2) != INTEGER_CST)
	return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
	{
	  char c;
	  const char *r;
	  tree tem;

	  if (target_char_cast (s2, &c))
	    return NULL_TREE;

	  r = strchr (p1, c);

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (s1), 0);

	  /* Return an offset into the constant string argument.  */
	  tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
	  return fold_convert_loc (loc, type, tem);
	}
      return NULL_TREE;
    }
}
/* Simplify a call to the strrchr builtin.  S1 and S2 are the arguments to
   the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1;

      if (TREE_CODE (s2) != INTEGER_CST)
	return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
	{
	  char c;
	  const char *r;
	  tree tem;

	  if (target_char_cast (s2, &c))
	    return NULL_TREE;

	  r = strrchr (p1, c);

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (s1), 0);

	  /* Return an offset into the constant string argument.  */
	  tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
	  return fold_convert_loc (loc, type, tem);
	}

      if (! integer_zerop (s2))
	return NULL_TREE;

      fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (!fn)
	return NULL_TREE;

      /* Transform strrchr(s1, '\0') to strchr(s1, '\0').  */
      return build_call_expr_loc (loc, fn, 2, s1, s2);
    }
}
/* Simplify a call to the strpbrk builtin.  S1 and S2 are the arguments
   to the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1, *p2;

      p2 = c_getstr (s2);
      if (p2 == NULL)
	return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
	{
	  const char *r = strpbrk (p1, p2);
	  tree tem;

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (s1), 0);

	  /* Return an offset into the constant string argument.  */
	  tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
	  return fold_convert_loc (loc, type, tem);
	}

      if (p2[0] == '\0')
	/* strpbrk(x, "") == NULL.
	   Evaluate and ignore s1 in case it had side-effects.  */
	return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);

      if (p2[1] != '\0')
	return NULL_TREE;  /* Really call strpbrk.  */

      fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (!fn)
	return NULL_TREE;

      /* New argument list transforming strpbrk(s1, s2) to
	 strchr(s1, s2[0]).  */
      return build_call_expr_loc (loc, fn, 2, s1,
				  build_int_cst (integer_type_node, p2[0]));
    }
}
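
/* Illustrative example (commentary only): strpbrk (s, "") folds to a
   null pointer while still evaluating S for side effects, and a
   single-character set strpbrk (s, "c") becomes strchr (s, 'c').  */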
/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
{
  if (!validate_arg (dst, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p = c_getstr (src);

      /* If the string length is zero, return the dst parameter.  */
      if (p && *p == '\0')
	return dst;

      if (optimize_insn_for_speed_p ())
	{
	  /* See if we can store by pieces into (dst + strlen(dst)).  */
	  tree newdst, call;
	  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
	  tree strcpy_fn = builtin_decl_implicit (BUILT_IN_STRCPY);

	  if (!strlen_fn || !strcpy_fn)
	    return NULL_TREE;

	  /* If we don't have a movstr we don't want to emit an strcpy
	     call.  We have to do that if the length of the source string
	     isn't computable (in that case we can use memcpy probably
	     later expanding to a sequence of mov instructions).  If we
	     have movstr instructions we can emit strcpy calls.  */
	  if (!HAVE_movstr)
	    {
	      tree len = c_strlen (src, 1);
	      if (! len || TREE_SIDE_EFFECTS (len))
		return NULL_TREE;
	    }

	  /* Stabilize the argument list.  */
	  dst = builtin_save_expr (dst);

	  /* Create strlen (dst).  */
	  newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
	  /* Create (dst p+ strlen (dst)).  */

	  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
	  newdst = builtin_save_expr (newdst);

	  call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
	  return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
	}
      return NULL_TREE;
    }
}
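
/* Illustrative example (commentary only): for a non-constant DST the
   transformation above yields roughly

     tmp = dst, (strcpy (tmp + strlen (tmp), src), tmp)

   so the concatenation is expanded as a copy placed at the end of
   DST, with DST itself as the value of the whole expression.  */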
/* Simplify a call to the strncat builtin.  DST, SRC, and LEN are the
   arguments to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
{
  if (!validate_arg (dst, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p = c_getstr (src);

      /* If the requested length is zero, or the src parameter string
	 length is zero, return the dst parameter.  */
      if (integer_zerop (len) || (p && *p == '\0'))
	return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);

      /* If the requested len is greater than or equal to the string
	 length, call strcat.  */
      if (TREE_CODE (len) == INTEGER_CST && p
	  && compare_tree_int (len, strlen (p)) >= 0)
	{
	  tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);

	  /* If the replacement _DECL isn't initialized, don't do the
	     transformation.  */
	  if (!fn)
	    return NULL_TREE;

	  return build_call_expr_loc (loc, fn, 2, dst, src);
	}
      return NULL_TREE;
    }
}
/* Simplify a call to the strspn builtin.  S1 and S2 are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strspn (location_t loc, tree s1, tree s2)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);

      /* If both arguments are constants, evaluate at compile-time.  */
      if (p1 && p2)
        {
          const size_t r = strspn (p1, p2);
          return build_int_cst (size_type_node, r);
        }

      /* If either argument is "", return NULL(0).  */
      if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
        /* Evaluate and ignore both arguments in case either one has
           side effects.  */
        return omit_two_operands_loc (loc, size_type_node, size_zero_node,
                                      s1, s2);
      return NULL_TREE;
    }
}

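/* For example,

       strspn ("abcba", "ab")

   folds to the compile-time constant 2, and

       strspn (s, "")

   folds to 0 while still evaluating S for side effects.  */
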
/* Simplify a call to the strcspn builtin.  S1 and S2 are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strcspn (location_t loc, tree s1, tree s2)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);

      /* If both arguments are constants, evaluate at compile-time.  */
      if (p1 && p2)
        {
          const size_t r = strcspn (p1, p2);
          return build_int_cst (size_type_node, r);
        }

      /* If the first argument is "", return NULL(0).  */
      if (p1 && *p1 == '\0')
        {
          /* Evaluate and ignore argument s2 in case it has
             side-effects.  */
          return omit_one_operand_loc (loc, size_type_node,
                                       size_zero_node, s2);
        }

      /* If the second argument is "", return __builtin_strlen(s1).  */
      if (p2 && *p2 == '\0')
        {
          tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);

          /* If the replacement _DECL isn't initialized, don't do the
             transformation.  */
          if (!fn)
            return NULL_TREE;

          return build_call_expr_loc (loc, fn, 1, s1);
        }
      return NULL_TREE;
    }
}

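/* For example,

       strcspn (s1, "")

   is simplified to

       __builtin_strlen (s1)

   because no character of S1 can appear in an empty reject set.  */
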
/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
   to the call.  IGNORE is true if the value returned
   by the builtin will be ignored.  UNLOCKED is true if this is
   actually a call to fputs_unlocked.  If LEN is non-NULL, it represents
   the known length of the string.  Return NULL_TREE if no simplification
   was possible.  */

static tree
fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
                    bool ignore, bool unlocked, tree len)
{
  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
                         ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
                         : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
                          ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
                          : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation.  */
  if (!ignore)
    return NULL_TREE;

  /* Verify the arguments in the original call.  */
  if (!validate_arg (arg0, POINTER_TYPE)
      || !validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  if (! len)
    len = c_strlen (arg0, 0);

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return NULL_TREE;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely.  */
      return omit_one_operand_loc (loc, integer_type_node,
                                   integer_zero_node, arg1);

    case 0: /* length is 1, call fputc.  */
      {
        const char *p = c_getstr (arg0);

        if (p != NULL)
          {
            if (!fn_fputc)
              return NULL_TREE;

            return build_call_expr_loc (loc, fn_fputc, 2,
                                        build_int_cst
                                          (integer_type_node, p[0]), arg1);
          }
      }
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
        /* If optimizing for size keep fputs.  */
        if (optimize_function_for_size_p (cfun))
          return NULL_TREE;
        /* New argument list transforming fputs(string, stream) to
           fwrite(string, 1, len, stream).  */
        if (!fn_fwrite)
          return NULL_TREE;
        return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
                                    size_one_node, len, arg1);
      }
    default:
      gcc_unreachable ();
    }
  return NULL_TREE;
}

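/* For example, when the result is unused,

       fputs ("x", fp)      becomes  fputc ('x', fp)
       fputs ("hello", fp)  becomes  fwrite ("hello", 1, 5, fp)

   and fputs ("", fp) is deleted entirely (FP is still evaluated).  */
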
/* Fold the next_arg or va_start call EXP.  Returns true if there was an
   error produced; false otherwise.  This is done so that we don't output
   the error or warning twice or three times.  */

bool
fold_builtin_next_arg (tree exp, bool va_start_p)
{
  tree fntype = TREE_TYPE (current_function_decl);
  int nargs = call_expr_nargs (exp);
  tree arg;
  /* There is good chance the current input_location points inside the
     definition of the va_start macro (perhaps on the token for
     builtin) in a system header, so warnings will not be emitted.
     Use the location in real source code.  */
  source_location current_location =
    linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
                                              NULL);

  if (!stdarg_p (fntype))
    {
      error ("%<va_start%> used in function with fixed args");
      return true;
    }

  if (va_start_p)
    {
      if (nargs != 2)
        {
          error ("wrong number of arguments to function %<va_start%>");
          return true;
        }
      arg = CALL_EXPR_ARG (exp, 1);
    }
  /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
     when we checked the arguments and if needed issued a warning.  */
  else
    {
      if (nargs == 0)
        {
          /* Evidently an out of date version of <stdarg.h>; can't validate
             va_start's second argument, but can still work as intended.  */
          warning_at (current_location,
                      OPT_Wvarargs,
                      "%<__builtin_next_arg%> called without an argument");
          return true;
        }
      else if (nargs > 1)
        {
          error ("wrong number of arguments to function %<__builtin_next_arg%>");
          return true;
        }
      arg = CALL_EXPR_ARG (exp, 0);
    }

  if (TREE_CODE (arg) == SSA_NAME)
    arg = SSA_NAME_VAR (arg);

  /* We destructively modify the call to be __builtin_va_start (ap, 0)
     or __builtin_next_arg (0) the first time we see it, after checking
     the arguments and if needed issuing a warning.  */
  if (!integer_zerop (arg))
    {
      tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));

      /* Strip off all nops for the sake of the comparison.  This
         is not quite the same as STRIP_NOPS.  It does more.
         We must also strip off INDIRECT_EXPR for C++ reference
         parameters.  */
      while (CONVERT_EXPR_P (arg)
             || TREE_CODE (arg) == INDIRECT_REF)
        arg = TREE_OPERAND (arg, 0);
      if (arg != last_parm)
        {
          /* FIXME: Sometimes with the tree optimizers we can get
             something other than the last argument even though the
             user used the last argument.  We just warn and set the
             arg to be the last argument so that we will get
             wrong-code because of it.  */
          warning_at (current_location,
                      OPT_Wvarargs,
                      "second parameter of %<va_start%> not last named argument");
        }

      /* Undefined by C99 7.15.1.4p4 (va_start):
         "If the parameter parmN is declared with the register storage
         class, with a function or array type, or with a type that is
         not compatible with the type that results after application of
         the default argument promotions, the behavior is undefined."  */
      else if (DECL_REGISTER (arg))
        {
          warning_at (current_location,
                      OPT_Wvarargs,
                      "undefined behaviour when second parameter of "
                      "%<va_start%> is declared with %<register%> storage");
        }

      /* We want to verify the second parameter just once before the tree
         optimizers are run and then avoid keeping it in the tree,
         as otherwise we could warn even for correct code like:
         void foo (int i, ...)
         { va_list ap; i++; va_start (ap, i); va_end (ap); }  */
      if (va_start_p)
        CALL_EXPR_ARG (exp, 1) = integer_zero_node;
      else
        CALL_EXPR_ARG (exp, 0) = integer_zero_node;
    }
  return false;
}

/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
   ORIG may be null if this is a 2-argument call.  We don't attempt to
   simplify calls with more than 3 arguments.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.  If IGNORED is true, it means that
   the caller does not use the returned value of the function.  */

static tree
fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
                      tree orig, int ignored)
{
  tree call, retval;
  const char *fmt_str = NULL;

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;
  if (orig && !validate_arg (orig, POINTER_TYPE))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  call = NULL_TREE;
  retval = NULL_TREE;

  if (!init_target_chars ())
    return NULL_TREE;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
        return NULL_TREE;

      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
        return NULL_TREE;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
         'format' is known to contain no % formats.  */
      call = build_call_expr_loc (loc, fn, 2, dest, fmt);
      if (!ignored)
        retval = build_int_cst (integer_type_node, strlen (fmt_str));
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
        return NULL_TREE;

      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
        return NULL_TREE;

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
      if (!ignored)
        {
          retval = c_strlen (orig, 1);
          if (!retval || TREE_CODE (retval) != INTEGER_CST)
            return NULL_TREE;
        }
      call = build_call_expr_loc (loc, fn, 2, dest, orig);
    }

  if (call && retval)
    {
      retval = fold_convert_loc
        (loc, TREE_TYPE (TREE_TYPE (builtin_decl_implicit (BUILT_IN_SPRINTF))),
         retval);
      return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
    }
  else
    return call;
}

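/* For example,

       sprintf (dst, "hello")  becomes  strcpy (dst, "hello")

   with 5 substituted as the (otherwise recomputed) return value, and

       sprintf (dst, "%s", src)  becomes  strcpy (dst, src)

   when the result is ignored or strlen (src) folds to a constant.  */
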
/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
   FMT, and ORIG.  ORIG may be null if this is a 3-argument call.  We don't
   attempt to simplify calls with more than 4 arguments.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.  If IGNORED is true, it means that
   the caller does not use the returned value of the function.  */

static tree
fold_builtin_snprintf (location_t loc, tree dest, tree destsize, tree fmt,
                       tree orig, int ignored)
{
  tree call, retval;
  const char *fmt_str = NULL;
  unsigned HOST_WIDE_INT destlen;

  /* Verify the required arguments in the original call.  We deal with two
     types of snprintf() calls: 'snprintf (str, cst, fmt)' and
     'snprintf (dest, cst, "%s", orig)'.  */
  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (destsize, INTEGER_TYPE)
      || !validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;
  if (orig && !validate_arg (orig, POINTER_TYPE))
    return NULL_TREE;

  if (!host_integerp (destsize, 1))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  call = NULL_TREE;
  retval = NULL_TREE;

  if (!init_target_chars ())
    return NULL_TREE;

  destlen = tree_low_cst (destsize, 1);

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      size_t len = strlen (fmt_str);

      /* Don't optimize snprintf (buf, 4, "abc", ptr++).  */
      if (orig)
        return NULL_TREE;

      /* We could expand this as
         memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
         or to
         memcpy (str, fmt_with_nul_at_cstm1, cst);
         but in the former case that might increase code size
         and in the latter case grow .rodata section too much.
         So punt for now.  */
      if (len >= destlen)
        return NULL_TREE;

      if (!fn)
        return NULL_TREE;

      /* Convert snprintf (str, cst, fmt) into strcpy (str, fmt) when
         'format' is known to contain no % formats and
         strlen (fmt) < cst.  */
      call = build_call_expr_loc (loc, fn, 2, dest, fmt);

      if (!ignored)
        retval = build_int_cst (integer_type_node, strlen (fmt_str));
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      unsigned HOST_WIDE_INT origlen;

      /* Don't crash on snprintf (str1, cst, "%s").  */
      if (!orig)
        return NULL_TREE;

      retval = c_strlen (orig, 1);
      if (!retval || !host_integerp (retval, 1))
        return NULL_TREE;

      origlen = tree_low_cst (retval, 1);
      /* We could expand this as
         memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
         or to
         memcpy (str1, str2_with_nul_at_cstm1, cst);
         but in the former case that might increase code size
         and in the latter case grow .rodata section too much.
         So punt for now.  */
      if (origlen >= destlen)
        return NULL_TREE;

      /* Convert snprintf (str1, cst, "%s", str2) into
         strcpy (str1, str2) if strlen (str2) < cst.  */
      if (!fn)
        return NULL_TREE;

      call = build_call_expr_loc (loc, fn, 2, dest, orig);

      if (ignored)
        retval = NULL_TREE;
    }

  if (call && retval)
    {
      tree fn = builtin_decl_explicit (BUILT_IN_SNPRINTF);
      retval = fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fn)), retval);
      return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
    }
  else
    return call;
}

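/* For example, with a known destination size,

       snprintf (dst, 16, "hello")  becomes  strcpy (dst, "hello")

   because strlen ("hello") < 16 guarantees no truncation; the call is
   left alone whenever truncation might occur.  */
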
/* Expand a call EXP to __builtin_object_size.  */

static rtx
expand_builtin_object_size (tree exp)
{
  tree ost;
  int object_size_type;
  tree fndecl = get_callee_fndecl (exp);

  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      error ("%Kfirst argument of %D must be a pointer, second integer constant",
             exp, fndecl);
      expand_builtin_trap ();
      return const0_rtx;
    }

  ost = CALL_EXPR_ARG (exp, 1);
  STRIP_NOPS (ost);

  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    {
      error ("%Klast argument of %D is not integer constant between 0 and 3",
             exp, fndecl);
      expand_builtin_trap ();
      return const0_rtx;
    }

  object_size_type = tree_low_cst (ost, 0);

  return object_size_type < 2 ? constm1_rtx : const0_rtx;
}

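/* So a call whose object size was never determined by earlier passes,
   e.g.

       __builtin_object_size (p, 0)

   expands to (size_t) -1, while types 2 and 3 expand to (size_t) 0.  */
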
/* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   FCODE is the BUILT_IN_* to use.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
                           enum built_in_function fcode)
{
  tree dest, src, len, size;

  if (!validate_arglist (exp,
                         POINTER_TYPE,
                         fcode == BUILT_IN_MEMSET_CHK
                         ? INTEGER_TYPE : POINTER_TYPE,
                         INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dest = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);
  len = CALL_EXPR_ARG (exp, 2);
  size = CALL_EXPR_ARG (exp, 3);

  if (! host_integerp (size, 1))
    return NULL_RTX;

  if (host_integerp (len, 1) || integer_all_onesp (size))
    {
      tree fn;

      if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
        {
          warning_at (tree_nonartificial_location (exp),
                      0, "%Kcall to %D will always overflow destination buffer",
                      exp, get_callee_fndecl (exp));
          return NULL_RTX;
        }

      fn = NULL_TREE;
      /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
         mem{cpy,pcpy,move,set} is available.  */
      switch (fcode)
        {
        case BUILT_IN_MEMCPY_CHK:
          fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
          break;
        case BUILT_IN_MEMPCPY_CHK:
          fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
          break;
        case BUILT_IN_MEMMOVE_CHK:
          fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
          break;
        case BUILT_IN_MEMSET_CHK:
          fn = builtin_decl_explicit (BUILT_IN_MEMSET);
          break;
        default:
          break;
        }

      if (! fn)
        return NULL_RTX;

      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_expr (fn, target, mode, EXPAND_NORMAL);
    }
  else if (fcode == BUILT_IN_MEMSET_CHK)
    return NULL_RTX;
  else
    {
      unsigned int dest_align = get_pointer_alignment (dest);

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
        return NULL_RTX;

      /* If SRC and DEST are the same (and not volatile), do nothing.  */
      if (operand_equal_p (src, dest, 0))
        {
          tree expr;

          if (fcode != BUILT_IN_MEMPCPY_CHK)
            {
              /* Evaluate and ignore LEN in case it has side-effects.  */
              expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
              return expand_expr (dest, target, mode, EXPAND_NORMAL);
            }

          expr = fold_build_pointer_plus (dest, len);
          return expand_expr (expr, target, mode, EXPAND_NORMAL);
        }

      /* __memmove_chk special case.  */
      if (fcode == BUILT_IN_MEMMOVE_CHK)
        {
          unsigned int src_align = get_pointer_alignment (src);

          if (src_align == 0)
            return NULL_RTX;

          /* If src is categorized for a readonly section we can use
             normal __memcpy_chk.  */
          if (readonly_data_expr (src))
            {
              tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
              if (!fn)
                return NULL_RTX;
              fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
                                          dest, src, len, size);
              gcc_assert (TREE_CODE (fn) == CALL_EXPR);
              CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
              return expand_expr (fn, target, mode, EXPAND_NORMAL);
            }
        }
      return NULL_RTX;
    }
}

/* Emit warning if a buffer overflow is detected at compile time.  */

static void
maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
{
  int is_strlen = 0;
  tree len, size;
  location_t loc = tree_nonartificial_location (exp);

  switch (fcode)
    {
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
    /* For __strcat_chk the warning will be emitted only if overflowing
       by at least strlen (dest) + 1 bytes.  */
    case BUILT_IN_STRCAT_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 2);
      is_strlen = 1;
      break;
    case BUILT_IN_STRNCAT_CHK:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      len = CALL_EXPR_ARG (exp, 2);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    default:
      gcc_unreachable ();
    }

  if (! host_integerp (size, 1) || integer_all_onesp (size))
    return;

  if (is_strlen)
    {
      len = c_strlen (len, 1);
      if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
        return;
    }
  else if (fcode == BUILT_IN_STRNCAT_CHK)
    {
      tree src = CALL_EXPR_ARG (exp, 1);
      if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
        return;
      src = c_strlen (src, 1);
      if (! src || ! host_integerp (src, 1))
        {
          warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
                      exp, get_callee_fndecl (exp));
          return;
        }
      else if (tree_int_cst_lt (src, size))
        return;
    }
  else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
    return;

  warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
              exp, get_callee_fndecl (exp));
}

/* Emit warning if a buffer overflow is detected at compile time
   in __sprintf_chk/__vsprintf_chk calls.  */

static void
maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
{
  tree size, len, fmt;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return;
  size = CALL_EXPR_ARG (exp, 2);
  fmt = CALL_EXPR_ARG (exp, 3);

  if (! host_integerp (size, 1) || integer_all_onesp (size))
    return;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return;

  if (!init_target_chars ())
    return;

  /* If the format doesn't contain % args or %%, we know its size.  */
  if (strchr (fmt_str, target_percent) == 0)
    len = build_int_cstu (size_type_node, strlen (fmt_str));
  /* If the format is "%s" and first ... argument is a string literal,
     we know it too.  */
  else if (fcode == BUILT_IN_SPRINTF_CHK
           && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree arg;

      if (nargs < 5)
        return;
      arg = CALL_EXPR_ARG (exp, 4);
      if (! POINTER_TYPE_P (TREE_TYPE (arg)))
        return;

      len = c_strlen (arg, 1);
      if (!len || ! host_integerp (len, 1))
        return;
    }
  else
    return;

  if (! tree_int_cst_lt (len, size))
    warning_at (tree_nonartificial_location (exp),
                0, "%Kcall to %D will always overflow destination buffer",
                exp, get_callee_fndecl (exp));
}

/* Emit warning if a free is called with address of a variable.  */

static void
maybe_emit_free_warning (tree exp)
{
  tree arg = CALL_EXPR_ARG (exp, 0);

  STRIP_NOPS (arg);
  if (TREE_CODE (arg) != ADDR_EXPR)
    return;

  arg = get_base_address (TREE_OPERAND (arg, 0));
  if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
    return;

  if (SSA_VAR_P (arg))
    warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
                "%Kattempt to free a non-heap object %qD", exp, arg);
  else
    warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
                "%Kattempt to free a non-heap object", exp);
}

/* Fold a call to __builtin_object_size with arguments PTR and OST,
   if possible.  */

static tree
fold_builtin_object_size (tree ptr, tree ost)
{
  unsigned HOST_WIDE_INT bytes;
  int object_size_type;

  if (!validate_arg (ptr, POINTER_TYPE)
      || !validate_arg (ost, INTEGER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (ost);

  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    return NULL_TREE;

  object_size_type = tree_low_cst (ost, 0);

  /* __builtin_object_size doesn't evaluate side-effects in its arguments;
     if there are any side-effects, it returns (size_t) -1 for types 0 and 1
     and (size_t) 0 for types 2 and 3.  */
  if (TREE_SIDE_EFFECTS (ptr))
    return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      bytes = compute_builtin_object_size (ptr, object_size_type);
      if (double_int_fits_to_tree_p (size_type_node,
                                     double_int::from_uhwi (bytes)))
        return build_int_cstu (size_type_node, bytes);
    }
  else if (TREE_CODE (ptr) == SSA_NAME)
    {
      /* If object size is not known yet, delay folding until
         later.  Maybe subsequent passes will help determining
         it.  */
      bytes = compute_builtin_object_size (ptr, object_size_type);
      if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
          && double_int_fits_to_tree_p (size_type_node,
                                        double_int::from_uhwi (bytes)))
        return build_int_cstu (size_type_node, bytes);
    }

  return NULL_TREE;
}

/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call.
   IGNORE is true, if return value can be ignored.  FCODE is the BUILT_IN_*
   code of the builtin.  If MAXLEN is not NULL, it is maximum length
   passed as third argument.  */

static tree
fold_builtin_memory_chk (location_t loc, tree fndecl,
                         tree dest, tree src, tree len, tree size,
                         tree maxlen, bool ignore,
                         enum built_in_function fcode)
{
  tree fn;

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src,
                        (fcode == BUILT_IN_MEMSET_CHK
                         ? INTEGER_TYPE : POINTER_TYPE))
      || !validate_arg (len, INTEGER_TYPE)
      || !validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
        return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
                                     dest, len);
      else
        {
          tree temp = fold_build_pointer_plus_loc (loc, dest, len);
          return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
        }
    }

  if (! host_integerp (size, 1))
    return NULL_TREE;

  if (! integer_all_onesp (size))
    {
      if (! host_integerp (len, 1))
        {
          /* If LEN is not constant, try MAXLEN too.
             For MAXLEN only allow optimizing into non-_ocs function
             if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
          if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
            {
              if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
                {
                  /* (void) __mempcpy_chk () can be optimized into
                     (void) __memcpy_chk ().  */
                  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
                  if (!fn)
                    return NULL_TREE;

                  return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
                }
              return NULL_TREE;
            }
        }
      else
        maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
        return NULL_TREE;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return NULL_TREE;

  return build_call_expr_loc (loc, fn, 3, dest, src, len);
}

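/* For example,

       __memcpy_chk (dst, src, 10, 32)

   is folded to

       memcpy (dst, src, 10)

   because the constant length 10 can never overflow the known object
   size 32.  */
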
/* Fold a call to the __st[rp]cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call.
   IGNORE is true if return value can be ignored.  FCODE is the BUILT_IN_*
   code of the builtin.  If MAXLEN is not NULL, it is maximum length of
   strings passed as second argument.  */

static tree
fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
                         tree src, tree size,
                         tree maxlen, bool ignore,
                         enum built_in_function fcode)
{
  tree len, fn;

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE)
      || !validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);

  if (! host_integerp (size, 1))
    return NULL_TREE;

  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! host_integerp (len, 1))
        {
          /* If LEN is not constant, try MAXLEN too.
             For MAXLEN only allow optimizing into non-_ocs function
             if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
          if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
            {
              if (fcode == BUILT_IN_STPCPY_CHK)
                {
                  if (! ignore)
                    return NULL_TREE;

                  /* If return value of __stpcpy_chk is ignored,
                     optimize into __strcpy_chk.  */
                  fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
                  if (!fn)
                    return NULL_TREE;

                  return build_call_expr_loc (loc, fn, 3, dest, src, size);
                }

              if (! len || TREE_SIDE_EFFECTS (len))
                return NULL_TREE;

              /* If c_strlen returned something, but not a constant,
                 transform __strcpy_chk into __memcpy_chk.  */
              fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
              if (!fn)
                return NULL_TREE;

              len = fold_convert_loc (loc, size_type_node, len);
              len = size_binop_loc (loc, PLUS_EXPR, len,
                                    build_int_cst (size_type_node, 1));
              return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
                                       build_call_expr_loc (loc, fn, 4,
                                                            dest, src, len, size));
            }
        }
      else
        maxlen = len;

      if (! tree_int_cst_lt (maxlen, size))
        return NULL_TREE;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
                              ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return NULL_TREE;

  return build_call_expr_loc (loc, fn, 2, dest, src);
}

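/* For example,

       __strcpy_chk (dst, "abc", 8)  becomes  strcpy (dst, "abc")

   since strlen ("abc") + 1 <= 8, while a non-constant but side-effect
   free source length L may instead turn the call into

       __memcpy_chk (dst, src, L + 1, 8);  */
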
/* Fold a call to the __st{r,p}ncpy_chk builtin.  DEST, SRC, LEN, and SIZE
   are the arguments to the call.  If MAXLEN is not NULL, it is maximum
   length passed as third argument.  IGNORE is true if return value can be
   ignored.  FCODE is the BUILT_IN_* code of the builtin.  */

static tree
fold_builtin_stxncpy_chk (location_t loc, tree dest, tree src,
                          tree len, tree size, tree maxlen, bool ignore,
                          enum built_in_function fcode)
{
  tree fn;

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE)
      || !validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;

  if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
    {
      /* If return value of __stpncpy_chk is ignored,
         optimize into __strncpy_chk.  */
      fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
      if (fn)
        return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
    }

  if (! host_integerp (size, 1))
    return NULL_TREE;

  if (! integer_all_onesp (size))
    {
      if (! host_integerp (len, 1))
        {
          /* If LEN is not constant, try MAXLEN too.
             For MAXLEN only allow optimizing into non-_ocs function
             if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
          if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
            return NULL_TREE;
        }
      else
        maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
        return NULL_TREE;
    }

  /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
                              ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
  if (!fn)
    return NULL_TREE;

  return build_call_expr_loc (loc, fn, 3, dest, src, len);
}

/* Fold a call to the __strcat_chk builtin FNDECL.  DEST, SRC, and SIZE
   are the arguments to the call.  */

static tree
fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
                         tree src, tree size)
{
  tree fn;
  const char *p;

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE)
      || !validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;

  p = c_getstr (src);
  /* If the SRC parameter is "", return DEST.  */
  if (p && *p == '\0')
    return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);

  if (! host_integerp (size, 1) || ! integer_all_onesp (size))
    return NULL_TREE;

  /* If __builtin_strcat_chk is used, assume strcat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRCAT);
  if (!fn)
    return NULL_TREE;

  return build_call_expr_loc (loc, fn, 2, dest, src);
}

/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
   LEN, and SIZE.  */

static tree
fold_builtin_strncat_chk (location_t loc, tree fndecl,
                          tree dest, tree src, tree len, tree size)
{
  tree fn;
  const char *p;

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE)
      || !validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;

  p = c_getstr (src);
  /* If the SRC parameter is "" or if LEN is 0, return DEST.  */
  if (p && *p == '\0')
    return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
  else if (integer_zerop (len))
    return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);

  if (! host_integerp (size, 1))
    return NULL_TREE;

  if (! integer_all_onesp (size))
    {
      tree src_len = c_strlen (src, 1);
      if (src_len
          && host_integerp (src_len, 1)
          && host_integerp (len, 1)
          && ! tree_int_cst_lt (len, src_len))
        {
          /* If LEN >= strlen (SRC), optimize into __strcat_chk.  */
          fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
          if (!fn)
            return NULL_TREE;

          return build_call_expr_loc (loc, fn, 3, dest, src, size);
        }
      return NULL_TREE;
    }

  /* If __builtin_strncat_chk is used, assume strncat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
  if (!fn)
    return NULL_TREE;

  return build_call_expr_loc (loc, fn, 3, dest, src, len);
}

/* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
   Return NULL_TREE if a normal call should be emitted rather than
   expanding the function inline.  FCODE is either BUILT_IN_SPRINTF_CHK
   or BUILT_IN_VSPRINTF_CHK.  */

static tree
fold_builtin_sprintf_chk_1 (location_t loc, int nargs, tree *args,
                            enum built_in_function fcode)
{
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return NULL_TREE;
  dest = args[0];
  if (!validate_arg (dest, POINTER_TYPE))
    return NULL_TREE;
  flag = args[1];
  if (!validate_arg (flag, INTEGER_TYPE))
    return NULL_TREE;
  size = args[2];
  if (!validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;
  fmt = args[3];
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  if (! host_integerp (size, 1))
    return NULL_TREE;

  len = NULL_TREE;

  if (!init_target_chars ())
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
        {
          if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
            len = build_int_cstu (size_type_node, strlen (fmt_str));
        }
      /* If the format is "%s" and first ... argument is a string literal,
         we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
               && strcmp (fmt_str, target_percent_s) == 0)
        {
          tree arg;

          if (nargs == 5)
            {
              arg = args[4];
              if (validate_arg (arg, POINTER_TYPE))
                {
                  len = c_strlen (arg, 1);
                  if (! len || ! host_integerp (len, 1))
                    len = NULL_TREE;
                }
            }
        }
    }

  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
        return NULL_TREE;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
        return NULL_TREE;
      if (strchr (fmt_str, target_percent) != NULL
          && strcmp (fmt_str, target_percent_s))
        return NULL_TREE;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
                              ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
  if (!fn)
    return NULL_TREE;

  return rewrite_call_expr_array (loc, nargs, args, 4, fn, 2, dest, fmt);
}

/* Fold a call EXP to __{,v}sprintf_chk.  Return NULL_TREE if
   a normal call should be emitted rather than expanding the function
   inline.  FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.  */

static tree
fold_builtin_sprintf_chk (location_t loc, tree exp,
                          enum built_in_function fcode)
{
  return fold_builtin_sprintf_chk_1 (loc, call_expr_nargs (exp),
                                     CALL_EXPR_ARGP (exp), fcode);
}

/* Fold a call EXP to __{,v}snprintf_chk having NARGS passed as ARGS.
   Return NULL_TREE if a normal call should be emitted rather than expanding
   the function inline.  FCODE is either BUILT_IN_SNPRINTF_CHK or
   BUILT_IN_VSNPRINTF_CHK.  If MAXLEN is not NULL, it is maximum length
   passed as second argument.  */

static tree
fold_builtin_snprintf_chk_1 (location_t loc, int nargs, tree *args,
                             tree maxlen, enum built_in_function fcode)
{
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (nargs < 5)
    return NULL_TREE;
  dest = args[0];
  if (!validate_arg (dest, POINTER_TYPE))
    return NULL_TREE;
  len = args[1];
  if (!validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;
  flag = args[2];
  if (!validate_arg (flag, INTEGER_TYPE))
    return NULL_TREE;
  size = args[3];
  if (!validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;
  fmt = args[4];
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  if (! host_integerp (size, 1))
    return NULL_TREE;

  if (! integer_all_onesp (size))
    {
      if (! host_integerp (len, 1))
        {
          /* If LEN is not constant, try MAXLEN too.
             For MAXLEN only allow optimizing into non-_ocs function
             if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
          if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
            return NULL_TREE;
        }
      else
        maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
        return NULL_TREE;
    }

  if (!init_target_chars ())
    return NULL_TREE;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
        return NULL_TREE;
      if (strchr (fmt_str, target_percent) != NULL
          && strcmp (fmt_str, target_percent_s))
        return NULL_TREE;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
                              ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn)
    return NULL_TREE;

  return rewrite_call_expr_array (loc, nargs, args, 5, fn, 3, dest, len, fmt);
}

/* Fold a call EXP to __{,v}snprintf_chk.  Return NULL_TREE if
   a normal call should be emitted rather than expanding the function
   inline.  FCODE is either BUILT_IN_SNPRINTF_CHK or
   BUILT_IN_VSNPRINTF_CHK.  If MAXLEN is not NULL, it is maximum length
   passed as second argument.  */

static tree
fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
                           enum built_in_function fcode)
{
  return fold_builtin_snprintf_chk_1 (loc, call_expr_nargs (exp),
                                      CALL_EXPR_ARGP (exp), maxlen, fcode);
}

/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
   FMT and ARG are the arguments to the call; we don't fold cases with
   more than 2 arguments, and ARG may be null if this is a 1-argument case.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.  FCODE is the BUILT_IN_*
   code of the function to be simplified.  */

static tree
fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
                     tree arg, bool ignore,
                     enum built_in_function fcode)
{
  tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (! ignore)
    return NULL_TREE;

  /* Verify the required arguments in the original call.  */
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
         unlocked functions exist explicitly.  */
      fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
      fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
    }
  else
    {
      fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
      fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
    }

  if (!init_target_chars ())
    return NULL_TREE;

  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
        {
          if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
            return NULL_TREE;

          if (!arg || !validate_arg (arg, POINTER_TYPE))
            return NULL_TREE;

          str = c_getstr (arg);
          if (str == NULL)
            return NULL_TREE;
        }
      else
        {
          /* The format specifier doesn't contain any '%' characters.  */
          if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
              && arg)
            return NULL_TREE;
          str = fmt_str;
        }

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
        return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
        {
          /* Given printf ("c") (where c is any one character), convert
             "c"[0] to an int and pass that to the replacement function.  */
          newarg = build_int_cst (integer_type_node, str[0]);
          if (fn_putchar)
            call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
        }
      else
        {
          /* If the string was "string\n", call puts("string").  */
          size_t len = strlen (str);
          if ((unsigned char)str[len - 1] == target_newline
              && (size_t) (int) len == len)
            {
              char *newstr;
              tree offset_node, string_cst;

              /* Create a NUL-terminated string that's one char shorter
                 than the original, stripping off the trailing '\n'.  */
              newarg = build_string_literal (len, str);
              string_cst = string_constant (newarg, &offset_node);
              gcc_checking_assert (string_cst
                                   && (TREE_STRING_LENGTH (string_cst)
                                       == (int) len)
                                   && integer_zerop (offset_node)
                                   && (unsigned char)
                                      TREE_STRING_POINTER (string_cst)[len - 1]
                                      == target_newline);
              /* build_string_literal creates a new STRING_CST,
                 modify it in place to avoid double copying.  */
              newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
              newstr[len - 1] = '\0';
              if (fn_puts)
                call = build_call_expr_loc (loc, fn_puts, 1, newarg);
            }
          else
            /* We'd like to arrange to call fputs(string,stdout) here,
               but we need stdout and don't have a way to get it yet.  */
            return NULL_TREE;
        }
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return NULL_TREE;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || !validate_arg (arg, POINTER_TYPE))
        return NULL_TREE;
      if (fn_puts)
        call = build_call_expr_loc (loc, fn_puts, 1, arg);
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || !validate_arg (arg, INTEGER_TYPE))
        return NULL_TREE;
      if (fn_putchar)
        call = build_call_expr_loc (loc, fn_putchar, 1, arg);
    }

  if (!call)
    return NULL_TREE;

  return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
}

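/* For example, when the result is unused,

       printf ("hello\n")  becomes  puts ("hello")
       printf ("x")        becomes  putchar ('x')
       printf ("%s\n", s)  becomes  puts (s)
       printf ("%c", c)    becomes  putchar (c)

   and printf ("") disappears altogether.  */
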
/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
   FP, FMT, and ARG are the arguments to the call.  We don't fold calls with
   more than 3 arguments, and ARG may be null in the 2-argument case.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.  FCODE is the BUILT_IN_*
   code of the function to be simplified.  */

static tree
fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
                      tree fmt, tree arg, bool ignore,
                      enum built_in_function fcode)
{
  tree fn_fputc, fn_fputs, call = NULL_TREE;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (! ignore)
    return NULL_TREE;

  /* Verify the required arguments in the original call.  */
  if (!validate_arg (fp, POINTER_TYPE))
    return NULL_TREE;
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
         unlocked functions exist explicitly.  */
      fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
      fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
    }
  else
    {
      fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
      fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
    }

  if (!init_target_chars ())
    return NULL_TREE;

  /* If the format doesn't contain % args or %%, use fputs.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
          && arg)
        return NULL_TREE;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
        {
          /* If FP has side-effects, just wait until gimplification is
             real.  */
          if (TREE_SIDE_EFFECTS (fp))
            return NULL_TREE;

          return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
        }

      /* When "string" doesn't contain %, replace all cases of
         fprintf (fp, string) with fputs (string, fp).  The fputs
         builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
        call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return NULL_TREE;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || !validate_arg (arg, POINTER_TYPE))
        return NULL_TREE;
      if (fn_fputs)
        call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || !validate_arg (arg, INTEGER_TYPE))
        return NULL_TREE;
      if (fn_fputc)
        call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
    }

  if (!call)
    return NULL_TREE;

  return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
}

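/* For example, when the result is unused,

       fprintf (fp, "hello")  becomes  fputs ("hello", fp)
       fprintf (fp, "%s", s)  becomes  fputs (s, fp)
       fprintf (fp, "%c", c)  becomes  fputc (c, fp)

   leaving the length == 1 special case to the fputs fold above.  */
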
/* Initialize format string characters in the target charset.  */

static bool
init_target_chars (void)
{
  static bool init;
  if (!init)
    {
      target_newline = lang_hooks.to_target_charset ('\n');
      target_percent = lang_hooks.to_target_charset ('%');
      target_c = lang_hooks.to_target_charset ('c');
      target_s = lang_hooks.to_target_charset ('s');
      if (target_newline == 0 || target_percent == 0 || target_c == 0
          || target_s == 0)
        return false;

      target_percent_c[0] = target_percent;
      target_percent_c[1] = target_c;
      target_percent_c[2] = '\0';

      target_percent_s[0] = target_percent;
      target_percent_s[1] = target_s;
      target_percent_s[2] = '\0';

      target_percent_s_newline[0] = target_percent;
      target_percent_s_newline[1] = target_s;
      target_percent_s_newline[2] = target_newline;
      target_percent_s_newline[3] = '\0';

      init = true;
    }
  return true;
}

/* Helper function for do_mpfr_arg*().  Ensure M is a normal number
   and no overflow/underflow occurred.  INEXACT is true if M was not
   exactly calculated.  TYPE is the tree type for the result.  This
   function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail.  */

static tree
do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
      && (!flag_rounding_math || !inexact))
    {
      REAL_VALUE_TYPE rr;

      real_from_mpfr (&rr, m, type, GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
         check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
         but the mpfr_t is not, then we underflowed in the
         conversion.  */
      if (real_isfinite (&rr)
          && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
        {
          REAL_VALUE_TYPE rmode;

          real_convert (&rmode, TYPE_MODE (type), &rr);
          /* Proceed iff the specified mode can hold the value.  */
          if (real_identical (&rmode, &rr))
            return build_real (type, rmode);
        }
    }
  return NULL_TREE;
}

/* Helper function for do_mpc_arg*().  Ensure M is a normal complex
   number and no overflow/underflow occurred.  INEXACT is true if M
   was not exactly calculated.  TYPE is the tree type for the result.
   This function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail; if
   FORCE_CONVERT is true, then bypass the checks.  */

static tree
do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (force_convert
      || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
          && !mpfr_overflow_p () && !mpfr_underflow_p ()
          && (!flag_rounding_math || !inexact)))
    {
      REAL_VALUE_TYPE re, im;

      real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
      real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
         check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
         but the mpfr_t is not, then we underflowed in the
         conversion.  */
      if (force_convert
          || (real_isfinite (&re) && real_isfinite (&im)
              && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
              && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
        {
          REAL_VALUE_TYPE re_mode, im_mode;

          real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
          real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
          /* Proceed iff the specified mode can hold the value.  */
          if (force_convert
              || (real_identical (&re_mode, &re)
                  && real_identical (&im_mode, &im)))
            return build_complex (type, build_real (TREE_TYPE (type), re_mode),
                                  build_real (TREE_TYPE (type), im_mode));
        }
    }
  return NULL_TREE;
}

/* If argument ARG is a REAL_CST, call the one-argument mpfr function
   FUNC on it and return the resulting value as a tree with type TYPE.
   If MIN and/or MAX are not NULL, then the supplied ARG must be
   within those bounds.  If INCLUSIVE is true, then MIN/MAX are
   acceptable values, otherwise they are not.  The mpfr precision is
   set to the precision of TYPE.  We assume that function FUNC returns
   zero if the result could be calculated exactly within the requested
   precision.  */

static tree
do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
              const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
              bool inclusive)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      if (real_isfinite (ra)
          && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min))
          && (!max || real_compare (inclusive ? LE_EXPR : LT_EXPR, ra, max)))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          int inexact;
          mpfr_t m;

          mpfr_init2 (m, prec);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = func (m, m, rnd);
          result = do_mpfr_ckconv (m, type, inexact);
          mpfr_clear (m);
        }
    }

  return result;
}

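/* For example, a call such as

       sin (1.0)

   reaches this helper roughly as do_mpfr_arg1 (arg, type, mpfr_sin,
   NULL, NULL, 0) and folds to a REAL_CST computed at the precision
   of TYPE.  */
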
/* If arguments ARG1 and ARG2 are REAL_CSTs, call the two-argument mpfr
   function FUNC on them and return the resulting value as a tree with
   type TYPE.  The mpfr precision is set to the precision of TYPE.  We
   assume that function FUNC returns zero if the result could be
   calculated exactly within the requested precision.  */

static tree
do_mpfr_arg2 (tree arg1, tree arg2, tree type,
              int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
    {
      const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
      const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);

      if (real_isfinite (ra1) && real_isfinite (ra2))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          int inexact;
          mpfr_t m1, m2;

          mpfr_inits2 (prec, m1, m2, NULL);
          mpfr_from_real (m1, ra1, GMP_RNDN);
          mpfr_from_real (m2, ra2, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = func (m1, m1, m2, rnd);
          result = do_mpfr_ckconv (m1, type, inexact);
          mpfr_clears (m1, m2, NULL);
        }
    }

  return result;
}

/* If arguments ARG1, ARG2, and ARG3 are REAL_CSTs, call the three-argument
   mpfr function FUNC on them and return the resulting value as a tree with
   type TYPE.  The mpfr precision is set to the precision of TYPE.  We
   assume that function FUNC returns zero if the result could be
   calculated exactly within the requested precision.  */

static tree
do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
              int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);
  STRIP_NOPS (arg3);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
      && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
    {
      const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
      const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
      const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);

      if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          int inexact;
          mpfr_t m1, m2, m3;

          mpfr_inits2 (prec, m1, m2, m3, NULL);
          mpfr_from_real (m1, ra1, GMP_RNDN);
          mpfr_from_real (m2, ra2, GMP_RNDN);
          mpfr_from_real (m3, ra3, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = func (m1, m1, m2, m3, rnd);
          result = do_mpfr_ckconv (m1, type, inexact);
          mpfr_clears (m1, m2, m3, NULL);
        }
    }

  return result;
}

/* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
   the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
   If ARG_SINP and ARG_COSP are NULL then the result is returned
   as a complex value.
   The type is taken from the type of ARG and is used for setting the
   precision of the calculation and results.  */

static tree
do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
{
  tree const type = TREE_TYPE (arg);
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST
      && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      if (real_isfinite (ra))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          tree result_s, result_c;
          int inexact;
          mpfr_t m, ms, mc;

          mpfr_inits2 (prec, m, ms, mc, NULL);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = mpfr_sin_cos (ms, mc, m, rnd);
          result_s = do_mpfr_ckconv (ms, type, inexact);
          result_c = do_mpfr_ckconv (mc, type, inexact);
          mpfr_clears (m, ms, mc, NULL);
          if (result_s && result_c)
            {
              /* If we are to return in a complex value do so.  */
              if (!arg_sinp && !arg_cosp)
                return build_complex (build_complex_type (type),
                                      result_c, result_s);

              /* Dereference the sin/cos pointer arguments.  */
              arg_sinp = build_fold_indirect_ref (arg_sinp);
              arg_cosp = build_fold_indirect_ref (arg_cosp);
              /* Proceed if valid pointer types were passed in.  */
              if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
                  && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
                {
                  /* Set the values.  */
                  result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
                                          result_s);
                  TREE_SIDE_EFFECTS (result_s) = 1;
                  result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
                                          result_c);
                  TREE_SIDE_EFFECTS (result_c) = 1;
                  /* Combine the assignments into a compound expr.  */
                  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                    result_s, result_c));
                }
            }
        }
    }
  return result;
}

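/* For example,

       sincos (1.0, &s, &c)

   folds to the pair of compile-time assignments

       s = sin (1.0), c = cos (1.0)

   while a caller passing NULL pointer arguments (as the cexpi fold
   does) takes the complex-return path above instead.  */
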
/* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
   two-argument mpfr order N Bessel function FUNC on them and return
   the resulting value as a tree with type TYPE.  The mpfr precision
   is set to the precision of TYPE.  We assume that function FUNC
   returns zero if the result could be calculated exactly within the
   requested precision.  */

static tree
do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
                  int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
                  const REAL_VALUE_TYPE *min, bool inclusive)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && host_integerp (arg1, 0)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
    {
      const HOST_WIDE_INT n = tree_low_cst (arg1, 0);
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);

      if (n == (long) n
          && real_isfinite (ra)
          && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min)))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          int inexact;
          mpfr_t m;

          mpfr_init2 (m, prec);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = func (m, n, m, rnd);
          result = do_mpfr_ckconv (m, type, inexact);
          mpfr_clear (m);
        }
    }

  return result;
}


/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the integer pointed to by ARG_QUO and return the remainder.  The type
   is taken from the type of ARG0 and is used for setting the precision
   of the calculation and results.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  tree result_rem;
	  long integer_quo;
	  mpfr_t m0, m1;

	  mpfr_inits2 (prec, m0, m1, NULL);
	  mpfr_from_real (m0, ra0, GMP_RNDN);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_clear_flags ();
	  mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
	  /* Remquo is independent of the rounding mode, so pass
	     inexact=0 to do_mpfr_ckconv().  */
	  result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
	  mpfr_clears (m0, m1, NULL);
	  if (result_rem)
	    {
	      /* MPFR calculates quo in the host's long so it may
		 return more bits in quo than the target int can hold
		 if sizeof(host long) > sizeof(target int).  This can
		 happen even for native compilers in LP64 mode.  In
		 these cases, modulo the quo value with the largest
		 number that the target int can hold while leaving one
		 bit for the sign.  */
	      if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
		integer_quo %= (long) (1UL << (INT_TYPE_SIZE - 1));

	      /* Dereference the quo pointer argument.  */
	      arg_quo = build_fold_indirect_ref (arg_quo);
	      /* Proceed iff a valid pointer type was passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
		{
		  /* Set the value.  */
		  tree result_quo
		    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
				   build_int_cst (TREE_TYPE (arg_quo),
						  integer_quo));
		  TREE_SIDE_EFFECTS (result_quo) = 1;
		  /* Combine the quo assignment with the rem.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_quo, result_rem));
		}
	    }
	}
    }
  return result;
}
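
/* A minimal standalone sketch (not part of GCC) of the library semantics
   this folder reproduces for constant arguments: remquo returns x - n*y
   with n = x/y rounded to nearest, and stores the low-order bits of n,
   with the sign of x/y, in *quo.  Build with -lm.  */
#if 0
#include <stdio.h>
#include <math.h>

int
main (void)
{
  int quo;
  double rem = remquo (5.0, 3.0, &quo);
  /* 5/3 rounds to n = 2, so rem = 5 - 2*3 = -1 and quo = 2.  */
  printf ("rem = %g, quo = %d\n", rem, quo);
  return 0;
}
#endif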

/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
	 negative integer.  */
      if (real_isfinite (ra)
	  && ra->cl != rvc_zero
	  && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  int inexact, sg;
	  mpfr_t m;
	  tree result_lg;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = mpfr_lgamma (m, &sg, m, rnd);
	  result_lg = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	  if (result_lg)
	    {
	      tree result_sg;

	      /* Dereference the arg_sg pointer argument.  */
	      arg_sg = build_fold_indirect_ref (arg_sg);
	      /* Assign the signgam value into *arg_sg.  */
	      result_sg = fold_build2 (MODIFY_EXPR,
				       TREE_TYPE (arg_sg), arg_sg,
				       build_int_cst (TREE_TYPE (arg_sg), sg));
	      TREE_SIDE_EFFECTS (result_sg) = 1;
	      /* Combine the signgam assignment with the lgamma result.  */
	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						result_sg, result_lg));
	    }
	}
    }

  return result;
}
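
/* A minimal standalone sketch (not part of GCC) of the interface whose
   constant folding do_mpfr_lgamma_r implements; the SG output of
   mpfr_lgamma supplies the value stored through the second argument.
   lgamma_r is a common extension rather than ISO C, hence the
   feature-test macro.  Build with -lm.  */
#if 0
#define _GNU_SOURCE
#include <stdio.h>
#include <math.h>

int
main (void)
{
  int sg;
  double lg = lgamma_r (0.5, &sg);
  /* gamma(0.5) = sqrt(pi) > 0, so lg = log(sqrt(pi)) and sg = 1.  */
  printf ("lgamma_r(0.5) = %.17g, signgam = %d\n", lg, sg);
  return 0;
}
#endif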

/* If argument ARG is a COMPLEX_CST, call the one-argument mpc
   function FUNC on it and return the resulting value as a tree with
   type TYPE.  The mpfr precision is set to the precision of TYPE.  We
   assume that function FUNC returns zero if the result could be
   calculated exactly within the requested precision.  */

static tree
do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      if (real_isfinite (re) && real_isfinite (im))
	{
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m;

	  mpc_init2 (m, prec);
	  mpfr_from_real (mpc_realref (m), re, rnd);
	  mpfr_from_real (mpc_imagref (m), im, rnd);
	  mpfr_clear_flags ();
	  inexact = func (m, m, crnd);
	  result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
	  mpc_clear (m);
	}
    }

  return result;
}
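
/* A minimal standalone sketch (not part of GCC): mpc_cos is a typical
   FUNC callback for do_mpc_arg1, used when folding calls like ccos on a
   COMPLEX_CST.  Assumes MPC/MPFR are installed; build with
   -lmpc -lmpfr -lgmp.  */
#if 0
#include <stdio.h>
#include <mpc.h>

int
main (void)
{
  mpc_t z;
  mpc_init2 (z, 53);			/* precision of IEEE double */
  mpc_set_d_d (z, 0.0, 1.0, MPC_RNDNN);	/* z = i */
  mpc_cos (z, z, MPC_RNDNN);		/* in place, as in the folder */
  /* cos(i) = cosh(1), approximately 1.5430806.  */
  printf ("ccos(i) = %.17g + %.17g*I\n",
	  mpfr_get_d (mpc_realref (z), GMP_RNDN),
	  mpfr_get_d (mpc_imagref (z), GMP_RNDN));
  mpc_clear (z);
  return 0;
}
#endif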

/* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
   mpc function FUNC on them and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
	     int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
	  || (real_isfinite (re0) && real_isfinite (im0)
	      && real_isfinite (re1) && real_isfinite (im1)))
	{
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m0, m1;

	  mpc_init2 (m0, prec);
	  mpc_init2 (m1, prec);
	  mpfr_from_real (mpc_realref (m0), re0, rnd);
	  mpfr_from_real (mpc_imagref (m0), im0, rnd);
	  mpfr_from_real (mpc_realref (m1), re1, rnd);
	  mpfr_from_real (mpc_imagref (m1), im1, rnd);
	  mpfr_clear_flags ();
	  inexact = func (m0, m0, m1, crnd);
	  result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
	  mpc_clear (m0);
	  mpc_clear (m1);
	}
    }

  return result;
}
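
/* A minimal standalone sketch (not part of GCC) of a call folded through
   do_mpc_arg2 (with mpc_pow as FUNC); with constant operands the whole
   expression below can be evaluated at compile time.  Whether nonfinite
   operands are folded too depends on the math flags that feed
   DO_NONFINITE.  Build with -lm.  */
#if 0
#include <stdio.h>
#include <complex.h>

int
main (void)
{
  double complex r = cpow (I, I);
  /* i^i = exp(-pi/2), approximately 0.20788.  */
  printf ("cpow(I, I) = %.17g + %.17g*I\n", creal (r), cimag (r));
  return 0;
}
#endif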

/* Fold a call STMT to __{,v}sprintf_chk.  Return NULL_TREE if
   a normal call should be emitted rather than expanding the function
   inline.  FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.  */

static tree
gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
{
  int nargs = gimple_call_num_args (stmt);

  return fold_builtin_sprintf_chk_1 (gimple_location (stmt), nargs,
				     (nargs > 0
				      ? gimple_call_arg_ptr (stmt, 0)
				      : &error_mark_node), fcode);
}

/* Fold a call STMT to __{,v}snprintf_chk.  Return NULL_TREE if
   a normal call should be emitted rather than expanding the function
   inline.  FCODE is either BUILT_IN_SNPRINTF_CHK or
   BUILT_IN_VSNPRINTF_CHK.  If MAXLEN is not NULL, it is the maximum
   length passed as the second argument.  */

tree
gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
				  enum built_in_function fcode)
{
  int nargs = gimple_call_num_args (stmt);

  return fold_builtin_snprintf_chk_1 (gimple_location (stmt), nargs,
				      (nargs > 0
				       ? gimple_call_arg_ptr (stmt, 0)
				       : &error_mark_node), maxlen, fcode);
}
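
/* A minimal sketch (not part of GCC) of the kind of call these helpers
   see: with -D_FORTIFY_SOURCE glibc routes sprintf through
   __builtin___sprintf_chk, and for a constant format the folder may
   reduce the checked call to a plain string copy when the destination
   size is known.  */
#if 0
char buf[16];

void
greet (void)
{
  __builtin___sprintf_chk (buf, 0, __builtin_object_size (buf, 0),
			   "%s", "hello");
}
#endif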

/* Builtins with folding operations that operate on "..." arguments
   need special handling; we need to store the arguments in a convenient
   data structure before attempting any folding.  Fortunately there are
   only a few builtins that fall into this category.  FNDECL is the
   function, STMT is the GIMPLE_CALL statement for the call, and IGNORE
   is true if the result of the function call is ignored.  */

static tree
gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
			     bool ignore ATTRIBUTE_UNUSED)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  tree ret = NULL_TREE;

  switch (fcode)
    {
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
      break;

    default:
      break;
    }
  if (ret)
    {
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}

/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_stmt (gimple stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
		    ? gimple_call_arg_ptr (stmt, 0)
		    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	{
	  return targetm.fold_builtin (fndecl, nargs, args, ignore);
	}
      else
	{
	  if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
	    ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	  else
	    ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
	  if (ret)
	    {
	      /* Propagate location information from original call to
		 expansion of builtin.  Otherwise things like
		 maybe_emit_chk_warning, that operate on the expansion
		 of a builtin, will use the wrong location information.  */
	      if (gimple_has_location (stmt))
		{
		  tree realret = ret;
		  if (TREE_CODE (ret) == NOP_EXPR)
		    realret = TREE_OPERAND (ret, 0);
		  if (CAN_HAVE_LOCATION_P (realret)
		      && !EXPR_HAS_LOCATION (realret))
		    SET_EXPR_LOCATION (realret, loc);
		  return realret;
		}
	      return ret;
	    }
	}
    }
  return NULL_TREE;
}
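
/* A minimal sketch (not part of GCC, and hypothetical) of how a GIMPLE
   pass might drive fold_call_stmt: GSI comes from the pass's statement
   walk, and update_call_from_tree (tree-ssa-propagate.h) is one way to
   graft the folded tree back into the IL.  The ignore flag mirrors
   whether the call's value is used.  */
#if 0
static void
maybe_fold_one_call (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree folded;

  if (!is_gimple_call (stmt))
    return;
  folded = fold_call_stmt (stmt, gimple_call_lhs (stmt) == NULL_TREE);
  if (folded)
    update_call_from_tree (gsi, folded);
}
#endif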

/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  tree builtin;
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
	      && asmspec != 0);

  builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);
  switch (DECL_FUNCTION_CODE (decl))
    {
    case BUILT_IN_MEMCPY:
      init_block_move_fn (asmspec);
      memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
      break;
    case BUILT_IN_MEMSET:
      init_block_clear_fn (asmspec);
      memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
      break;
    case BUILT_IN_MEMMOVE:
      memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
      break;
    case BUILT_IN_MEMCMP:
      memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
      break;
    case BUILT_IN_ABORT:
      abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
      break;
    case BUILT_IN_FFS:
      if (INT_TYPE_SIZE < BITS_PER_WORD)
	{
	  set_user_assembler_libfunc ("ffs", asmspec);
	  set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
						       MODE_INT, 0), "ffs");
	}
      break;
    default:
      break;
    }
}

/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
	/* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
	/* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
	/* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
	return true;

      default:
	return false;
      }

  return false;
}
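
/* A minimal sketch (not part of GCC) of calls this predicate treats as
   trivially cheap: each folds to a constant or to a single register
   read, so cost models can ignore them.  */
#if 0
void *
caller_pc (int depth)
{
  if (__builtin_constant_p (depth) && depth == 0)  /* folds to a constant */
    return __builtin_return_address (0);	   /* one register read */
  return (void *) 0;
}
#endif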

/* Return true if DECL is a builtin that is not expensive, i.e., it will
   most probably be expanded inline into reasonably simple code.  This is a
   superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
	return true;

      default:
	return is_simple_builtin (decl);
      }

  return false;
}
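
/* A minimal sketch (not part of GCC) of is_inexpensive_builtin members:
   each expands inline to a handful of instructions (often one, when the
   target has popcount/clz), so such calls do not make a function
   expensive to inline.  */
#if 0
int
mask_stats (unsigned x)
{
  return __builtin_popcount (x)	     /* number of set bits */
	 + __builtin_clz (x | 1u)    /* leading zeros; the |1 keeps the
					operand nonzero, as clz(0) is
					undefined */
	 + __builtin_parity (x);     /* parity of the set-bit count */
}
#endif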