1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
4 2012 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
33 #include "hard-reg-set.h"
36 #include "insn-config.h"
42 #include "typeclass.h"
46 #include "langhooks.h"
47 #include "basic-block.h"
48 #include "tree-mudflap.h"
49 #include "tree-flow.h"
50 #include "value-prof.h"
51 #include "diagnostic-core.h"
55 #ifndef PAD_VARARGS_DOWN
56 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
58 static tree
do_mpc_arg1 (tree
, tree
, int (*)(mpc_ptr
, mpc_srcptr
, mpc_rnd_t
));
60 struct target_builtins default_target_builtins
;
62 struct target_builtins
*this_target_builtins
= &default_target_builtins
;
65 /* Define the names of the builtin function types and codes. */
66 const char *const built_in_class_names
[4]
67 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
69 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
70 const char * built_in_names
[(int) END_BUILTINS
] =
72 #include "builtins.def"
76 /* Setup an array of _DECL trees, make sure each element is
77 initialized to NULL_TREE. */
78 builtin_info_type builtin_info
;
80 static const char *c_getstr (tree
);
81 static rtx
c_readstr (const char *, enum machine_mode
);
82 static int target_char_cast (tree
, char *);
83 static rtx
get_memory_rtx (tree
, tree
);
84 static int apply_args_size (void);
85 static int apply_result_size (void);
86 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
87 static rtx
result_vector (int, rtx
);
89 static void expand_builtin_update_setjmp_buf (rtx
);
90 static void expand_builtin_prefetch (tree
);
91 static rtx
expand_builtin_apply_args (void);
92 static rtx
expand_builtin_apply_args_1 (void);
93 static rtx
expand_builtin_apply (rtx
, rtx
, rtx
);
94 static void expand_builtin_return (rtx
);
95 static enum type_class
type_to_class (tree
);
96 static rtx
expand_builtin_classify_type (tree
);
97 static void expand_errno_check (tree
, rtx
);
98 static rtx
expand_builtin_mathfn (tree
, rtx
, rtx
);
99 static rtx
expand_builtin_mathfn_2 (tree
, rtx
, rtx
);
100 static rtx
expand_builtin_mathfn_3 (tree
, rtx
, rtx
);
101 static rtx
expand_builtin_mathfn_ternary (tree
, rtx
, rtx
);
102 static rtx
expand_builtin_interclass_mathfn (tree
, rtx
);
103 static rtx
expand_builtin_sincos (tree
);
104 static rtx
expand_builtin_cexpi (tree
, rtx
);
105 static rtx
expand_builtin_int_roundingfn (tree
, rtx
);
106 static rtx
expand_builtin_int_roundingfn_2 (tree
, rtx
);
107 static rtx
expand_builtin_next_arg (void);
108 static rtx
expand_builtin_va_start (tree
);
109 static rtx
expand_builtin_va_end (tree
);
110 static rtx
expand_builtin_va_copy (tree
);
111 static rtx
expand_builtin_memcmp (tree
, rtx
, enum machine_mode
);
112 static rtx
expand_builtin_strcmp (tree
, rtx
);
113 static rtx
expand_builtin_strncmp (tree
, rtx
, enum machine_mode
);
114 static rtx
builtin_memcpy_read_str (void *, HOST_WIDE_INT
, enum machine_mode
);
115 static rtx
expand_builtin_memcpy (tree
, rtx
);
116 static rtx
expand_builtin_mempcpy (tree
, rtx
, enum machine_mode
);
117 static rtx
expand_builtin_mempcpy_args (tree
, tree
, tree
, rtx
,
118 enum machine_mode
, int);
119 static rtx
expand_builtin_strcpy (tree
, rtx
);
120 static rtx
expand_builtin_strcpy_args (tree
, tree
, rtx
);
121 static rtx
expand_builtin_stpcpy (tree
, rtx
, enum machine_mode
);
122 static rtx
expand_builtin_strncpy (tree
, rtx
);
123 static rtx
builtin_memset_gen_str (void *, HOST_WIDE_INT
, enum machine_mode
);
124 static rtx
expand_builtin_memset (tree
, rtx
, enum machine_mode
);
125 static rtx
expand_builtin_memset_args (tree
, tree
, tree
, rtx
, enum machine_mode
, tree
);
126 static rtx
expand_builtin_bzero (tree
);
127 static rtx
expand_builtin_strlen (tree
, rtx
, enum machine_mode
);
128 static rtx
expand_builtin_alloca (tree
, bool);
129 static rtx
expand_builtin_unop (enum machine_mode
, tree
, rtx
, rtx
, optab
);
130 static rtx
expand_builtin_frame_address (tree
, tree
);
131 static tree
stabilize_va_list_loc (location_t
, tree
, int);
132 static rtx
expand_builtin_expect (tree
, rtx
);
133 static tree
fold_builtin_constant_p (tree
);
134 static tree
fold_builtin_expect (location_t
, tree
, tree
);
135 static tree
fold_builtin_classify_type (tree
);
136 static tree
fold_builtin_strlen (location_t
, tree
, tree
);
137 static tree
fold_builtin_inf (location_t
, tree
, int);
138 static tree
fold_builtin_nan (tree
, tree
, int);
139 static tree
rewrite_call_expr (location_t
, tree
, int, tree
, int, ...);
140 static bool validate_arg (const_tree
, enum tree_code code
);
141 static bool integer_valued_real_p (tree
);
142 static tree
fold_trunc_transparent_mathfn (location_t
, tree
, tree
);
143 static bool readonly_data_expr (tree
);
144 static rtx
expand_builtin_fabs (tree
, rtx
, rtx
);
145 static rtx
expand_builtin_signbit (tree
, rtx
);
146 static tree
fold_builtin_sqrt (location_t
, tree
, tree
);
147 static tree
fold_builtin_cbrt (location_t
, tree
, tree
);
148 static tree
fold_builtin_pow (location_t
, tree
, tree
, tree
, tree
);
149 static tree
fold_builtin_powi (location_t
, tree
, tree
, tree
, tree
);
150 static tree
fold_builtin_cos (location_t
, tree
, tree
, tree
);
151 static tree
fold_builtin_cosh (location_t
, tree
, tree
, tree
);
152 static tree
fold_builtin_tan (tree
, tree
);
153 static tree
fold_builtin_trunc (location_t
, tree
, tree
);
154 static tree
fold_builtin_floor (location_t
, tree
, tree
);
155 static tree
fold_builtin_ceil (location_t
, tree
, tree
);
156 static tree
fold_builtin_round (location_t
, tree
, tree
);
157 static tree
fold_builtin_int_roundingfn (location_t
, tree
, tree
);
158 static tree
fold_builtin_bitop (tree
, tree
);
159 static tree
fold_builtin_memory_op (location_t
, tree
, tree
, tree
, tree
, bool, int);
160 static tree
fold_builtin_strchr (location_t
, tree
, tree
, tree
);
161 static tree
fold_builtin_memchr (location_t
, tree
, tree
, tree
, tree
);
162 static tree
fold_builtin_memcmp (location_t
, tree
, tree
, tree
);
163 static tree
fold_builtin_strcmp (location_t
, tree
, tree
);
164 static tree
fold_builtin_strncmp (location_t
, tree
, tree
, tree
);
165 static tree
fold_builtin_signbit (location_t
, tree
, tree
);
166 static tree
fold_builtin_copysign (location_t
, tree
, tree
, tree
, tree
);
167 static tree
fold_builtin_isascii (location_t
, tree
);
168 static tree
fold_builtin_toascii (location_t
, tree
);
169 static tree
fold_builtin_isdigit (location_t
, tree
);
170 static tree
fold_builtin_fabs (location_t
, tree
, tree
);
171 static tree
fold_builtin_abs (location_t
, tree
, tree
);
172 static tree
fold_builtin_unordered_cmp (location_t
, tree
, tree
, tree
, enum tree_code
,
174 static tree
fold_builtin_n (location_t
, tree
, tree
*, int, bool);
175 static tree
fold_builtin_0 (location_t
, tree
, bool);
176 static tree
fold_builtin_1 (location_t
, tree
, tree
, bool);
177 static tree
fold_builtin_2 (location_t
, tree
, tree
, tree
, bool);
178 static tree
fold_builtin_3 (location_t
, tree
, tree
, tree
, tree
, bool);
179 static tree
fold_builtin_4 (location_t
, tree
, tree
, tree
, tree
, tree
, bool);
180 static tree
fold_builtin_varargs (location_t
, tree
, tree
, bool);
182 static tree
fold_builtin_strpbrk (location_t
, tree
, tree
, tree
);
183 static tree
fold_builtin_strstr (location_t
, tree
, tree
, tree
);
184 static tree
fold_builtin_strrchr (location_t
, tree
, tree
, tree
);
185 static tree
fold_builtin_strcat (location_t
, tree
, tree
);
186 static tree
fold_builtin_strncat (location_t
, tree
, tree
, tree
);
187 static tree
fold_builtin_strspn (location_t
, tree
, tree
);
188 static tree
fold_builtin_strcspn (location_t
, tree
, tree
);
189 static tree
fold_builtin_sprintf (location_t
, tree
, tree
, tree
, int);
190 static tree
fold_builtin_snprintf (location_t
, tree
, tree
, tree
, tree
, int);
192 static rtx
expand_builtin_object_size (tree
);
193 static rtx
expand_builtin_memory_chk (tree
, rtx
, enum machine_mode
,
194 enum built_in_function
);
195 static void maybe_emit_chk_warning (tree
, enum built_in_function
);
196 static void maybe_emit_sprintf_chk_warning (tree
, enum built_in_function
);
197 static void maybe_emit_free_warning (tree
);
198 static tree
fold_builtin_object_size (tree
, tree
);
199 static tree
fold_builtin_strcat_chk (location_t
, tree
, tree
, tree
, tree
);
200 static tree
fold_builtin_strncat_chk (location_t
, tree
, tree
, tree
, tree
, tree
);
201 static tree
fold_builtin_sprintf_chk (location_t
, tree
, enum built_in_function
);
202 static tree
fold_builtin_printf (location_t
, tree
, tree
, tree
, bool, enum built_in_function
);
203 static tree
fold_builtin_fprintf (location_t
, tree
, tree
, tree
, tree
, bool,
204 enum built_in_function
);
205 static bool init_target_chars (void);
207 static unsigned HOST_WIDE_INT target_newline
;
208 static unsigned HOST_WIDE_INT target_percent
;
209 static unsigned HOST_WIDE_INT target_c
;
210 static unsigned HOST_WIDE_INT target_s
;
211 static char target_percent_c
[3];
212 static char target_percent_s
[3];
213 static char target_percent_s_newline
[4];
214 static tree
do_mpfr_arg1 (tree
, tree
, int (*)(mpfr_ptr
, mpfr_srcptr
, mp_rnd_t
),
215 const REAL_VALUE_TYPE
*, const REAL_VALUE_TYPE
*, bool);
216 static tree
do_mpfr_arg2 (tree
, tree
, tree
,
217 int (*)(mpfr_ptr
, mpfr_srcptr
, mpfr_srcptr
, mp_rnd_t
));
218 static tree
do_mpfr_arg3 (tree
, tree
, tree
, tree
,
219 int (*)(mpfr_ptr
, mpfr_srcptr
, mpfr_srcptr
, mpfr_srcptr
, mp_rnd_t
));
220 static tree
do_mpfr_sincos (tree
, tree
, tree
);
221 static tree
do_mpfr_bessel_n (tree
, tree
, tree
,
222 int (*)(mpfr_ptr
, long, mpfr_srcptr
, mp_rnd_t
),
223 const REAL_VALUE_TYPE
*, bool);
224 static tree
do_mpfr_remquo (tree
, tree
, tree
);
225 static tree
do_mpfr_lgamma_r (tree
, tree
, tree
);
226 static void expand_builtin_sync_synchronize (void);
/* Return true if NAME starts with __builtin_, __sync_ or __atomic_.
   (The original comment omitted __atomic_, which the code checks.)  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  return false;
}
243 /* Return true if DECL is a function symbol representing a built-in. */
246 is_builtin_fn (tree decl
)
248 return TREE_CODE (decl
) == FUNCTION_DECL
&& DECL_BUILT_IN (decl
);
252 /* Return true if NODE should be considered for inline expansion regardless
253 of the optimization level. This means whenever a function is invoked with
254 its "internal" name, which normally contains the prefix "__builtin". */
257 called_as_built_in (tree node
)
259 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
260 we want the name used to call the function, not the name it
262 const char *name
= IDENTIFIER_POINTER (DECL_NAME (node
));
263 return is_builtin_name (name
);
266 /* Compute values M and N such that M divides (address of EXP - N) and such
267 that N < M. If these numbers can be determined, store M in alignp and N in
268 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
269 *alignp and any bit-offset to *bitposp.
271 Note that the address (and thus the alignment) computed here is based
272 on the address to which a symbol resolves, whereas DECL_ALIGN is based
273 on the address at which an object is actually located. These two
274 addresses are not always the same. For example, on ARM targets,
275 the address &foo of a Thumb function foo() has the lowest bit set,
276 whereas foo() itself starts on an even address.
278 If ADDR_P is true we are taking the address of the memory reference EXP
279 and thus cannot rely on the access taking place. */
282 get_object_alignment_2 (tree exp
, unsigned int *alignp
,
283 unsigned HOST_WIDE_INT
*bitposp
, bool addr_p
)
285 HOST_WIDE_INT bitsize
, bitpos
;
287 enum machine_mode mode
;
288 int unsignedp
, volatilep
;
289 unsigned int inner
, align
= BITS_PER_UNIT
;
290 bool known_alignment
= false;
292 /* Get the innermost object and the constant (bitpos) and possibly
293 variable (offset) offset of the access. */
294 exp
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
295 &mode
, &unsignedp
, &volatilep
, true);
297 /* Extract alignment information from the innermost object and
298 possibly adjust bitpos and offset. */
299 if (TREE_CODE (exp
) == FUNCTION_DECL
)
301 /* Function addresses can encode extra information besides their
302 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
303 allows the low bit to be used as a virtual bit, we know
304 that the address itself must be at least 2-byte aligned. */
305 if (TARGET_PTRMEMFUNC_VBIT_LOCATION
== ptrmemfunc_vbit_in_pfn
)
306 align
= 2 * BITS_PER_UNIT
;
308 else if (TREE_CODE (exp
) == LABEL_DECL
)
310 else if (TREE_CODE (exp
) == CONST_DECL
)
312 /* The alignment of a CONST_DECL is determined by its initializer. */
313 exp
= DECL_INITIAL (exp
);
314 align
= TYPE_ALIGN (TREE_TYPE (exp
));
315 #ifdef CONSTANT_ALIGNMENT
316 if (CONSTANT_CLASS_P (exp
))
317 align
= (unsigned) CONSTANT_ALIGNMENT (exp
, align
);
319 known_alignment
= true;
321 else if (DECL_P (exp
))
323 align
= DECL_ALIGN (exp
);
324 known_alignment
= true;
326 else if (TREE_CODE (exp
) == VIEW_CONVERT_EXPR
)
328 align
= TYPE_ALIGN (TREE_TYPE (exp
));
330 else if (TREE_CODE (exp
) == INDIRECT_REF
331 || TREE_CODE (exp
) == MEM_REF
332 || TREE_CODE (exp
) == TARGET_MEM_REF
)
334 tree addr
= TREE_OPERAND (exp
, 0);
336 unsigned HOST_WIDE_INT ptr_bitpos
;
338 if (TREE_CODE (addr
) == BIT_AND_EXPR
339 && TREE_CODE (TREE_OPERAND (addr
, 1)) == INTEGER_CST
)
341 align
= (TREE_INT_CST_LOW (TREE_OPERAND (addr
, 1))
342 & -TREE_INT_CST_LOW (TREE_OPERAND (addr
, 1)));
343 align
*= BITS_PER_UNIT
;
344 addr
= TREE_OPERAND (addr
, 0);
348 = get_pointer_alignment_1 (addr
, &ptr_align
, &ptr_bitpos
);
349 align
= MAX (ptr_align
, align
);
351 /* The alignment of the pointer operand in a TARGET_MEM_REF
352 has to take the variable offset parts into account. */
353 if (TREE_CODE (exp
) == TARGET_MEM_REF
)
357 unsigned HOST_WIDE_INT step
= 1;
359 step
= TREE_INT_CST_LOW (TMR_STEP (exp
));
360 align
= MIN (align
, (step
& -step
) * BITS_PER_UNIT
);
362 if (TMR_INDEX2 (exp
))
363 align
= BITS_PER_UNIT
;
364 known_alignment
= false;
367 /* When EXP is an actual memory reference then we can use
368 TYPE_ALIGN of a pointer indirection to derive alignment.
369 Do so only if get_pointer_alignment_1 did not reveal absolute
370 alignment knowledge and if using that alignment would
371 improve the situation. */
372 if (!addr_p
&& !known_alignment
373 && TYPE_ALIGN (TREE_TYPE (exp
)) > align
)
374 align
= TYPE_ALIGN (TREE_TYPE (exp
));
377 /* Else adjust bitpos accordingly. */
378 bitpos
+= ptr_bitpos
;
379 if (TREE_CODE (exp
) == MEM_REF
380 || TREE_CODE (exp
) == TARGET_MEM_REF
)
381 bitpos
+= mem_ref_offset (exp
).low
* BITS_PER_UNIT
;
384 else if (TREE_CODE (exp
) == STRING_CST
)
386 /* STRING_CST are the only constant objects we allow to be not
387 wrapped inside a CONST_DECL. */
388 align
= TYPE_ALIGN (TREE_TYPE (exp
));
389 #ifdef CONSTANT_ALIGNMENT
390 if (CONSTANT_CLASS_P (exp
))
391 align
= (unsigned) CONSTANT_ALIGNMENT (exp
, align
);
393 known_alignment
= true;
396 /* If there is a non-constant offset part extract the maximum
397 alignment that can prevail. */
403 if (TREE_CODE (offset
) == PLUS_EXPR
)
405 next_offset
= TREE_OPERAND (offset
, 0);
406 offset
= TREE_OPERAND (offset
, 1);
410 if (host_integerp (offset
, 1))
412 /* Any overflow in calculating offset_bits won't change
415 = ((unsigned) tree_low_cst (offset
, 1) * BITS_PER_UNIT
);
418 inner
= MIN (inner
, (offset_bits
& -offset_bits
));
420 else if (TREE_CODE (offset
) == MULT_EXPR
421 && host_integerp (TREE_OPERAND (offset
, 1), 1))
423 /* Any overflow in calculating offset_factor won't change
425 unsigned offset_factor
426 = ((unsigned) tree_low_cst (TREE_OPERAND (offset
, 1), 1)
430 inner
= MIN (inner
, (offset_factor
& -offset_factor
));
434 inner
= MIN (inner
, BITS_PER_UNIT
);
437 offset
= next_offset
;
439 /* Alignment is innermost object alignment adjusted by the constant
440 and non-constant offset parts. */
441 align
= MIN (align
, inner
);
444 *bitposp
= bitpos
& (*alignp
- 1);
445 return known_alignment
;
448 /* For a memory reference expression EXP compute values M and N such that M
449 divides (&EXP - N) and such that N < M. If these numbers can be determined,
450 store M in alignp and N in *BITPOSP and return true. Otherwise return false
451 and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp. */
454 get_object_alignment_1 (tree exp
, unsigned int *alignp
,
455 unsigned HOST_WIDE_INT
*bitposp
)
457 return get_object_alignment_2 (exp
, alignp
, bitposp
, false);
460 /* Return the alignment in bits of EXP, an object. */
463 get_object_alignment (tree exp
)
465 unsigned HOST_WIDE_INT bitpos
= 0;
468 get_object_alignment_1 (exp
, &align
, &bitpos
);
470 /* align and bitpos now specify known low bits of the pointer.
471 ptr & (align - 1) == bitpos. */
474 align
= (bitpos
& -bitpos
);
478 /* For a pointer valued expression EXP compute values M and N such that M
479 divides (EXP - N) and such that N < M. If these numbers can be determined,
480 store M in alignp and N in *BITPOSP and return true. Return false if
481 the results are just a conservative approximation.
483 If EXP is not a pointer, false is returned too. */
486 get_pointer_alignment_1 (tree exp
, unsigned int *alignp
,
487 unsigned HOST_WIDE_INT
*bitposp
)
491 if (TREE_CODE (exp
) == ADDR_EXPR
)
492 return get_object_alignment_2 (TREE_OPERAND (exp
, 0),
493 alignp
, bitposp
, true);
494 else if (TREE_CODE (exp
) == SSA_NAME
495 && POINTER_TYPE_P (TREE_TYPE (exp
)))
497 unsigned int ptr_align
, ptr_misalign
;
498 struct ptr_info_def
*pi
= SSA_NAME_PTR_INFO (exp
);
500 if (pi
&& get_ptr_info_alignment (pi
, &ptr_align
, &ptr_misalign
))
502 *bitposp
= ptr_misalign
* BITS_PER_UNIT
;
503 *alignp
= ptr_align
* BITS_PER_UNIT
;
504 /* We cannot really tell whether this result is an approximation. */
510 *alignp
= BITS_PER_UNIT
;
514 else if (TREE_CODE (exp
) == INTEGER_CST
)
516 *alignp
= BIGGEST_ALIGNMENT
;
517 *bitposp
= ((TREE_INT_CST_LOW (exp
) * BITS_PER_UNIT
)
518 & (BIGGEST_ALIGNMENT
- 1));
523 *alignp
= BITS_PER_UNIT
;
527 /* Return the alignment in bits of EXP, a pointer valued expression.
528 The alignment returned is, by default, the alignment of the thing that
529 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
531 Otherwise, look at the expression to see if we can do better, i.e., if the
532 expression is actually pointing at an object whose alignment is tighter. */
535 get_pointer_alignment (tree exp
)
537 unsigned HOST_WIDE_INT bitpos
= 0;
540 get_pointer_alignment_1 (exp
, &align
, &bitpos
);
542 /* align and bitpos now specify known low bits of the pointer.
543 ptr & (align - 1) == bitpos. */
546 align
= (bitpos
& -bitpos
);
551 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
552 way, because it could contain a zero byte in the middle.
553 TREE_STRING_LENGTH is the size of the character array, not the string.
555 ONLY_VALUE should be nonzero if the result is not going to be emitted
556 into the instruction stream and zero if it is going to be expanded.
557 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
558 is returned, otherwise NULL, since
559 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
560 evaluate the side-effects.
562 The value returned is of type `ssizetype'.
564 Unfortunately, string_constant can't access the values of const char
565 arrays with initializers, so neither can we do so here. */
568 c_strlen (tree src
, int only_value
)
571 HOST_WIDE_INT offset
;
577 if (TREE_CODE (src
) == COND_EXPR
578 && (only_value
|| !TREE_SIDE_EFFECTS (TREE_OPERAND (src
, 0))))
582 len1
= c_strlen (TREE_OPERAND (src
, 1), only_value
);
583 len2
= c_strlen (TREE_OPERAND (src
, 2), only_value
);
584 if (tree_int_cst_equal (len1
, len2
))
588 if (TREE_CODE (src
) == COMPOUND_EXPR
589 && (only_value
|| !TREE_SIDE_EFFECTS (TREE_OPERAND (src
, 0))))
590 return c_strlen (TREE_OPERAND (src
, 1), only_value
);
592 loc
= EXPR_LOC_OR_HERE (src
);
594 src
= string_constant (src
, &offset_node
);
598 max
= TREE_STRING_LENGTH (src
) - 1;
599 ptr
= TREE_STRING_POINTER (src
);
601 if (offset_node
&& TREE_CODE (offset_node
) != INTEGER_CST
)
603 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
604 compute the offset to the following null if we don't know where to
605 start searching for it. */
608 for (i
= 0; i
< max
; i
++)
612 /* We don't know the starting offset, but we do know that the string
613 has no internal zero bytes. We can assume that the offset falls
614 within the bounds of the string; otherwise, the programmer deserves
615 what he gets. Subtract the offset from the length of the string,
616 and return that. This would perhaps not be valid if we were dealing
617 with named arrays in addition to literal string constants. */
619 return size_diffop_loc (loc
, size_int (max
), offset_node
);
622 /* We have a known offset into the string. Start searching there for
623 a null character if we can represent it as a single HOST_WIDE_INT. */
624 if (offset_node
== 0)
626 else if (! host_integerp (offset_node
, 0))
629 offset
= tree_low_cst (offset_node
, 0);
631 /* If the offset is known to be out of bounds, warn, and call strlen at
633 if (offset
< 0 || offset
> max
)
635 /* Suppress multiple warnings for propagated constant strings. */
636 if (! TREE_NO_WARNING (src
))
638 warning_at (loc
, 0, "offset outside bounds of constant string");
639 TREE_NO_WARNING (src
) = 1;
644 /* Use strlen to search for the first zero byte. Since any strings
645 constructed with build_string will have nulls appended, we win even
646 if we get handed something like (char[4])"abcd".
648 Since OFFSET is our starting index into the string, no further
649 calculation is needed. */
650 return ssize_int (strlen (ptr
+ offset
));
653 /* Return a char pointer for a C string if it is a string constant
654 or sum of string constant and integer constant. */
661 src
= string_constant (src
, &offset_node
);
665 if (offset_node
== 0)
666 return TREE_STRING_POINTER (src
);
667 else if (!host_integerp (offset_node
, 1)
668 || compare_tree_int (offset_node
, TREE_STRING_LENGTH (src
) - 1) > 0)
671 return TREE_STRING_POINTER (src
) + tree_low_cst (offset_node
, 1);
674 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
675 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
678 c_readstr (const char *str
, enum machine_mode mode
)
684 gcc_assert (GET_MODE_CLASS (mode
) == MODE_INT
);
689 for (i
= 0; i
< GET_MODE_SIZE (mode
); i
++)
692 if (WORDS_BIG_ENDIAN
)
693 j
= GET_MODE_SIZE (mode
) - i
- 1;
694 if (BYTES_BIG_ENDIAN
!= WORDS_BIG_ENDIAN
695 && GET_MODE_SIZE (mode
) >= UNITS_PER_WORD
)
696 j
= j
+ UNITS_PER_WORD
- 2 * (j
% UNITS_PER_WORD
) - 1;
698 gcc_assert (j
< HOST_BITS_PER_DOUBLE_INT
);
701 ch
= (unsigned char) str
[i
];
702 c
[j
/ HOST_BITS_PER_WIDE_INT
] |= ch
<< (j
% HOST_BITS_PER_WIDE_INT
);
704 return immed_double_const (c
[0], c
[1], mode
);
707 /* Cast a target constant CST to target CHAR and if that value fits into
708 host char type, return zero and put that value into variable pointed to by
712 target_char_cast (tree cst
, char *p
)
714 unsigned HOST_WIDE_INT val
, hostval
;
716 if (TREE_CODE (cst
) != INTEGER_CST
717 || CHAR_TYPE_SIZE
> HOST_BITS_PER_WIDE_INT
)
720 val
= TREE_INT_CST_LOW (cst
);
721 if (CHAR_TYPE_SIZE
< HOST_BITS_PER_WIDE_INT
)
722 val
&= (((unsigned HOST_WIDE_INT
) 1) << CHAR_TYPE_SIZE
) - 1;
725 if (HOST_BITS_PER_CHAR
< HOST_BITS_PER_WIDE_INT
)
726 hostval
&= (((unsigned HOST_WIDE_INT
) 1) << HOST_BITS_PER_CHAR
) - 1;
735 /* Similar to save_expr, but assumes that arbitrary code is not executed
736 in between the multiple evaluations. In particular, we assume that a
737 non-addressable local variable will not be modified. */
740 builtin_save_expr (tree exp
)
742 if (TREE_CODE (exp
) == SSA_NAME
743 || (TREE_ADDRESSABLE (exp
) == 0
744 && (TREE_CODE (exp
) == PARM_DECL
745 || (TREE_CODE (exp
) == VAR_DECL
&& !TREE_STATIC (exp
)))))
748 return save_expr (exp
);
751 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
752 times to get the address of either a higher stack frame, or a return
753 address located within it (depending on FNDECL_CODE). */
756 expand_builtin_return_addr (enum built_in_function fndecl_code
, int count
)
760 #ifdef INITIAL_FRAME_ADDRESS_RTX
761 rtx tem
= INITIAL_FRAME_ADDRESS_RTX
;
765 /* For a zero count with __builtin_return_address, we don't care what
766 frame address we return, because target-specific definitions will
767 override us. Therefore frame pointer elimination is OK, and using
768 the soft frame pointer is OK.
770 For a nonzero count, or a zero count with __builtin_frame_address,
771 we require a stable offset from the current frame pointer to the
772 previous one, so we must use the hard frame pointer, and
773 we must disable frame pointer elimination. */
774 if (count
== 0 && fndecl_code
== BUILT_IN_RETURN_ADDRESS
)
775 tem
= frame_pointer_rtx
;
778 tem
= hard_frame_pointer_rtx
;
780 /* Tell reload not to eliminate the frame pointer. */
781 crtl
->accesses_prior_frames
= 1;
785 /* Some machines need special handling before we can access
786 arbitrary frames. For example, on the SPARC, we must first flush
787 all register windows to the stack. */
788 #ifdef SETUP_FRAME_ADDRESSES
790 SETUP_FRAME_ADDRESSES ();
793 /* On the SPARC, the return address is not in the frame, it is in a
794 register. There is no way to access it off of the current frame
795 pointer, but it can be accessed off the previous frame pointer by
796 reading the value from the register window save area. */
797 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
798 if (fndecl_code
== BUILT_IN_RETURN_ADDRESS
)
802 /* Scan back COUNT frames to the specified frame. */
803 for (i
= 0; i
< count
; i
++)
805 /* Assume the dynamic chain pointer is in the word that the
806 frame address points to, unless otherwise specified. */
807 #ifdef DYNAMIC_CHAIN_ADDRESS
808 tem
= DYNAMIC_CHAIN_ADDRESS (tem
);
810 tem
= memory_address (Pmode
, tem
);
811 tem
= gen_frame_mem (Pmode
, tem
);
812 tem
= copy_to_reg (tem
);
815 /* For __builtin_frame_address, return what we've got. But, on
816 the SPARC for example, we may have to add a bias. */
817 if (fndecl_code
== BUILT_IN_FRAME_ADDRESS
)
818 #ifdef FRAME_ADDR_RTX
819 return FRAME_ADDR_RTX (tem
);
824 /* For __builtin_return_address, get the return address from that frame. */
825 #ifdef RETURN_ADDR_RTX
826 tem
= RETURN_ADDR_RTX (count
, tem
);
828 tem
= memory_address (Pmode
,
829 plus_constant (Pmode
, tem
, GET_MODE_SIZE (Pmode
)));
830 tem
= gen_frame_mem (Pmode
, tem
);
835 /* Alias set used for setjmp buffer. */
836 static alias_set_type setjmp_alias_set
= -1;
838 /* Construct the leading half of a __builtin_setjmp call. Control will
839 return to RECEIVER_LABEL. This is also called directly by the SJLJ
840 exception handling code. */
843 expand_builtin_setjmp_setup (rtx buf_addr
, rtx receiver_label
)
845 enum machine_mode sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
849 if (setjmp_alias_set
== -1)
850 setjmp_alias_set
= new_alias_set ();
852 buf_addr
= convert_memory_address (Pmode
, buf_addr
);
854 buf_addr
= force_reg (Pmode
, force_operand (buf_addr
, NULL_RTX
));
856 /* We store the frame pointer and the address of receiver_label in
857 the buffer and use the rest of it for the stack save area, which
858 is machine-dependent. */
860 mem
= gen_rtx_MEM (Pmode
, buf_addr
);
861 set_mem_alias_set (mem
, setjmp_alias_set
);
862 emit_move_insn (mem
, targetm
.builtin_setjmp_frame_value ());
864 mem
= gen_rtx_MEM (Pmode
, plus_constant (Pmode
, buf_addr
,
865 GET_MODE_SIZE (Pmode
))),
866 set_mem_alias_set (mem
, setjmp_alias_set
);
868 emit_move_insn (validize_mem (mem
),
869 force_reg (Pmode
, gen_rtx_LABEL_REF (Pmode
, receiver_label
)));
871 stack_save
= gen_rtx_MEM (sa_mode
,
872 plus_constant (Pmode
, buf_addr
,
873 2 * GET_MODE_SIZE (Pmode
)));
874 set_mem_alias_set (stack_save
, setjmp_alias_set
);
875 emit_stack_save (SAVE_NONLOCAL
, &stack_save
);
877 /* If there is further processing to do, do it. */
878 #ifdef HAVE_builtin_setjmp_setup
879 if (HAVE_builtin_setjmp_setup
)
880 emit_insn (gen_builtin_setjmp_setup (buf_addr
));
883 /* We have a nonlocal label. */
884 cfun
->has_nonlocal_label
= 1;
887 /* Construct the trailing part of a __builtin_setjmp call. This is
888 also called directly by the SJLJ exception handling code.
889 If RECEIVER_LABEL is NULL, instead contruct a nonlocal goto handler. */
892 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED
)
896 /* Mark the FP as used when we get here, so we have to make sure it's
897 marked as used by this function. */
898 emit_use (hard_frame_pointer_rtx
);
900 /* Mark the static chain as clobbered here so life information
901 doesn't get messed up for it. */
902 chain
= targetm
.calls
.static_chain (current_function_decl
, true);
903 if (chain
&& REG_P (chain
))
904 emit_clobber (chain
);
906 /* Now put in the code to restore the frame pointer, and argument
907 pointer, if needed. */
908 #ifdef HAVE_nonlocal_goto
909 if (! HAVE_nonlocal_goto
)
911 /* First adjust our frame pointer to its actual value. It was
912 previously set to the start of the virtual area corresponding to
913 the stacked variables when we branched here and now needs to be
914 adjusted to the actual hardware fp value.
916 Assignments to virtual registers are converted by
917 instantiate_virtual_regs into the corresponding assignment
918 to the underlying register (fp in this case) that makes
919 the original assignment true.
920 So the following insn will actually be decrementing fp by
921 STARTING_FRAME_OFFSET. */
922 emit_move_insn (virtual_stack_vars_rtx
, hard_frame_pointer_rtx
);
924 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
925 if (fixed_regs
[ARG_POINTER_REGNUM
])
927 #ifdef ELIMINABLE_REGS
928 /* If the argument pointer can be eliminated in favor of the
929 frame pointer, we don't need to restore it. We assume here
930 that if such an elimination is present, it can always be used.
931 This is the case on all known machines; if we don't make this
932 assumption, we do unnecessary saving on many machines. */
934 static const struct elims
{const int from
, to
;} elim_regs
[] = ELIMINABLE_REGS
;
936 for (i
= 0; i
< ARRAY_SIZE (elim_regs
); i
++)
937 if (elim_regs
[i
].from
== ARG_POINTER_REGNUM
938 && elim_regs
[i
].to
== HARD_FRAME_POINTER_REGNUM
)
941 if (i
== ARRAY_SIZE (elim_regs
))
944 /* Now restore our arg pointer from the address at which it
945 was saved in our stack frame. */
946 emit_move_insn (crtl
->args
.internal_arg_pointer
,
947 copy_to_reg (get_arg_pointer_save_area ()));
952 #ifdef HAVE_builtin_setjmp_receiver
953 if (receiver_label
!= NULL
&& HAVE_builtin_setjmp_receiver
)
954 emit_insn (gen_builtin_setjmp_receiver (receiver_label
));
957 #ifdef HAVE_nonlocal_goto_receiver
958 if (HAVE_nonlocal_goto_receiver
)
959 emit_insn (gen_nonlocal_goto_receiver ());
964 /* We must not allow the code we just generated to be reordered by
965 scheduling. Specifically, the update of the frame pointer must
966 happen immediately, not later. */
967 emit_insn (gen_blockage ());
970 /* __builtin_longjmp is passed a pointer to an array of five words (not
971 all will be used on all machines). It operates similarly to the C
972 library function of the same name, but is more efficient. Much of
973 the code below is copied from the handling of non-local gotos. */
/* NOTE(review): this chunk is a line-filtered extraction of builtins.c --
   the embedded original line numbers are non-contiguous, so blank lines,
   brace-only lines, `else'/`#else'/`#endif' lines and the `static void'
   return-type line appear to have been dropped.  Diff against upstream
   GCC builtins.c before treating this as compilable.  */
976 expand_builtin_longjmp (rtx buf_addr
, rtx value
)
978 rtx fp
, lab
, stack
, insn
, last
;
979 enum machine_mode sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
981 /* DRAP is needed for stack realign if longjmp is expanded to current
983 if (SUPPORTS_STACK_ALIGNMENT
)
984 crtl
->need_drap
= true;
/* Lazily create the alias set shared by all setjmp buffer accesses.  */
986 if (setjmp_alias_set
== -1)
987 setjmp_alias_set
= new_alias_set ();
989 buf_addr
= convert_memory_address (Pmode
, buf_addr
);
991 buf_addr
= force_reg (Pmode
, buf_addr
);
993 /* We require that the user must pass a second argument of 1, because
994 that is what builtin_setjmp will return. */
995 gcc_assert (value
== const1_rtx
);
/* Remember the last insn emitted so far; the backwards search below
   asserts it never walks past this point.  */
997 last
= get_last_insn ();
998 #ifdef HAVE_builtin_longjmp
999 if (HAVE_builtin_longjmp
)
1000 emit_insn (gen_builtin_longjmp (buf_addr
));
/* Fallback path: fetch FP, the label, and the saved SP from the
   three consecutive words of the setjmp buffer.  */
1004 fp
= gen_rtx_MEM (Pmode
, buf_addr
);
1005 lab
= gen_rtx_MEM (Pmode
, plus_constant (Pmode
, buf_addr
,
1006 GET_MODE_SIZE (Pmode
)));
1008 stack
= gen_rtx_MEM (sa_mode
, plus_constant (Pmode
, buf_addr
,
1009 2 * GET_MODE_SIZE (Pmode
)));
1010 set_mem_alias_set (fp
, setjmp_alias_set
);
1011 set_mem_alias_set (lab
, setjmp_alias_set
);
1012 set_mem_alias_set (stack
, setjmp_alias_set
);
1014 /* Pick up FP, label, and SP from the block and jump. This code is
1015 from expand_goto in stmt.c; see there for detailed comments. */
1016 #ifdef HAVE_nonlocal_goto
1017 if (HAVE_nonlocal_goto
)
1018 /* We have to pass a value to the nonlocal_goto pattern that will
1019 get copied into the static_chain pointer, but it does not matter
1020 what that value is, because builtin_setjmp does not use it. */
1021 emit_insn (gen_nonlocal_goto (value
, lab
, stack
, fp
));
/* Copy the label out of the buffer BEFORE clobbering the frame
   pointer below, since LAB is addressed relative to the old frame.  */
1025 lab
= copy_to_reg (lab
);
1027 emit_clobber (gen_rtx_MEM (BLKmode
, gen_rtx_SCRATCH (VOIDmode
)));
1028 emit_clobber (gen_rtx_MEM (BLKmode
, hard_frame_pointer_rtx
));
1030 emit_move_insn (hard_frame_pointer_rtx
, fp
);
1031 emit_stack_restore (SAVE_NONLOCAL
, stack
);
/* Keep FP and SP live across the indirect jump.  */
1033 emit_use (hard_frame_pointer_rtx
);
1034 emit_use (stack_pointer_rtx
);
1035 emit_indirect_jump (lab
);
1039 /* Search backwards and mark the jump insn as a non-local goto.
1040 Note that this precludes the use of __builtin_longjmp to a
1041 __builtin_setjmp target in the same function. However, we've
1042 already cautioned the user that these functions are for
1043 internal exception handling use only. */
1044 for (insn
= get_last_insn (); insn
; insn
= PREV_INSN (insn
))
1046 gcc_assert (insn
!= last
);
1050 add_reg_note (insn
, REG_NON_LOCAL_GOTO
, const0_rtx
);
/* NOTE(review): loop body lines (JUMP_P test, break, and the CALL_P
   branch body) appear truncated by the extraction here.  */
1053 else if (CALL_P (insn
))
1058 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1059 and the address of the save area. */
/* NOTE(review): extraction appears to have dropped blank/brace-only and
   `#else'/`#endif' lines (embedded numbering is non-contiguous); compare
   against upstream GCC builtins.c.  Returns an rtx per the callers of
   the expand_builtin_* family -- return statements are not visible here.  */
1062 expand_builtin_nonlocal_goto (tree exp
)
1064 tree t_label
, t_save_area
;
1065 rtx r_label
, r_save_area
, r_fp
, r_sp
, insn
;
/* Validate (label_ptr, save_area_ptr) argument list before expanding.  */
1067 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
1070 t_label
= CALL_EXPR_ARG (exp
, 0);
1071 t_save_area
= CALL_EXPR_ARG (exp
, 1);
1073 r_label
= expand_normal (t_label
)
;
1074 r_label
= convert_memory_address (Pmode
, r_label
);
1075 r_save_area
= expand_normal (t_save_area
);
1076 r_save_area
= convert_memory_address (Pmode
, r_save_area
);
1077 /* Copy the address of the save location to a register just in case it was
1078 based on the frame pointer. */
1079 r_save_area
= copy_to_reg (r_save_area
);
/* Word 0 of the save area holds the saved FP, word 1 the saved SP.  */
1080 r_fp
= gen_rtx_MEM (Pmode
, r_save_area
);
1081 r_sp
= gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL
),
1082 plus_constant (Pmode
, r_save_area
,
1083 GET_MODE_SIZE (Pmode
)));
1085 crtl
->has_nonlocal_goto
= 1;
1087 #ifdef HAVE_nonlocal_goto
1088 /* ??? We no longer need to pass the static chain value, afaik. */
1089 if (HAVE_nonlocal_goto
)
1090 emit_insn (gen_nonlocal_goto (const0_rtx
, r_label
, r_sp
, r_fp
));
/* Fallback path: copy the label out before FP is overwritten below.  */
1094 r_label
= copy_to_reg (r_label
);
1096 emit_clobber (gen_rtx_MEM (BLKmode
, gen_rtx_SCRATCH (VOIDmode
)));
1097 emit_clobber (gen_rtx_MEM (BLKmode
, hard_frame_pointer_rtx
));
1099 /* Restore frame pointer for containing function. */
1100 emit_move_insn (hard_frame_pointer_rtx
, r_fp
);
1101 emit_stack_restore (SAVE_NONLOCAL
, r_sp
);
1103 /* USE of hard_frame_pointer_rtx added for consistency;
1104 not clear if really needed. */
1105 emit_use (hard_frame_pointer_rtx
);
1106 emit_use (stack_pointer_rtx
);
1108 /* If the architecture is using a GP register, we must
1109 conservatively assume that the target function makes use of it.
1110 The prologue of functions with nonlocal gotos must therefore
1111 initialize the GP register to the appropriate value, and we
1112 must then make sure that this value is live at the point
1113 of the jump. (Note that this doesn't necessarily apply
1114 to targets with a nonlocal_goto pattern; they are free
1115 to implement it in their own way. Note also that this is
1116 a no-op if the GP register is a global invariant.) */
1117 if ((unsigned) PIC_OFFSET_TABLE_REGNUM
!= INVALID_REGNUM
1118 && fixed_regs
[PIC_OFFSET_TABLE_REGNUM
])
1119 emit_use (pic_offset_table_rtx
);
1121 emit_indirect_jump (r_label
);
1124 /* Search backwards to the jump insn and mark it as a
1126 for (insn
= get_last_insn (); insn
; insn
= PREV_INSN (insn
))
1130 add_reg_note (insn
, REG_NON_LOCAL_GOTO
, const0_rtx
);
/* NOTE(review): loop body lines (JUMP_P test, break, CALL_P branch body)
   look truncated by the extraction here.  */
1133 else if (CALL_P (insn
))
1140 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1141 (not all will be used on all machines) that was passed to __builtin_setjmp.
1142 It updates the stack pointer in that block to correspond to the current
1146 expand_builtin_update_setjmp_buf (rtx buf_addr
)
1148 enum machine_mode sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
/* NOTE(review): the declaration of STACK_SAVE (original line ~1149, an
   rtx MEM initialized below) appears dropped by the extraction; the
   dangling `=' on the next line is its initializer.  Slot 2 of the
   buffer (offset 2 * GET_MODE_SIZE (Pmode)) holds the saved SP, matching
   the layout used by expand_builtin_longjmp above.  */
1150 = gen_rtx_MEM (sa_mode
,
1153 plus_constant (Pmode
, buf_addr
,
1154 2 * GET_MODE_SIZE (Pmode
))));
1156 emit_stack_save (SAVE_NONLOCAL
, &stack_save
);
1159 /* Expand a call to __builtin_prefetch. For a target that does not support
1160 data prefetch, evaluate the memory address argument in case it has side
/* NOTE(review): the tail of the comment above ("...side effects.") and
   assorted blank/brace lines are missing -- this chunk is a line-filtered
   extraction; diff against upstream GCC builtins.c.  */
1164 expand_builtin_prefetch (tree exp
)
1166 tree arg0
, arg1
, arg2
;
/* Only the address argument is mandatory.  */
1170 if (!validate_arglist (exp
, POINTER_TYPE
, 0))
1173 arg0
= CALL_EXPR_ARG (exp
, 0);
1175 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1176 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1178 nargs
= call_expr_nargs (exp
);
1180 arg1
= CALL_EXPR_ARG (exp
, 1);
1182 arg1
= integer_zero_node
;
1184 arg2
= CALL_EXPR_ARG (exp
, 2);
1186 arg2
= integer_three_node
;
1188 /* Argument 0 is an address. */
1189 op0
= expand_expr (arg0
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
1191 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1192 if (TREE_CODE (arg1
) != INTEGER_CST
)
/* Diagnose and fall back to the default (read) rather than abort.  */
1194 error ("second argument to %<__builtin_prefetch%> must be a constant");
1195 arg1
= integer_zero_node
;
1197 op1
= expand_normal (arg1
);
1198 /* Argument 1 must be either zero or one. */
1199 if (INTVAL (op1
) != 0 && INTVAL (op1
) != 1)
1201 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1206 /* Argument 2 (locality) must be a compile-time constant int. */
1207 if (TREE_CODE (arg2
) != INTEGER_CST
)
1209 error ("third argument to %<__builtin_prefetch%> must be a constant");
1210 arg2
= integer_zero_node
;
1212 op2
= expand_normal (arg2
);
1213 /* Argument 2 must be 0, 1, 2, or 3. */
1214 if (INTVAL (op2
) < 0 || INTVAL (op2
) > 3)
1216 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1220 #ifdef HAVE_prefetch
/* Expand through the target's prefetch insn when available.  */
1223 struct expand_operand ops
[3];
1225 create_address_operand (&ops
[0], op0
);
1226 create_integer_operand (&ops
[1], INTVAL (op1
));
1227 create_integer_operand (&ops
[2], INTVAL (op2
));
1228 if (maybe_expand_insn (CODE_FOR_prefetch
, 3, ops
))
1233 /* Don't do anything with direct references to volatile memory, but
1234 generate code to handle other side effects. */
1235 if (!MEM_P (op0
) && side_effects_p (op0
))
1239 /* Get a MEM rtx for expression EXP which is the address of an operand
1240 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1241 the maximum length of the block of memory that might be accessed or
/* NOTE(review): comment tail and blank/brace lines were dropped by the
   extraction (embedded numbering skips); diff against upstream builtins.c.
   Returns the BLKmode MEM built below -- the return statement itself is
   among the missing lines.  */
1245 get_memory_rtx (tree exp
, tree len
)
1247 tree orig_exp
= exp
;
1250 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1251 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1252 if (TREE_CODE (exp
) == SAVE_EXPR
&& !SAVE_EXPR_RESOLVED_P (exp
))
1253 exp
= TREE_OPERAND (exp
, 0);
/* Expand the ORIGINAL expression (the SAVE_EXPR, if any) for the address.  */
1255 addr
= expand_expr (orig_exp
, NULL_RTX
, ptr_mode
, EXPAND_NORMAL
);
1256 mem
= gen_rtx_MEM (BLKmode
, memory_address (BLKmode
, addr
));
1258 /* Get an expression we can use to find the attributes to assign to MEM.
1259 First remove any nops. */
1260 while (CONVERT_EXPR_P (exp
)
1261 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp
, 0))))
1262 exp
= TREE_OPERAND (exp
, 0);
1264 /* Build a MEM_REF representing the whole accessed area as a byte blob,
1265 (as builtin stringops may alias with anything). */
1266 exp
= fold_build2 (MEM_REF
,
1267 build_array_type (char_type_node
,
1268 build_range_type (sizetype
,
1269 size_one_node
, len
)),
1270 exp
, build_int_cst (ptr_type_node
, 0));
1272 /* If the MEM_REF has no acceptable address, try to get the base object
1273 from the original address we got, and build an all-aliasing
1274 unknown-sized access to that one. */
1275 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp
, 0)))
1276 set_mem_attributes (mem
, exp
, 0);
1277 else if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
1278 && (exp
= get_base_address (TREE_OPERAND (TREE_OPERAND (exp
, 0),
/* NOTE(review): the tail of this condition (orig lines 1279-1280) is
   missing from the extraction.  */
1281 exp
= build_fold_addr_expr (exp
);
1282 exp
= fold_build2 (MEM_REF
,
1283 build_array_type (char_type_node
,
1284 build_range_type (sizetype
,
1287 exp
, build_int_cst (ptr_type_node
, 0));
1288 set_mem_attributes (mem
, exp
, 0);
/* Stringop MEMs may alias anything: force alias set 0.  */
1290 set_mem_alias_set (mem
, 0);
1294 /* Built-in functions to perform an untyped call and return. */
/* Shorthands for the per-target mode arrays living in
   this_target_builtins (see struct target_builtins); indexed by hard
   register number, VOIDmode terminated/marked -- as used by
   apply_args_size / apply_result_size below.  */
1296 #define apply_args_mode \
1297 (this_target_builtins->x_apply_args_mode)
1298 #define apply_result_mode \
1299 (this_target_builtins->x_apply_result_mode)
1301 /* Return the size required for the block returned by __builtin_apply_args,
1302 and initialize apply_args_mode. */
/* NOTE(review): extraction dropped blank/brace lines and (apparently) the
   early-return on the cached SIZE plus the final `return size;' -- the
   `static int size = -1' cache below implies both.  */
1305 apply_args_size (void)
1307 static int size
= -1;
1310 enum machine_mode mode
;
1312 /* The values computed by this function never change. */
1315 /* The first value is the incoming arg-pointer. */
1316 size
= GET_MODE_SIZE (Pmode
);
1318 /* The second value is the structure value address unless this is
1319 passed as an "invisible" first argument. */
1320 if (targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0))
1321 size
+= GET_MODE_SIZE (Pmode
);
/* Then one naturally-aligned slot per hard register that can carry
   function arguments; record each register's mode.  */
1323 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1324 if (FUNCTION_ARG_REGNO_P (regno
))
1326 mode
= targetm
.calls
.get_raw_arg_mode (regno
);
1328 gcc_assert (mode
!= VOIDmode
);
1330 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1331 if (size
% align
!= 0)
1332 size
= CEIL (size
, align
) * align
;
1333 size
+= GET_MODE_SIZE (mode
);
1334 apply_args_mode
[regno
] = mode
;
/* Non-argument registers are marked VOIDmode.  */
1338 apply_args_mode
[regno
] = VOIDmode
;
1344 /* Return the size required for the block returned by __builtin_apply,
1345 and initialize apply_result_mode. */
/* Mirror of apply_args_size, but for value-return registers.
   NOTE(review): the cached-size early return and final `return size;'
   are among the lines dropped by the extraction.  */
1348 apply_result_size (void)
1350 static int size
= -1;
1352 enum machine_mode mode
;
1354 /* The values computed by this function never change. */
1359 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1360 if (targetm
.calls
.function_value_regno_p (regno
))
1362 mode
= targetm
.calls
.get_raw_result_mode (regno
);
1364 gcc_assert (mode
!= VOIDmode
);
1366 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1367 if (size
% align
!= 0)
1368 size
= CEIL (size
, align
) * align
;
1369 size
+= GET_MODE_SIZE (mode
);
1370 apply_result_mode
[regno
] = mode
;
/* Non-return registers are marked VOIDmode.  */
1373 apply_result_mode
[regno
] = VOIDmode
;
1375 /* Allow targets that use untyped_call and untyped_return to override
1376 the size so that machine-specific information can be stored here. */
1377 #ifdef APPLY_RESULT_SIZE
1378 size
= APPLY_RESULT_SIZE
;
1384 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1385 /* Create a vector describing the result block RESULT. If SAVEP is true,
1386 the result block is used to save the values; otherwise it is used to
1387 restore the values. */
/* NOTE(review): blank/brace lines and the initializations of SIZE and
   NELTS (presumably to 0, given their first uses below) were dropped by
   the extraction; diff against upstream builtins.c.  */
1390 result_vector (int savep
, rtx result
)
1392 int regno
, size
, align
, nelts
;
1393 enum machine_mode mode
;
1395 rtx
*savevec
= XALLOCAVEC (rtx
, FIRST_PSEUDO_REGISTER
);
/* One SET per live result register: MEM<-REG when saving,
   REG<-MEM when restoring; offsets mirror apply_result_size.  */
1398 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1399 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
1401 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1402 if (size
% align
!= 0)
1403 size
= CEIL (size
, align
) * align
;
1404 reg
= gen_rtx_REG (mode
, savep
? regno
: INCOMING_REGNO (regno
));
1405 mem
= adjust_address (result
, mode
, size
);
1406 savevec
[nelts
++] = (savep
1407 ? gen_rtx_SET (VOIDmode
, mem
, reg
)
1408 : gen_rtx_SET (VOIDmode
, reg
, mem
));
1409 size
+= GET_MODE_SIZE (mode
);
1411 return gen_rtx_PARALLEL (VOIDmode
, gen_rtvec_v (nelts
, savevec
));
1413 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1415 /* Save the state required to perform an untyped call with the same
1416 arguments as were passed to the current function. */
/* NOTE(review): line-filtered extraction -- blank/brace lines, the
   declarations of REGISTERS and TEM, and parts of the
   STACK_GROWS_DOWNWARD block are missing; diff against upstream.  */
1419 expand_builtin_apply_args_1 (void)
1422 int size
, align
, regno
;
1423 enum machine_mode mode
;
1424 rtx struct_incoming_value
= targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 1);
1426 /* Create a block where the arg-pointer, structure value address,
1427 and argument registers can be saved. */
1428 registers
= assign_stack_local (BLKmode
, apply_args_size (), -1);
1430 /* Walk past the arg-pointer and structure value address. */
1431 size
= GET_MODE_SIZE (Pmode
);
1432 if (targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0))
1433 size
+= GET_MODE_SIZE (Pmode
);
1435 /* Save each register used in calling a function to the block. */
1436 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1437 if ((mode
= apply_args_mode
[regno
]) != VOIDmode
)
1439 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1440 if (size
% align
!= 0)
1441 size
= CEIL (size
, align
) * align
;
/* Use the INCOMING register number: we want the values as passed in.  */
1443 tem
= gen_rtx_REG (mode
, INCOMING_REGNO (regno
));
1445 emit_move_insn (adjust_address (registers
, mode
, size
), tem
);
1446 size
+= GET_MODE_SIZE (mode
);
1449 /* Save the arg pointer to the block. */
1450 tem
= copy_to_reg (crtl
->args
.internal_arg_pointer
);
1451 #ifdef STACK_GROWS_DOWNWARD
1452 /* We need the pointer as the caller actually passed them to us, not
1453 as we might have pretended they were passed. Make sure it's a valid
1454 operand, as emit_move_insn isn't expected to handle a PLUS. */
1456 = force_operand (plus_constant (Pmode
, tem
, crtl
->args
.pretend_args_size
),
1459 emit_move_insn (adjust_address (registers
, Pmode
, 0), tem
);
1461 size
= GET_MODE_SIZE (Pmode
);
1463 /* Save the structure value address unless this is passed as an
1464 "invisible" first argument. */
1465 if (struct_incoming_value
)
1467 emit_move_insn (adjust_address (registers
, Pmode
, size
),
1468 copy_to_reg (struct_incoming_value
));
1469 size
+= GET_MODE_SIZE (Pmode
);
1472 /* Return the address of the block. */
1473 return copy_addr_to_reg (XEXP (registers
, 0));
1476 /* __builtin_apply_args returns block of memory allocated on
1477 the stack into which is stored the arg pointer, structure
1478 value address, static chain, and all the registers that might
1479 possibly be used in performing a function call. The code is
1480 moved to the start of the function so the incoming values are
/* NOTE(review): comment tail, blank/brace lines, the declarations of
   TEMP and SEQ, the start_sequence/end_sequence pair, `else', and the
   final `return temp;' appear dropped by the extraction.  */
1484 expand_builtin_apply_args (void)
1486 /* Don't do __builtin_apply_args more than once in a function.
1487 Save the result of the first call and reuse it. */
1488 if (apply_args_value
!= 0)
1489 return apply_args_value
;
1491 /* When this function is called, it means that registers must be
1492 saved on entry to this function. So we migrate the
1493 call to the first insn of this function. */
1498 temp
= expand_builtin_apply_args_1 ();
1502 apply_args_value
= temp
;
1504 /* Put the insns after the NOTE that starts the function.
1505 If this is inside a start_sequence, make the outer-level insn
1506 chain current, so the code is placed at the start of the
1507 function. If internal_arg_pointer is a non-virtual pseudo,
1508 it needs to be placed after the function that initializes
1510 push_topmost_sequence ();
1511 if (REG_P (crtl
->args
.internal_arg_pointer
)
1512 && REGNO (crtl
->args
.internal_arg_pointer
) > LAST_VIRTUAL_REGISTER
)
1513 emit_insn_before (seq
, parm_birth_insn
);
1515 emit_insn_before (seq
, NEXT_INSN (entry_of_function ()));
1516 pop_topmost_sequence ();
1521 /* Perform an untyped call and save the state required to perform an
1522 untyped return of whatever value was returned by the given function. */
/* FUNCTION is the callee address, ARGUMENTS the block built by
   __builtin_apply_args, ARGSIZE the byte count of stack arguments.
   NOTE(review): line-filtered extraction -- blank/brace/`#else'/`#endif'
   lines, the `rtx valreg' declaration, and `else'/`gcc_unreachable'
   branches are missing; diff against upstream builtins.c.  */
1525 expand_builtin_apply (rtx function
, rtx arguments
, rtx argsize
)
1527 int size
, align
, regno
;
1528 enum machine_mode mode
;
1529 rtx incoming_args
, result
, reg
, dest
, src
, call_insn
;
1530 rtx old_stack_level
= 0;
1531 rtx call_fusage
= 0;
1532 rtx struct_value
= targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0);
1534 arguments
= convert_memory_address (Pmode
, arguments
);
1536 /* Create a block where the return registers can be saved. */
1537 result
= assign_stack_local (BLKmode
, apply_result_size (), -1);
1539 /* Fetch the arg pointer from the ARGUMENTS block. */
1540 incoming_args
= gen_reg_rtx (Pmode
);
1541 emit_move_insn (incoming_args
, gen_rtx_MEM (Pmode
, arguments
));
1542 #ifndef STACK_GROWS_DOWNWARD
1543 incoming_args
= expand_simple_binop (Pmode
, MINUS
, incoming_args
, argsize
,
1544 incoming_args
, 0, OPTAB_LIB_WIDEN
);
1547 /* Push a new argument block and copy the arguments. Do not allow
1548 the (potential) memcpy call below to interfere with our stack
1550 do_pending_stack_adjust ();
1553 /* Save the stack with nonlocal if available. */
1554 #ifdef HAVE_save_stack_nonlocal
1555 if (HAVE_save_stack_nonlocal
)
1556 emit_stack_save (SAVE_NONLOCAL
, &old_stack_level
);
1559 emit_stack_save (SAVE_BLOCK
, &old_stack_level
);
1561 /* Allocate a block of memory onto the stack and copy the memory
1562 arguments to the outgoing arguments address. We can pass TRUE
1563 as the 4th argument because we just saved the stack pointer
1564 and will restore it right after the call. */
1565 allocate_dynamic_stack_space (argsize
, 0, BIGGEST_ALIGNMENT
, true);
1567 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1568 may have already set current_function_calls_alloca to true.
1569 current_function_calls_alloca won't be set if argsize is zero,
1570 so we have to guarantee need_drap is true here. */
1571 if (SUPPORTS_STACK_ALIGNMENT
)
1572 crtl
->need_drap
= true;
1574 dest
= virtual_outgoing_args_rtx
;
1575 #ifndef STACK_GROWS_DOWNWARD
1576 if (CONST_INT_P (argsize
))
1577 dest
= plus_constant (Pmode
, dest
, -INTVAL (argsize
));
1579 dest
= gen_rtx_PLUS (Pmode
, dest
, negate_rtx (Pmode
, argsize
));
/* Block-copy the caller's stack arguments into the new block.  */
1581 dest
= gen_rtx_MEM (BLKmode
, dest
);
1582 set_mem_align (dest
, PARM_BOUNDARY
);
1583 src
= gen_rtx_MEM (BLKmode
, incoming_args
);
1584 set_mem_align (src
, PARM_BOUNDARY
);
1585 emit_block_move (dest
, src
, argsize
, BLOCK_OP_NORMAL
);
1587 /* Refer to the argument block. */
1589 arguments
= gen_rtx_MEM (BLKmode
, arguments
);
1590 set_mem_align (arguments
, PARM_BOUNDARY
);
1592 /* Walk past the arg-pointer and structure value address. */
1593 size
= GET_MODE_SIZE (Pmode
);
1595 size
+= GET_MODE_SIZE (Pmode
);
1597 /* Restore each of the registers previously saved. Make USE insns
1598 for each of these registers for use in making the call. */
1599 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1600 if ((mode
= apply_args_mode
[regno
]) != VOIDmode
)
1602 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1603 if (size
% align
!= 0)
1604 size
= CEIL (size
, align
) * align
;
1605 reg
= gen_rtx_REG (mode
, regno
);
1606 emit_move_insn (reg
, adjust_address (arguments
, mode
, size
));
1607 use_reg (&call_fusage
, reg
);
1608 size
+= GET_MODE_SIZE (mode
);
1611 /* Restore the structure value address unless this is passed as an
1612 "invisible" first argument. */
1613 size
= GET_MODE_SIZE (Pmode
);
1616 rtx value
= gen_reg_rtx (Pmode
);
1617 emit_move_insn (value
, adjust_address (arguments
, Pmode
, size
));
1618 emit_move_insn (struct_value
, value
);
1619 if (REG_P (struct_value
))
1620 use_reg (&call_fusage
, struct_value
);
1621 size
+= GET_MODE_SIZE (Pmode
);
1624 /* All arguments and registers used for the call are set up by now! */
1625 function
= prepare_call_address (NULL
, function
, NULL
, &call_fusage
, 0, 0);
1627 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1628 and we don't want to load it into a register as an optimization,
1629 because prepare_call_address already did it if it should be done. */
1630 if (GET_CODE (function
) != SYMBOL_REF
)
1631 function
= memory_address (FUNCTION_MODE
, function
);
1633 /* Generate the actual call instruction and save the return value. */
1634 #ifdef HAVE_untyped_call
1635 if (HAVE_untyped_call
)
1636 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE
, function
),
1637 result
, result_vector (1, result
)));
1640 #ifdef HAVE_call_value
1641 if (HAVE_call_value
)
1645 /* Locate the unique return register. It is not possible to
1646 express a call that sets more than one return register using
1647 call_value; use untyped_call for that. In fact, untyped_call
1648 only needs to save the return registers in the given block. */
1649 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1650 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
1652 gcc_assert (!valreg
); /* HAVE_untyped_call required. */
1654 valreg
= gen_rtx_REG (mode
, regno
);
1657 emit_call_insn (GEN_CALL_VALUE (valreg
,
1658 gen_rtx_MEM (FUNCTION_MODE
, function
),
1659 const0_rtx
, NULL_RTX
, const0_rtx
));
1661 emit_move_insn (adjust_address (result
, GET_MODE (valreg
), 0), valreg
);
1667 /* Find the CALL insn we just emitted, and attach the register usage
1669 call_insn
= last_call_insn ();
1670 add_function_usage_to (call_insn
, call_fusage
);
1672 /* Restore the stack. */
1673 #ifdef HAVE_save_stack_nonlocal
1674 if (HAVE_save_stack_nonlocal
)
1675 emit_stack_restore (SAVE_NONLOCAL
, old_stack_level
);
1678 emit_stack_restore (SAVE_BLOCK
, old_stack_level
);
1679 fixup_args_size_notes (call_insn
, get_last_insn(), 0);
1683 /* Return the address of the result block. */
1684 result
= copy_addr_to_reg (XEXP (result
, 0));
1685 return convert_memory_address (ptr_mode
, result
);
1688 /* Perform an untyped return. */
/* RESULT is the address of the block produced by __builtin_apply.
   Reloads the saved return registers and jumps to the function epilogue.
   NOTE(review): blank/brace lines, the initialization of SIZE, and an
   `if (call_fusage == 0)' style guard around the sequence handling
   appear dropped by the extraction; diff against upstream.  */
1691 expand_builtin_return (rtx result
)
1693 int size
, align
, regno
;
1694 enum machine_mode mode
;
1696 rtx call_fusage
= 0;
1698 result
= convert_memory_address (Pmode
, result
);
/* Called for its side effect of initializing apply_result_mode.  */
1700 apply_result_size ();
1701 result
= gen_rtx_MEM (BLKmode
, result
);
1703 #ifdef HAVE_untyped_return
1704 if (HAVE_untyped_return
)
1706 emit_jump_insn (gen_untyped_return (result
, result_vector (0, result
)));
1712 /* Restore the return value and note that each value is used. */
1714 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1715 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
1717 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1718 if (size
% align
!= 0)
1719 size
= CEIL (size
, align
) * align
;
1720 reg
= gen_rtx_REG (mode
, INCOMING_REGNO (regno
));
1721 emit_move_insn (reg
, adjust_address (result
, mode
, size
));
1723 push_to_sequence (call_fusage
);
1725 call_fusage
= get_insns ();
1727 size
+= GET_MODE_SIZE (mode
);
1730 /* Put the USE insns before the return. */
1731 emit_insn (call_fusage
);
1733 /* Return whatever values was restored by jumping directly to the end
1735 expand_naked_return ();
1738 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
/* Map a tree type code to the __builtin_classify_type enum value.
   NOTE(review): the UNION_TYPE case (original line 1757, which shares
   union_type_class with QUAL_UNION_TYPE upstream) appears to have been
   dropped by the extraction -- verify against upstream builtins.c.  */
1740 static enum type_class
1741 type_to_class (tree type
)
1743 switch (TREE_CODE (type
))
1745 case VOID_TYPE
: return void_type_class
;
1746 case INTEGER_TYPE
: return integer_type_class
;
1747 case ENUMERAL_TYPE
: return enumeral_type_class
;
1748 case BOOLEAN_TYPE
: return boolean_type_class
;
1749 case POINTER_TYPE
: return pointer_type_class
;
1750 case REFERENCE_TYPE
: return reference_type_class
;
1751 case OFFSET_TYPE
: return offset_type_class
;
1752 case REAL_TYPE
: return real_type_class
;
1753 case COMPLEX_TYPE
: return complex_type_class
;
1754 case FUNCTION_TYPE
: return function_type_class
;
1755 case METHOD_TYPE
: return method_type_class
;
1756 case RECORD_TYPE
: return record_type_class
;
1758 case QUAL_UNION_TYPE
: return union_type_class
;
/* Arrays flagged as strings classify separately from plain arrays.  */
1759 case ARRAY_TYPE
: return (TYPE_STRING_FLAG (type
)
1760 ? string_type_class
: array_type_class
);
1761 case LANG_TYPE
: return lang_type_class
;
1762 default: return no_type_class
;
1766 /* Expand a call EXP to __builtin_classify_type. */
/* With an argument, classify its type; with no arguments, return
   no_type_class.  Result is a constant rtx.  */
1769 expand_builtin_classify_type (tree exp
)
1771 if (call_expr_nargs (exp
))
1772 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp
, 0))));
1773 return GEN_INT (no_type_class
);
1776 /* This helper macro, meant to be used in mathfn_built_in below,
1777 determines which among a set of three builtin math functions is
1778 appropriate for a given type mode. The `F' and `L' cases are
1779 automatically generated from the `double' case. */
/* Expands to three `case' labels (double/float/long double variants)
   that set the local fcode/fcodef/fcodel variables of
   mathfn_built_in_1 and break out of its switch.  */
1780 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1781 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1782 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1783 fcodel = BUILT_IN_MATHFN##L ; break;
1784 /* Similar to above, but appends _R after any F/L suffix. */
1785 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1786 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1787 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1788 fcodel = BUILT_IN_MATHFN##L_R ; break;
1790 /* Return mathematic function equivalent to FN but operating directly on TYPE,
1791 if available. If IMPLICIT is true use the implicit builtin declaration,
1792 otherwise use the explicit declaration. If we can't do the conversion,
/* NOTE(review): comment tail ("...return 0."), the return type line,
   braces, the `switch (fn)' header, the `default: return NULL_TREE;'
   arm, and the assignments of FCODE2 in the type-dispatch below were
   dropped by the extraction; diff against upstream builtins.c.  */
1796 mathfn_built_in_1 (tree type
, enum built_in_function fn
, bool implicit_p
)
1798 enum built_in_function fcode
, fcodef
, fcodel
, fcode2
;
/* Each CASE_MATHFN expands to the double/float/long-double case labels
   for one math builtin, filling fcode/fcodef/fcodel.  */
1802 CASE_MATHFN (BUILT_IN_ACOS
)
1803 CASE_MATHFN (BUILT_IN_ACOSH
)
1804 CASE_MATHFN (BUILT_IN_ASIN
)
1805 CASE_MATHFN (BUILT_IN_ASINH
)
1806 CASE_MATHFN (BUILT_IN_ATAN
)
1807 CASE_MATHFN (BUILT_IN_ATAN2
)
1808 CASE_MATHFN (BUILT_IN_ATANH
)
1809 CASE_MATHFN (BUILT_IN_CBRT
)
1810 CASE_MATHFN (BUILT_IN_CEIL
)
1811 CASE_MATHFN (BUILT_IN_CEXPI
)
1812 CASE_MATHFN (BUILT_IN_COPYSIGN
)
1813 CASE_MATHFN (BUILT_IN_COS
)
1814 CASE_MATHFN (BUILT_IN_COSH
)
1815 CASE_MATHFN (BUILT_IN_DREM
)
1816 CASE_MATHFN (BUILT_IN_ERF
)
1817 CASE_MATHFN (BUILT_IN_ERFC
)
1818 CASE_MATHFN (BUILT_IN_EXP
)
1819 CASE_MATHFN (BUILT_IN_EXP10
)
1820 CASE_MATHFN (BUILT_IN_EXP2
)
1821 CASE_MATHFN (BUILT_IN_EXPM1
)
1822 CASE_MATHFN (BUILT_IN_FABS
)
1823 CASE_MATHFN (BUILT_IN_FDIM
)
1824 CASE_MATHFN (BUILT_IN_FLOOR
)
1825 CASE_MATHFN (BUILT_IN_FMA
)
1826 CASE_MATHFN (BUILT_IN_FMAX
)
1827 CASE_MATHFN (BUILT_IN_FMIN
)
1828 CASE_MATHFN (BUILT_IN_FMOD
)
1829 CASE_MATHFN (BUILT_IN_FREXP
)
1830 CASE_MATHFN (BUILT_IN_GAMMA
)
1831 CASE_MATHFN_REENT (BUILT_IN_GAMMA
) /* GAMMA_R */
1832 CASE_MATHFN (BUILT_IN_HUGE_VAL
)
1833 CASE_MATHFN (BUILT_IN_HYPOT
)
1834 CASE_MATHFN (BUILT_IN_ILOGB
)
1835 CASE_MATHFN (BUILT_IN_ICEIL
)
1836 CASE_MATHFN (BUILT_IN_IFLOOR
)
1837 CASE_MATHFN (BUILT_IN_INF
)
1838 CASE_MATHFN (BUILT_IN_IRINT
)
1839 CASE_MATHFN (BUILT_IN_IROUND
)
1840 CASE_MATHFN (BUILT_IN_ISINF
)
1841 CASE_MATHFN (BUILT_IN_J0
)
1842 CASE_MATHFN (BUILT_IN_J1
)
1843 CASE_MATHFN (BUILT_IN_JN
)
1844 CASE_MATHFN (BUILT_IN_LCEIL
)
1845 CASE_MATHFN (BUILT_IN_LDEXP
)
1846 CASE_MATHFN (BUILT_IN_LFLOOR
)
1847 CASE_MATHFN (BUILT_IN_LGAMMA
)
1848 CASE_MATHFN_REENT (BUILT_IN_LGAMMA
) /* LGAMMA_R */
1849 CASE_MATHFN (BUILT_IN_LLCEIL
)
1850 CASE_MATHFN (BUILT_IN_LLFLOOR
)
1851 CASE_MATHFN (BUILT_IN_LLRINT
)
1852 CASE_MATHFN (BUILT_IN_LLROUND
)
1853 CASE_MATHFN (BUILT_IN_LOG
)
1854 CASE_MATHFN (BUILT_IN_LOG10
)
1855 CASE_MATHFN (BUILT_IN_LOG1P
)
1856 CASE_MATHFN (BUILT_IN_LOG2
)
1857 CASE_MATHFN (BUILT_IN_LOGB
)
1858 CASE_MATHFN (BUILT_IN_LRINT
)
1859 CASE_MATHFN (BUILT_IN_LROUND
)
1860 CASE_MATHFN (BUILT_IN_MODF
)
1861 CASE_MATHFN (BUILT_IN_NAN
)
1862 CASE_MATHFN (BUILT_IN_NANS
)
1863 CASE_MATHFN (BUILT_IN_NEARBYINT
)
1864 CASE_MATHFN (BUILT_IN_NEXTAFTER
)
1865 CASE_MATHFN (BUILT_IN_NEXTTOWARD
)
1866 CASE_MATHFN (BUILT_IN_POW
)
1867 CASE_MATHFN (BUILT_IN_POWI
)
1868 CASE_MATHFN (BUILT_IN_POW10
)
1869 CASE_MATHFN (BUILT_IN_REMAINDER
)
1870 CASE_MATHFN (BUILT_IN_REMQUO
)
1871 CASE_MATHFN (BUILT_IN_RINT
)
1872 CASE_MATHFN (BUILT_IN_ROUND
)
1873 CASE_MATHFN (BUILT_IN_SCALB
)
1874 CASE_MATHFN (BUILT_IN_SCALBLN
)
1875 CASE_MATHFN (BUILT_IN_SCALBN
)
1876 CASE_MATHFN (BUILT_IN_SIGNBIT
)
1877 CASE_MATHFN (BUILT_IN_SIGNIFICAND
)
1878 CASE_MATHFN (BUILT_IN_SIN
)
1879 CASE_MATHFN (BUILT_IN_SINCOS
)
1880 CASE_MATHFN (BUILT_IN_SINH
)
1881 CASE_MATHFN (BUILT_IN_SQRT
)
1882 CASE_MATHFN (BUILT_IN_TAN
)
1883 CASE_MATHFN (BUILT_IN_TANH
)
1884 CASE_MATHFN (BUILT_IN_TGAMMA
)
1885 CASE_MATHFN (BUILT_IN_TRUNC
)
1886 CASE_MATHFN (BUILT_IN_Y0
)
1887 CASE_MATHFN (BUILT_IN_Y1
)
1888 CASE_MATHFN (BUILT_IN_YN
)
/* Select the variant matching TYPE; presumably fcode2 = fcode / fcodef /
   fcodel in the three arms below (assignments lost in extraction).  */
1894 if (TYPE_MAIN_VARIANT (type
) == double_type_node
)
1896 else if (TYPE_MAIN_VARIANT (type
) == float_type_node
)
1898 else if (TYPE_MAIN_VARIANT (type
) == long_double_type_node
)
1903 if (implicit_p
&& !builtin_decl_implicit_p (fcode2
))
1906 return builtin_decl_explicit (fcode2
);
1909 /* Like mathfn_built_in_1(), but always use the implicit builtin
   declarations (implicit_p == 1). */
1912 mathfn_built_in (tree type
, enum built_in_function fn
)
1914 return mathfn_built_in_1 (type
, fn
, /*implicit=*/ 1);
1917 /* If errno must be maintained, expand the RTL to check if the result,
1918 TARGET, of a built-in function call, EXP, is NaN, and if so set
/* NOTE(review): comment tail ("...errno to EDOM."), braces, `#else',
   `#endif', the errno_rtx declaration for the non-GEN_ERRNO_RTX arm,
   and the `emit_label (lab)' tail appear dropped by the extraction.  */
1922 expand_errno_check (tree exp
, rtx target
)
1924 rtx lab
= gen_label_rtx ();
1926 /* Test the result; if it is NaN, set errno=EDOM because
1927 the argument was not in the domain. */
/* A NaN is the only value for which x == x is false, hence the
   self-comparison: jump to LAB (skip errno update) when not NaN.  */
1928 do_compare_rtx_and_jump (target
, target
, EQ
, 0, GET_MODE (target
),
1929 NULL_RTX
, NULL_RTX
, lab
,
1930 /* The jump is very likely. */
1931 REG_BR_PROB_BASE
- (REG_BR_PROB_BASE
/ 2000 - 1));
1934 /* If this built-in doesn't throw an exception, set errno directly. */
1935 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp
), 0)))
1937 #ifdef GEN_ERRNO_RTX
1938 rtx errno_rtx
= GEN_ERRNO_RTX
;
1941 = gen_rtx_MEM (word_mode
, gen_rtx_SYMBOL_REF (Pmode
, "errno"));
1943 emit_move_insn (errno_rtx
, GEN_INT (TARGET_EDOM
));
1949 /* Make sure the library call isn't expanded as a tail call. */
1950 CALL_EXPR_TAILCALL (exp
) = 0;
1952 /* We can't set errno=EDOM directly; let the library call do it.
1953 Pop the arguments right away in case the call gets deleted. */
1955 expand_call (exp
, target
, 0);
1960 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1961 Return NULL_RTX if a normal call should be emitted rather than expanding
1962 the function in-line. EXP is the expression that is a call to the builtin
1963 function; if convenient, the result should be placed in TARGET.
1964 SUBTARGET may be used as the target for computing one of EXP's operands. */
1967 expand_builtin_mathfn (tree exp
, rtx target
, rtx subtarget
)
1969 optab builtin_optab
;
1971 tree fndecl
= get_callee_fndecl (exp
);
1972 enum machine_mode mode
;
1973 bool errno_set
= false;
1976 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
1979 arg
= CALL_EXPR_ARG (exp
, 0);
1981 switch (DECL_FUNCTION_CODE (fndecl
))
1983 CASE_FLT_FN (BUILT_IN_SQRT
):
1984 errno_set
= ! tree_expr_nonnegative_p (arg
);
1985 builtin_optab
= sqrt_optab
;
1987 CASE_FLT_FN (BUILT_IN_EXP
):
1988 errno_set
= true; builtin_optab
= exp_optab
; break;
1989 CASE_FLT_FN (BUILT_IN_EXP10
):
1990 CASE_FLT_FN (BUILT_IN_POW10
):
1991 errno_set
= true; builtin_optab
= exp10_optab
; break;
1992 CASE_FLT_FN (BUILT_IN_EXP2
):
1993 errno_set
= true; builtin_optab
= exp2_optab
; break;
1994 CASE_FLT_FN (BUILT_IN_EXPM1
):
1995 errno_set
= true; builtin_optab
= expm1_optab
; break;
1996 CASE_FLT_FN (BUILT_IN_LOGB
):
1997 errno_set
= true; builtin_optab
= logb_optab
; break;
1998 CASE_FLT_FN (BUILT_IN_LOG
):
1999 errno_set
= true; builtin_optab
= log_optab
; break;
2000 CASE_FLT_FN (BUILT_IN_LOG10
):
2001 errno_set
= true; builtin_optab
= log10_optab
; break;
2002 CASE_FLT_FN (BUILT_IN_LOG2
):
2003 errno_set
= true; builtin_optab
= log2_optab
; break;
2004 CASE_FLT_FN (BUILT_IN_LOG1P
):
2005 errno_set
= true; builtin_optab
= log1p_optab
; break;
2006 CASE_FLT_FN (BUILT_IN_ASIN
):
2007 builtin_optab
= asin_optab
; break;
2008 CASE_FLT_FN (BUILT_IN_ACOS
):
2009 builtin_optab
= acos_optab
; break;
2010 CASE_FLT_FN (BUILT_IN_TAN
):
2011 builtin_optab
= tan_optab
; break;
2012 CASE_FLT_FN (BUILT_IN_ATAN
):
2013 builtin_optab
= atan_optab
; break;
2014 CASE_FLT_FN (BUILT_IN_FLOOR
):
2015 builtin_optab
= floor_optab
; break;
2016 CASE_FLT_FN (BUILT_IN_CEIL
):
2017 builtin_optab
= ceil_optab
; break;
2018 CASE_FLT_FN (BUILT_IN_TRUNC
):
2019 builtin_optab
= btrunc_optab
; break;
2020 CASE_FLT_FN (BUILT_IN_ROUND
):
2021 builtin_optab
= round_optab
; break;
2022 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
2023 builtin_optab
= nearbyint_optab
;
2024 if (flag_trapping_math
)
2026 /* Else fallthrough and expand as rint. */
2027 CASE_FLT_FN (BUILT_IN_RINT
):
2028 builtin_optab
= rint_optab
; break;
2029 CASE_FLT_FN (BUILT_IN_SIGNIFICAND
):
2030 builtin_optab
= significand_optab
; break;
2035 /* Make a suitable register to place result in. */
2036 mode
= TYPE_MODE (TREE_TYPE (exp
));
2038 if (! flag_errno_math
|| ! HONOR_NANS (mode
))
2041 /* Before working hard, check whether the instruction is available. */
2042 if (optab_handler (builtin_optab
, mode
) != CODE_FOR_nothing
2043 && (!errno_set
|| !optimize_insn_for_size_p ()))
2045 target
= gen_reg_rtx (mode
);
2047 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2048 need to expand the argument again. This way, we will not perform
2049 side-effects more the once. */
2050 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2052 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2056 /* Compute into TARGET.
2057 Set TARGET to wherever the result comes back. */
2058 target
= expand_unop (mode
, builtin_optab
, op0
, target
, 0);
2063 expand_errno_check (exp
, target
);
2065 /* Output the entire sequence. */
2066 insns
= get_insns ();
2072 /* If we were unable to expand via the builtin, stop the sequence
2073 (without outputting the insns) and call to the library function
2074 with the stabilized argument list. */
2078 return expand_call (exp
, target
, target
== const0_rtx
);
2081 /* Expand a call to the builtin binary math functions (pow and atan2).
2082 Return NULL_RTX if a normal call should be emitted rather than expanding the
2083 function in-line. EXP is the expression that is a call to the builtin
2084 function; if convenient, the result should be placed in TARGET.
2085 SUBTARGET may be used as the target for computing one of EXP's
2089 expand_builtin_mathfn_2 (tree exp
, rtx target
, rtx subtarget
)
2091 optab builtin_optab
;
2092 rtx op0
, op1
, insns
;
2093 int op1_type
= REAL_TYPE
;
2094 tree fndecl
= get_callee_fndecl (exp
);
2096 enum machine_mode mode
;
2097 bool errno_set
= true;
2099 switch (DECL_FUNCTION_CODE (fndecl
))
2101 CASE_FLT_FN (BUILT_IN_SCALBN
):
2102 CASE_FLT_FN (BUILT_IN_SCALBLN
):
2103 CASE_FLT_FN (BUILT_IN_LDEXP
):
2104 op1_type
= INTEGER_TYPE
;
2109 if (!validate_arglist (exp
, REAL_TYPE
, op1_type
, VOID_TYPE
))
2112 arg0
= CALL_EXPR_ARG (exp
, 0);
2113 arg1
= CALL_EXPR_ARG (exp
, 1);
2115 switch (DECL_FUNCTION_CODE (fndecl
))
2117 CASE_FLT_FN (BUILT_IN_POW
):
2118 builtin_optab
= pow_optab
; break;
2119 CASE_FLT_FN (BUILT_IN_ATAN2
):
2120 builtin_optab
= atan2_optab
; break;
2121 CASE_FLT_FN (BUILT_IN_SCALB
):
2122 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp
)))->b
!= 2)
2124 builtin_optab
= scalb_optab
; break;
2125 CASE_FLT_FN (BUILT_IN_SCALBN
):
2126 CASE_FLT_FN (BUILT_IN_SCALBLN
):
2127 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp
)))->b
!= 2)
2129 /* Fall through... */
2130 CASE_FLT_FN (BUILT_IN_LDEXP
):
2131 builtin_optab
= ldexp_optab
; break;
2132 CASE_FLT_FN (BUILT_IN_FMOD
):
2133 builtin_optab
= fmod_optab
; break;
2134 CASE_FLT_FN (BUILT_IN_REMAINDER
):
2135 CASE_FLT_FN (BUILT_IN_DREM
):
2136 builtin_optab
= remainder_optab
; break;
2141 /* Make a suitable register to place result in. */
2142 mode
= TYPE_MODE (TREE_TYPE (exp
));
2144 /* Before working hard, check whether the instruction is available. */
2145 if (optab_handler (builtin_optab
, mode
) == CODE_FOR_nothing
)
2148 target
= gen_reg_rtx (mode
);
2150 if (! flag_errno_math
|| ! HONOR_NANS (mode
))
2153 if (errno_set
&& optimize_insn_for_size_p ())
2156 /* Always stabilize the argument list. */
2157 CALL_EXPR_ARG (exp
, 0) = arg0
= builtin_save_expr (arg0
);
2158 CALL_EXPR_ARG (exp
, 1) = arg1
= builtin_save_expr (arg1
);
2160 op0
= expand_expr (arg0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2161 op1
= expand_normal (arg1
);
2165 /* Compute into TARGET.
2166 Set TARGET to wherever the result comes back. */
2167 target
= expand_binop (mode
, builtin_optab
, op0
, op1
,
2168 target
, 0, OPTAB_DIRECT
);
2170 /* If we were unable to expand via the builtin, stop the sequence
2171 (without outputting the insns) and call to the library function
2172 with the stabilized argument list. */
2176 return expand_call (exp
, target
, target
== const0_rtx
);
2180 expand_errno_check (exp
, target
);
2182 /* Output the entire sequence. */
2183 insns
= get_insns ();
2190 /* Expand a call to the builtin trinary math functions (fma).
2191 Return NULL_RTX if a normal call should be emitted rather than expanding the
2192 function in-line. EXP is the expression that is a call to the builtin
2193 function; if convenient, the result should be placed in TARGET.
2194 SUBTARGET may be used as the target for computing one of EXP's
2198 expand_builtin_mathfn_ternary (tree exp
, rtx target
, rtx subtarget
)
2200 optab builtin_optab
;
2201 rtx op0
, op1
, op2
, insns
;
2202 tree fndecl
= get_callee_fndecl (exp
);
2203 tree arg0
, arg1
, arg2
;
2204 enum machine_mode mode
;
2206 if (!validate_arglist (exp
, REAL_TYPE
, REAL_TYPE
, REAL_TYPE
, VOID_TYPE
))
2209 arg0
= CALL_EXPR_ARG (exp
, 0);
2210 arg1
= CALL_EXPR_ARG (exp
, 1);
2211 arg2
= CALL_EXPR_ARG (exp
, 2);
2213 switch (DECL_FUNCTION_CODE (fndecl
))
2215 CASE_FLT_FN (BUILT_IN_FMA
):
2216 builtin_optab
= fma_optab
; break;
2221 /* Make a suitable register to place result in. */
2222 mode
= TYPE_MODE (TREE_TYPE (exp
));
2224 /* Before working hard, check whether the instruction is available. */
2225 if (optab_handler (builtin_optab
, mode
) == CODE_FOR_nothing
)
2228 target
= gen_reg_rtx (mode
);
2230 /* Always stabilize the argument list. */
2231 CALL_EXPR_ARG (exp
, 0) = arg0
= builtin_save_expr (arg0
);
2232 CALL_EXPR_ARG (exp
, 1) = arg1
= builtin_save_expr (arg1
);
2233 CALL_EXPR_ARG (exp
, 2) = arg2
= builtin_save_expr (arg2
);
2235 op0
= expand_expr (arg0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2236 op1
= expand_normal (arg1
);
2237 op2
= expand_normal (arg2
);
2241 /* Compute into TARGET.
2242 Set TARGET to wherever the result comes back. */
2243 target
= expand_ternary_op (mode
, builtin_optab
, op0
, op1
, op2
,
2246 /* If we were unable to expand via the builtin, stop the sequence
2247 (without outputting the insns) and call to the library function
2248 with the stabilized argument list. */
2252 return expand_call (exp
, target
, target
== const0_rtx
);
2255 /* Output the entire sequence. */
2256 insns
= get_insns ();
2263 /* Expand a call to the builtin sin and cos math functions.
2264 Return NULL_RTX if a normal call should be emitted rather than expanding the
2265 function in-line. EXP is the expression that is a call to the builtin
2266 function; if convenient, the result should be placed in TARGET.
2267 SUBTARGET may be used as the target for computing one of EXP's
2271 expand_builtin_mathfn_3 (tree exp
, rtx target
, rtx subtarget
)
2273 optab builtin_optab
;
2275 tree fndecl
= get_callee_fndecl (exp
);
2276 enum machine_mode mode
;
2279 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2282 arg
= CALL_EXPR_ARG (exp
, 0);
2284 switch (DECL_FUNCTION_CODE (fndecl
))
2286 CASE_FLT_FN (BUILT_IN_SIN
):
2287 CASE_FLT_FN (BUILT_IN_COS
):
2288 builtin_optab
= sincos_optab
; break;
2293 /* Make a suitable register to place result in. */
2294 mode
= TYPE_MODE (TREE_TYPE (exp
));
2296 /* Check if sincos insn is available, otherwise fallback
2297 to sin or cos insn. */
2298 if (optab_handler (builtin_optab
, mode
) == CODE_FOR_nothing
)
2299 switch (DECL_FUNCTION_CODE (fndecl
))
2301 CASE_FLT_FN (BUILT_IN_SIN
):
2302 builtin_optab
= sin_optab
; break;
2303 CASE_FLT_FN (BUILT_IN_COS
):
2304 builtin_optab
= cos_optab
; break;
2309 /* Before working hard, check whether the instruction is available. */
2310 if (optab_handler (builtin_optab
, mode
) != CODE_FOR_nothing
)
2312 target
= gen_reg_rtx (mode
);
2314 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2315 need to expand the argument again. This way, we will not perform
2316 side-effects more the once. */
2317 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2319 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2323 /* Compute into TARGET.
2324 Set TARGET to wherever the result comes back. */
2325 if (builtin_optab
== sincos_optab
)
2329 switch (DECL_FUNCTION_CODE (fndecl
))
2331 CASE_FLT_FN (BUILT_IN_SIN
):
2332 result
= expand_twoval_unop (builtin_optab
, op0
, 0, target
, 0);
2334 CASE_FLT_FN (BUILT_IN_COS
):
2335 result
= expand_twoval_unop (builtin_optab
, op0
, target
, 0, 0);
2340 gcc_assert (result
);
2344 target
= expand_unop (mode
, builtin_optab
, op0
, target
, 0);
2349 /* Output the entire sequence. */
2350 insns
= get_insns ();
2356 /* If we were unable to expand via the builtin, stop the sequence
2357 (without outputting the insns) and call to the library function
2358 with the stabilized argument list. */
2362 target
= expand_call (exp
, target
, target
== const0_rtx
);
2367 /* Given an interclass math builtin decl FNDECL and it's argument ARG
2368 return an RTL instruction code that implements the functionality.
2369 If that isn't possible or available return CODE_FOR_nothing. */
2371 static enum insn_code
2372 interclass_mathfn_icode (tree arg
, tree fndecl
)
2374 bool errno_set
= false;
2375 optab builtin_optab
= unknown_optab
;
2376 enum machine_mode mode
;
2378 switch (DECL_FUNCTION_CODE (fndecl
))
2380 CASE_FLT_FN (BUILT_IN_ILOGB
):
2381 errno_set
= true; builtin_optab
= ilogb_optab
; break;
2382 CASE_FLT_FN (BUILT_IN_ISINF
):
2383 builtin_optab
= isinf_optab
; break;
2384 case BUILT_IN_ISNORMAL
:
2385 case BUILT_IN_ISFINITE
:
2386 CASE_FLT_FN (BUILT_IN_FINITE
):
2387 case BUILT_IN_FINITED32
:
2388 case BUILT_IN_FINITED64
:
2389 case BUILT_IN_FINITED128
:
2390 case BUILT_IN_ISINFD32
:
2391 case BUILT_IN_ISINFD64
:
2392 case BUILT_IN_ISINFD128
:
2393 /* These builtins have no optabs (yet). */
2399 /* There's no easy way to detect the case we need to set EDOM. */
2400 if (flag_errno_math
&& errno_set
)
2401 return CODE_FOR_nothing
;
2403 /* Optab mode depends on the mode of the input argument. */
2404 mode
= TYPE_MODE (TREE_TYPE (arg
));
2407 return optab_handler (builtin_optab
, mode
);
2408 return CODE_FOR_nothing
;
2411 /* Expand a call to one of the builtin math functions that operate on
2412 floating point argument and output an integer result (ilogb, isinf,
2414 Return 0 if a normal call should be emitted rather than expanding the
2415 function in-line. EXP is the expression that is a call to the builtin
2416 function; if convenient, the result should be placed in TARGET. */
2419 expand_builtin_interclass_mathfn (tree exp
, rtx target
)
2421 enum insn_code icode
= CODE_FOR_nothing
;
2423 tree fndecl
= get_callee_fndecl (exp
);
2424 enum machine_mode mode
;
2427 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2430 arg
= CALL_EXPR_ARG (exp
, 0);
2431 icode
= interclass_mathfn_icode (arg
, fndecl
);
2432 mode
= TYPE_MODE (TREE_TYPE (arg
));
2434 if (icode
!= CODE_FOR_nothing
)
2436 struct expand_operand ops
[1];
2437 rtx last
= get_last_insn ();
2438 tree orig_arg
= arg
;
2440 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2441 need to expand the argument again. This way, we will not perform
2442 side-effects more the once. */
2443 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2445 op0
= expand_expr (arg
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
2447 if (mode
!= GET_MODE (op0
))
2448 op0
= convert_to_mode (mode
, op0
, 0);
2450 create_output_operand (&ops
[0], target
, TYPE_MODE (TREE_TYPE (exp
)));
2451 if (maybe_legitimize_operands (icode
, 0, 1, ops
)
2452 && maybe_emit_unop_insn (icode
, ops
[0].value
, op0
, UNKNOWN
))
2453 return ops
[0].value
;
2455 delete_insns_since (last
);
2456 CALL_EXPR_ARG (exp
, 0) = orig_arg
;
2462 /* Expand a call to the builtin sincos math function.
2463 Return NULL_RTX if a normal call should be emitted rather than expanding the
2464 function in-line. EXP is the expression that is a call to the builtin
2468 expand_builtin_sincos (tree exp
)
2470 rtx op0
, op1
, op2
, target1
, target2
;
2471 enum machine_mode mode
;
2472 tree arg
, sinp
, cosp
;
2474 location_t loc
= EXPR_LOCATION (exp
);
2475 tree alias_type
, alias_off
;
2477 if (!validate_arglist (exp
, REAL_TYPE
,
2478 POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
2481 arg
= CALL_EXPR_ARG (exp
, 0);
2482 sinp
= CALL_EXPR_ARG (exp
, 1);
2483 cosp
= CALL_EXPR_ARG (exp
, 2);
2485 /* Make a suitable register to place result in. */
2486 mode
= TYPE_MODE (TREE_TYPE (arg
));
2488 /* Check if sincos insn is available, otherwise emit the call. */
2489 if (optab_handler (sincos_optab
, mode
) == CODE_FOR_nothing
)
2492 target1
= gen_reg_rtx (mode
);
2493 target2
= gen_reg_rtx (mode
);
2495 op0
= expand_normal (arg
);
2496 alias_type
= build_pointer_type_for_mode (TREE_TYPE (arg
), ptr_mode
, true);
2497 alias_off
= build_int_cst (alias_type
, 0);
2498 op1
= expand_normal (fold_build2_loc (loc
, MEM_REF
, TREE_TYPE (arg
),
2500 op2
= expand_normal (fold_build2_loc (loc
, MEM_REF
, TREE_TYPE (arg
),
2503 /* Compute into target1 and target2.
2504 Set TARGET to wherever the result comes back. */
2505 result
= expand_twoval_unop (sincos_optab
, op0
, target2
, target1
, 0);
2506 gcc_assert (result
);
2508 /* Move target1 and target2 to the memory locations indicated
2510 emit_move_insn (op1
, target1
);
2511 emit_move_insn (op2
, target2
);
2516 /* Expand a call to the internal cexpi builtin to the sincos math function.
2517 EXP is the expression that is a call to the builtin function; if convenient,
2518 the result should be placed in TARGET. */
2521 expand_builtin_cexpi (tree exp
, rtx target
)
2523 tree fndecl
= get_callee_fndecl (exp
);
2525 enum machine_mode mode
;
2527 location_t loc
= EXPR_LOCATION (exp
);
2529 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2532 arg
= CALL_EXPR_ARG (exp
, 0);
2533 type
= TREE_TYPE (arg
);
2534 mode
= TYPE_MODE (TREE_TYPE (arg
));
2536 /* Try expanding via a sincos optab, fall back to emitting a libcall
2537 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2538 is only generated from sincos, cexp or if we have either of them. */
2539 if (optab_handler (sincos_optab
, mode
) != CODE_FOR_nothing
)
2541 op1
= gen_reg_rtx (mode
);
2542 op2
= gen_reg_rtx (mode
);
2544 op0
= expand_expr (arg
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
2546 /* Compute into op1 and op2. */
2547 expand_twoval_unop (sincos_optab
, op0
, op2
, op1
, 0);
2549 else if (TARGET_HAS_SINCOS
)
2551 tree call
, fn
= NULL_TREE
;
2555 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2556 fn
= builtin_decl_explicit (BUILT_IN_SINCOSF
);
2557 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2558 fn
= builtin_decl_explicit (BUILT_IN_SINCOS
);
2559 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2560 fn
= builtin_decl_explicit (BUILT_IN_SINCOSL
);
2564 op1
= assign_temp (TREE_TYPE (arg
), 1, 1);
2565 op2
= assign_temp (TREE_TYPE (arg
), 1, 1);
2566 op1a
= copy_addr_to_reg (XEXP (op1
, 0));
2567 op2a
= copy_addr_to_reg (XEXP (op2
, 0));
2568 top1
= make_tree (build_pointer_type (TREE_TYPE (arg
)), op1a
);
2569 top2
= make_tree (build_pointer_type (TREE_TYPE (arg
)), op2a
);
2571 /* Make sure not to fold the sincos call again. */
2572 call
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
2573 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn
)),
2574 call
, 3, arg
, top1
, top2
));
2578 tree call
, fn
= NULL_TREE
, narg
;
2579 tree ctype
= build_complex_type (type
);
2581 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2582 fn
= builtin_decl_explicit (BUILT_IN_CEXPF
);
2583 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2584 fn
= builtin_decl_explicit (BUILT_IN_CEXP
);
2585 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2586 fn
= builtin_decl_explicit (BUILT_IN_CEXPL
);
2590 /* If we don't have a decl for cexp create one. This is the
2591 friendliest fallback if the user calls __builtin_cexpi
2592 without full target C99 function support. */
2593 if (fn
== NULL_TREE
)
2596 const char *name
= NULL
;
2598 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2600 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2602 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2605 fntype
= build_function_type_list (ctype
, ctype
, NULL_TREE
);
2606 fn
= build_fn_decl (name
, fntype
);
2609 narg
= fold_build2_loc (loc
, COMPLEX_EXPR
, ctype
,
2610 build_real (type
, dconst0
), arg
);
2612 /* Make sure not to fold the cexp call again. */
2613 call
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
2614 return expand_expr (build_call_nary (ctype
, call
, 1, narg
),
2615 target
, VOIDmode
, EXPAND_NORMAL
);
2618 /* Now build the proper return type. */
2619 return expand_expr (build2 (COMPLEX_EXPR
, build_complex_type (type
),
2620 make_tree (TREE_TYPE (arg
), op2
),
2621 make_tree (TREE_TYPE (arg
), op1
)),
2622 target
, VOIDmode
, EXPAND_NORMAL
);
2625 /* Conveniently construct a function call expression. FNDECL names the
2626 function to be called, N is the number of arguments, and the "..."
2627 parameters are the argument expressions. Unlike build_call_exr
2628 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2631 build_call_nofold_loc (location_t loc
, tree fndecl
, int n
, ...)
2634 tree fntype
= TREE_TYPE (fndecl
);
2635 tree fn
= build1 (ADDR_EXPR
, build_pointer_type (fntype
), fndecl
);
2638 fn
= build_call_valist (TREE_TYPE (fntype
), fn
, n
, ap
);
2640 SET_EXPR_LOCATION (fn
, loc
);
2644 /* Expand a call to one of the builtin rounding functions gcc defines
2645 as an extension (lfloor and lceil). As these are gcc extensions we
2646 do not need to worry about setting errno to EDOM.
2647 If expanding via optab fails, lower expression to (int)(floor(x)).
2648 EXP is the expression that is a call to the builtin function;
2649 if convenient, the result should be placed in TARGET. */
2652 expand_builtin_int_roundingfn (tree exp
, rtx target
)
2654 convert_optab builtin_optab
;
2655 rtx op0
, insns
, tmp
;
2656 tree fndecl
= get_callee_fndecl (exp
);
2657 enum built_in_function fallback_fn
;
2658 tree fallback_fndecl
;
2659 enum machine_mode mode
;
2662 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2665 arg
= CALL_EXPR_ARG (exp
, 0);
2667 switch (DECL_FUNCTION_CODE (fndecl
))
2669 CASE_FLT_FN (BUILT_IN_ICEIL
):
2670 CASE_FLT_FN (BUILT_IN_LCEIL
):
2671 CASE_FLT_FN (BUILT_IN_LLCEIL
):
2672 builtin_optab
= lceil_optab
;
2673 fallback_fn
= BUILT_IN_CEIL
;
2676 CASE_FLT_FN (BUILT_IN_IFLOOR
):
2677 CASE_FLT_FN (BUILT_IN_LFLOOR
):
2678 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
2679 builtin_optab
= lfloor_optab
;
2680 fallback_fn
= BUILT_IN_FLOOR
;
2687 /* Make a suitable register to place result in. */
2688 mode
= TYPE_MODE (TREE_TYPE (exp
));
2690 target
= gen_reg_rtx (mode
);
2692 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2693 need to expand the argument again. This way, we will not perform
2694 side-effects more the once. */
2695 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2697 op0
= expand_expr (arg
, NULL
, VOIDmode
, EXPAND_NORMAL
);
2701 /* Compute into TARGET. */
2702 if (expand_sfix_optab (target
, op0
, builtin_optab
))
2704 /* Output the entire sequence. */
2705 insns
= get_insns ();
2711 /* If we were unable to expand via the builtin, stop the sequence
2712 (without outputting the insns). */
2715 /* Fall back to floating point rounding optab. */
2716 fallback_fndecl
= mathfn_built_in (TREE_TYPE (arg
), fallback_fn
);
2718 /* For non-C99 targets we may end up without a fallback fndecl here
2719 if the user called __builtin_lfloor directly. In this case emit
2720 a call to the floor/ceil variants nevertheless. This should result
2721 in the best user experience for not full C99 targets. */
2722 if (fallback_fndecl
== NULL_TREE
)
2725 const char *name
= NULL
;
2727 switch (DECL_FUNCTION_CODE (fndecl
))
2729 case BUILT_IN_ICEIL
:
2730 case BUILT_IN_LCEIL
:
2731 case BUILT_IN_LLCEIL
:
2734 case BUILT_IN_ICEILF
:
2735 case BUILT_IN_LCEILF
:
2736 case BUILT_IN_LLCEILF
:
2739 case BUILT_IN_ICEILL
:
2740 case BUILT_IN_LCEILL
:
2741 case BUILT_IN_LLCEILL
:
2744 case BUILT_IN_IFLOOR
:
2745 case BUILT_IN_LFLOOR
:
2746 case BUILT_IN_LLFLOOR
:
2749 case BUILT_IN_IFLOORF
:
2750 case BUILT_IN_LFLOORF
:
2751 case BUILT_IN_LLFLOORF
:
2754 case BUILT_IN_IFLOORL
:
2755 case BUILT_IN_LFLOORL
:
2756 case BUILT_IN_LLFLOORL
:
2763 fntype
= build_function_type_list (TREE_TYPE (arg
),
2764 TREE_TYPE (arg
), NULL_TREE
);
2765 fallback_fndecl
= build_fn_decl (name
, fntype
);
2768 exp
= build_call_nofold_loc (EXPR_LOCATION (exp
), fallback_fndecl
, 1, arg
);
2770 tmp
= expand_normal (exp
);
2772 /* Truncate the result of floating point optab to integer
2773 via expand_fix (). */
2774 target
= gen_reg_rtx (mode
);
2775 expand_fix (target
, tmp
, 0);
2780 /* Expand a call to one of the builtin math functions doing integer
2782 Return 0 if a normal call should be emitted rather than expanding the
2783 function in-line. EXP is the expression that is a call to the builtin
2784 function; if convenient, the result should be placed in TARGET. */
2787 expand_builtin_int_roundingfn_2 (tree exp
, rtx target
)
2789 convert_optab builtin_optab
;
2791 tree fndecl
= get_callee_fndecl (exp
);
2793 enum machine_mode mode
;
2794 enum built_in_function fallback_fn
= BUILT_IN_NONE
;
2796 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2799 arg
= CALL_EXPR_ARG (exp
, 0);
2801 switch (DECL_FUNCTION_CODE (fndecl
))
2803 CASE_FLT_FN (BUILT_IN_IRINT
):
2804 fallback_fn
= BUILT_IN_LRINT
;
2806 CASE_FLT_FN (BUILT_IN_LRINT
):
2807 CASE_FLT_FN (BUILT_IN_LLRINT
):
2808 builtin_optab
= lrint_optab
;
2811 CASE_FLT_FN (BUILT_IN_IROUND
):
2812 fallback_fn
= BUILT_IN_LROUND
;
2814 CASE_FLT_FN (BUILT_IN_LROUND
):
2815 CASE_FLT_FN (BUILT_IN_LLROUND
):
2816 builtin_optab
= lround_optab
;
2823 /* There's no easy way to detect the case we need to set EDOM. */
2824 if (flag_errno_math
&& fallback_fn
== BUILT_IN_NONE
)
2827 /* Make a suitable register to place result in. */
2828 mode
= TYPE_MODE (TREE_TYPE (exp
));
2830 /* There's no easy way to detect the case we need to set EDOM. */
2831 if (!flag_errno_math
)
2833 target
= gen_reg_rtx (mode
);
2835 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2836 need to expand the argument again. This way, we will not perform
2837 side-effects more the once. */
2838 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2840 op0
= expand_expr (arg
, NULL
, VOIDmode
, EXPAND_NORMAL
);
2844 if (expand_sfix_optab (target
, op0
, builtin_optab
))
2846 /* Output the entire sequence. */
2847 insns
= get_insns ();
2853 /* If we were unable to expand via the builtin, stop the sequence
2854 (without outputting the insns) and call to the library function
2855 with the stabilized argument list. */
2859 if (fallback_fn
!= BUILT_IN_NONE
)
2861 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2862 targets, (int) round (x) should never be transformed into
2863 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2864 a call to lround in the hope that the target provides at least some
2865 C99 functions. This should result in the best user experience for
2866 not full C99 targets. */
2867 tree fallback_fndecl
= mathfn_built_in_1 (TREE_TYPE (arg
),
2870 exp
= build_call_nofold_loc (EXPR_LOCATION (exp
),
2871 fallback_fndecl
, 1, arg
);
2873 target
= expand_call (exp
, NULL_RTX
, target
== const0_rtx
);
2874 return convert_to_mode (mode
, target
, 0);
2877 target
= expand_call (exp
, target
, target
== const0_rtx
);
2882 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2883 a normal call should be emitted rather than expanding the function
2884 in-line. EXP is the expression that is a call to the builtin
2885 function; if convenient, the result should be placed in TARGET. */
2888 expand_builtin_powi (tree exp
, rtx target
)
2892 enum machine_mode mode
;
2893 enum machine_mode mode2
;
2895 if (! validate_arglist (exp
, REAL_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
2898 arg0
= CALL_EXPR_ARG (exp
, 0);
2899 arg1
= CALL_EXPR_ARG (exp
, 1);
2900 mode
= TYPE_MODE (TREE_TYPE (exp
));
2902 /* Emit a libcall to libgcc. */
2904 /* Mode of the 2nd argument must match that of an int. */
2905 mode2
= mode_for_size (INT_TYPE_SIZE
, MODE_INT
, 0);
2907 if (target
== NULL_RTX
)
2908 target
= gen_reg_rtx (mode
);
2910 op0
= expand_expr (arg0
, NULL_RTX
, mode
, EXPAND_NORMAL
);
2911 if (GET_MODE (op0
) != mode
)
2912 op0
= convert_to_mode (mode
, op0
, 0);
2913 op1
= expand_expr (arg1
, NULL_RTX
, mode2
, EXPAND_NORMAL
);
2914 if (GET_MODE (op1
) != mode2
)
2915 op1
= convert_to_mode (mode2
, op1
, 0);
2917 target
= emit_library_call_value (optab_libfunc (powi_optab
, mode
),
2918 target
, LCT_CONST
, mode
, 2,
2919 op0
, mode
, op1
, mode2
);
2924 /* Expand expression EXP which is a call to the strlen builtin. Return
2925 NULL_RTX if we failed the caller should emit a normal call, otherwise
2926 try to get the result in TARGET, if convenient. */
2929 expand_builtin_strlen (tree exp
, rtx target
,
2930 enum machine_mode target_mode
)
2932 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
2936 struct expand_operand ops
[4];
2939 tree src
= CALL_EXPR_ARG (exp
, 0);
2940 rtx src_reg
, before_strlen
;
2941 enum machine_mode insn_mode
= target_mode
;
2942 enum insn_code icode
= CODE_FOR_nothing
;
2945 /* If the length can be computed at compile-time, return it. */
2946 len
= c_strlen (src
, 0);
2948 return expand_expr (len
, target
, target_mode
, EXPAND_NORMAL
);
2950 /* If the length can be computed at compile-time and is constant
2951 integer, but there are side-effects in src, evaluate
2952 src for side-effects, then return len.
2953 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2954 can be optimized into: i++; x = 3; */
2955 len
= c_strlen (src
, 1);
2956 if (len
&& TREE_CODE (len
) == INTEGER_CST
)
2958 expand_expr (src
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
2959 return expand_expr (len
, target
, target_mode
, EXPAND_NORMAL
);
2962 align
= get_pointer_alignment (src
) / BITS_PER_UNIT
;
2964 /* If SRC is not a pointer type, don't do this operation inline. */
2968 /* Bail out if we can't compute strlen in the right mode. */
2969 while (insn_mode
!= VOIDmode
)
2971 icode
= optab_handler (strlen_optab
, insn_mode
);
2972 if (icode
!= CODE_FOR_nothing
)
2975 insn_mode
= GET_MODE_WIDER_MODE (insn_mode
);
2977 if (insn_mode
== VOIDmode
)
2980 /* Make a place to hold the source address. We will not expand
2981 the actual source until we are sure that the expansion will
2982 not fail -- there are trees that cannot be expanded twice. */
2983 src_reg
= gen_reg_rtx (Pmode
);
2985 /* Mark the beginning of the strlen sequence so we can emit the
2986 source operand later. */
2987 before_strlen
= get_last_insn ();
2989 create_output_operand (&ops
[0], target
, insn_mode
);
2990 create_fixed_operand (&ops
[1], gen_rtx_MEM (BLKmode
, src_reg
));
2991 create_integer_operand (&ops
[2], 0);
2992 create_integer_operand (&ops
[3], align
);
2993 if (!maybe_expand_insn (icode
, 4, ops
))
2996 /* Now that we are assured of success, expand the source. */
2998 pat
= expand_expr (src
, src_reg
, Pmode
, EXPAND_NORMAL
);
3001 #ifdef POINTERS_EXTEND_UNSIGNED
3002 if (GET_MODE (pat
) != Pmode
)
3003 pat
= convert_to_mode (Pmode
, pat
,
3004 POINTERS_EXTEND_UNSIGNED
);
3006 emit_move_insn (src_reg
, pat
);
3012 emit_insn_after (pat
, before_strlen
);
3014 emit_insn_before (pat
, get_insns ());
3016 /* Return the value in the proper mode for this function. */
3017 if (GET_MODE (ops
[0].value
) == target_mode
)
3018 target
= ops
[0].value
;
3019 else if (target
!= 0)
3020 convert_move (target
, ops
[0].value
, 0);
3022 target
= convert_to_mode (target_mode
, ops
[0].value
, 0);
3028 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3029 bytes from constant string DATA + OFFSET and return it as target
3033 builtin_memcpy_read_str (void *data
, HOST_WIDE_INT offset
,
3034 enum machine_mode mode
)
3036 const char *str
= (const char *) data
;
3038 gcc_assert (offset
>= 0
3039 && ((unsigned HOST_WIDE_INT
) offset
+ GET_MODE_SIZE (mode
)
3040 <= strlen (str
) + 1));
3042 return c_readstr (str
+ offset
, mode
);
3045 /* Expand a call EXP to the memcpy builtin.
3046 Return NULL_RTX if we failed, the caller should emit a normal call,
3047 otherwise try to get the result in TARGET, if convenient (and in
3048 mode MODE if that's convenient). */
3051 expand_builtin_memcpy (tree exp
, rtx target
)
3053 if (!validate_arglist (exp
,
3054 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3058 tree dest
= CALL_EXPR_ARG (exp
, 0);
3059 tree src
= CALL_EXPR_ARG (exp
, 1);
3060 tree len
= CALL_EXPR_ARG (exp
, 2);
3061 const char *src_str
;
3062 unsigned int src_align
= get_pointer_alignment (src
);
3063 unsigned int dest_align
= get_pointer_alignment (dest
);
3064 rtx dest_mem
, src_mem
, dest_addr
, len_rtx
;
3065 HOST_WIDE_INT expected_size
= -1;
3066 unsigned int expected_align
= 0;
3068 /* If DEST is not a pointer type, call the normal function. */
3069 if (dest_align
== 0)
3072 /* If either SRC is not a pointer type, don't do this
3073 operation in-line. */
3077 if (currently_expanding_gimple_stmt
)
3078 stringop_block_profile (currently_expanding_gimple_stmt
,
3079 &expected_align
, &expected_size
);
3081 if (expected_align
< dest_align
)
3082 expected_align
= dest_align
;
3083 dest_mem
= get_memory_rtx (dest
, len
);
3084 set_mem_align (dest_mem
, dest_align
);
3085 len_rtx
= expand_normal (len
);
3086 src_str
= c_getstr (src
);
3088 /* If SRC is a string constant and block move would be done
3089 by pieces, we can avoid loading the string from memory
3090 and only stored the computed constants. */
3092 && CONST_INT_P (len_rtx
)
3093 && (unsigned HOST_WIDE_INT
) INTVAL (len_rtx
) <= strlen (src_str
) + 1
3094 && can_store_by_pieces (INTVAL (len_rtx
), builtin_memcpy_read_str
,
3095 CONST_CAST (char *, src_str
),
3098 dest_mem
= store_by_pieces (dest_mem
, INTVAL (len_rtx
),
3099 builtin_memcpy_read_str
,
3100 CONST_CAST (char *, src_str
),
3101 dest_align
, false, 0);
3102 dest_mem
= force_operand (XEXP (dest_mem
, 0), target
);
3103 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3107 src_mem
= get_memory_rtx (src
, len
);
3108 set_mem_align (src_mem
, src_align
);
3110 /* Copy word part most expediently. */
3111 dest_addr
= emit_block_move_hints (dest_mem
, src_mem
, len_rtx
,
3112 CALL_EXPR_TAILCALL (exp
)
3113 ? BLOCK_OP_TAILCALL
: BLOCK_OP_NORMAL
,
3114 expected_align
, expected_size
);
3118 dest_addr
= force_operand (XEXP (dest_mem
, 0), target
);
3119 dest_addr
= convert_memory_address (ptr_mode
, dest_addr
);
3125 /* Expand a call EXP to the mempcpy builtin.
3126 Return NULL_RTX if we failed; the caller should emit a normal call,
3127 otherwise try to get the result in TARGET, if convenient (and in
3128 mode MODE if that's convenient). If ENDP is 0 return the
3129 destination pointer, if ENDP is 1 return the end pointer ala
3130 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3134 expand_builtin_mempcpy (tree exp
, rtx target
, enum machine_mode mode
)
3136 if (!validate_arglist (exp
,
3137 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3141 tree dest
= CALL_EXPR_ARG (exp
, 0);
3142 tree src
= CALL_EXPR_ARG (exp
, 1);
3143 tree len
= CALL_EXPR_ARG (exp
, 2);
3144 return expand_builtin_mempcpy_args (dest
, src
, len
,
3145 target
, mode
, /*endp=*/ 1);
3149 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3150 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3151 so that this can also be called without constructing an actual CALL_EXPR.
3152 The other arguments and return value are the same as for
3153 expand_builtin_mempcpy. */
3156 expand_builtin_mempcpy_args (tree dest
, tree src
, tree len
,
3157 rtx target
, enum machine_mode mode
, int endp
)
3159 /* If return value is ignored, transform mempcpy into memcpy. */
3160 if (target
== const0_rtx
&& builtin_decl_implicit_p (BUILT_IN_MEMCPY
))
3162 tree fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
3163 tree result
= build_call_nofold_loc (UNKNOWN_LOCATION
, fn
, 3,
3165 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3169 const char *src_str
;
3170 unsigned int src_align
= get_pointer_alignment (src
);
3171 unsigned int dest_align
= get_pointer_alignment (dest
);
3172 rtx dest_mem
, src_mem
, len_rtx
;
3174 /* If either SRC or DEST is not a pointer type, don't do this
3175 operation in-line. */
3176 if (dest_align
== 0 || src_align
== 0)
3179 /* If LEN is not constant, call the normal function. */
3180 if (! host_integerp (len
, 1))
3183 len_rtx
= expand_normal (len
);
3184 src_str
= c_getstr (src
);
3186 /* If SRC is a string constant and block move would be done
3187 by pieces, we can avoid loading the string from memory
3188 and only stored the computed constants. */
3190 && CONST_INT_P (len_rtx
)
3191 && (unsigned HOST_WIDE_INT
) INTVAL (len_rtx
) <= strlen (src_str
) + 1
3192 && can_store_by_pieces (INTVAL (len_rtx
), builtin_memcpy_read_str
,
3193 CONST_CAST (char *, src_str
),
3196 dest_mem
= get_memory_rtx (dest
, len
);
3197 set_mem_align (dest_mem
, dest_align
);
3198 dest_mem
= store_by_pieces (dest_mem
, INTVAL (len_rtx
),
3199 builtin_memcpy_read_str
,
3200 CONST_CAST (char *, src_str
),
3201 dest_align
, false, endp
);
3202 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3203 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3207 if (CONST_INT_P (len_rtx
)
3208 && can_move_by_pieces (INTVAL (len_rtx
),
3209 MIN (dest_align
, src_align
)))
3211 dest_mem
= get_memory_rtx (dest
, len
);
3212 set_mem_align (dest_mem
, dest_align
);
3213 src_mem
= get_memory_rtx (src
, len
);
3214 set_mem_align (src_mem
, src_align
);
3215 dest_mem
= move_by_pieces (dest_mem
, src_mem
, INTVAL (len_rtx
),
3216 MIN (dest_align
, src_align
), endp
);
3217 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3218 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3227 # define HAVE_movstr 0
3228 # define CODE_FOR_movstr CODE_FOR_nothing
3231 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3232 we failed, the caller should emit a normal call, otherwise try to
3233 get the result in TARGET, if convenient. If ENDP is 0 return the
3234 destination pointer, if ENDP is 1 return the end pointer ala
3235 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3239 expand_movstr (tree dest
, tree src
, rtx target
, int endp
)
3241 struct expand_operand ops
[3];
3248 dest_mem
= get_memory_rtx (dest
, NULL
);
3249 src_mem
= get_memory_rtx (src
, NULL
);
3252 target
= force_reg (Pmode
, XEXP (dest_mem
, 0));
3253 dest_mem
= replace_equiv_address (dest_mem
, target
);
3256 create_output_operand (&ops
[0], endp
? target
: NULL_RTX
, Pmode
);
3257 create_fixed_operand (&ops
[1], dest_mem
);
3258 create_fixed_operand (&ops
[2], src_mem
);
3259 expand_insn (CODE_FOR_movstr
, 3, ops
);
3261 if (endp
&& target
!= const0_rtx
)
3263 target
= ops
[0].value
;
3264 /* movstr is supposed to set end to the address of the NUL
3265 terminator. If the caller requested a mempcpy-like return value,
3269 rtx tem
= plus_constant (GET_MODE (target
),
3270 gen_lowpart (GET_MODE (target
), target
), 1);
3271 emit_move_insn (target
, force_operand (tem
, NULL_RTX
));
3277 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3278 NULL_RTX if we failed the caller should emit a normal call, otherwise
3279 try to get the result in TARGET, if convenient (and in mode MODE if that's
3283 expand_builtin_strcpy (tree exp
, rtx target
)
3285 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3287 tree dest
= CALL_EXPR_ARG (exp
, 0);
3288 tree src
= CALL_EXPR_ARG (exp
, 1);
3289 return expand_builtin_strcpy_args (dest
, src
, target
);
3294 /* Helper function to do the actual work for expand_builtin_strcpy. The
3295 arguments to the builtin_strcpy call DEST and SRC are broken out
3296 so that this can also be called without constructing an actual CALL_EXPR.
3297 The other arguments and return value are the same as for
3298 expand_builtin_strcpy. */
3301 expand_builtin_strcpy_args (tree dest
, tree src
, rtx target
)
3303 return expand_movstr (dest
, src
, target
, /*endp=*/0);
3306 /* Expand a call EXP to the stpcpy builtin.
3307 Return NULL_RTX if we failed the caller should emit a normal call,
3308 otherwise try to get the result in TARGET, if convenient (and in
3309 mode MODE if that's convenient). */
3312 expand_builtin_stpcpy (tree exp
, rtx target
, enum machine_mode mode
)
3315 location_t loc
= EXPR_LOCATION (exp
);
3317 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3320 dst
= CALL_EXPR_ARG (exp
, 0);
3321 src
= CALL_EXPR_ARG (exp
, 1);
3323 /* If return value is ignored, transform stpcpy into strcpy. */
3324 if (target
== const0_rtx
&& builtin_decl_implicit (BUILT_IN_STRCPY
))
3326 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
3327 tree result
= build_call_nofold_loc (loc
, fn
, 2, dst
, src
);
3328 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3335 /* Ensure we get an actual string whose length can be evaluated at
3336 compile-time, not an expression containing a string. This is
3337 because the latter will potentially produce pessimized code
3338 when used to produce the return value. */
3339 if (! c_getstr (src
) || ! (len
= c_strlen (src
, 0)))
3340 return expand_movstr (dst
, src
, target
, /*endp=*/2);
3342 lenp1
= size_binop_loc (loc
, PLUS_EXPR
, len
, ssize_int (1));
3343 ret
= expand_builtin_mempcpy_args (dst
, src
, lenp1
,
3344 target
, mode
, /*endp=*/2);
3349 if (TREE_CODE (len
) == INTEGER_CST
)
3351 rtx len_rtx
= expand_normal (len
);
3353 if (CONST_INT_P (len_rtx
))
3355 ret
= expand_builtin_strcpy_args (dst
, src
, target
);
3361 if (mode
!= VOIDmode
)
3362 target
= gen_reg_rtx (mode
);
3364 target
= gen_reg_rtx (GET_MODE (ret
));
3366 if (GET_MODE (target
) != GET_MODE (ret
))
3367 ret
= gen_lowpart (GET_MODE (target
), ret
);
3369 ret
= plus_constant (GET_MODE (ret
), ret
, INTVAL (len_rtx
));
3370 ret
= emit_move_insn (target
, force_operand (ret
, NULL_RTX
));
3378 return expand_movstr (dst
, src
, target
, /*endp=*/2);
3382 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3383 bytes from constant string DATA + OFFSET and return it as target
3387 builtin_strncpy_read_str (void *data
, HOST_WIDE_INT offset
,
3388 enum machine_mode mode
)
3390 const char *str
= (const char *) data
;
3392 if ((unsigned HOST_WIDE_INT
) offset
> strlen (str
))
3395 return c_readstr (str
+ offset
, mode
);
3398 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3399 NULL_RTX if we failed the caller should emit a normal call. */
3402 expand_builtin_strncpy (tree exp
, rtx target
)
3404 location_t loc
= EXPR_LOCATION (exp
);
3406 if (validate_arglist (exp
,
3407 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3409 tree dest
= CALL_EXPR_ARG (exp
, 0);
3410 tree src
= CALL_EXPR_ARG (exp
, 1);
3411 tree len
= CALL_EXPR_ARG (exp
, 2);
3412 tree slen
= c_strlen (src
, 1);
3414 /* We must be passed a constant len and src parameter. */
3415 if (!host_integerp (len
, 1) || !slen
|| !host_integerp (slen
, 1))
3418 slen
= size_binop_loc (loc
, PLUS_EXPR
, slen
, ssize_int (1));
3420 /* We're required to pad with trailing zeros if the requested
3421 len is greater than strlen(s2)+1. In that case try to
3422 use store_by_pieces, if it fails, punt. */
3423 if (tree_int_cst_lt (slen
, len
))
3425 unsigned int dest_align
= get_pointer_alignment (dest
);
3426 const char *p
= c_getstr (src
);
3429 if (!p
|| dest_align
== 0 || !host_integerp (len
, 1)
3430 || !can_store_by_pieces (tree_low_cst (len
, 1),
3431 builtin_strncpy_read_str
,
3432 CONST_CAST (char *, p
),
3436 dest_mem
= get_memory_rtx (dest
, len
);
3437 store_by_pieces (dest_mem
, tree_low_cst (len
, 1),
3438 builtin_strncpy_read_str
,
3439 CONST_CAST (char *, p
), dest_align
, false, 0);
3440 dest_mem
= force_operand (XEXP (dest_mem
, 0), target
);
3441 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3448 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3449 bytes from constant string DATA + OFFSET and return it as target
3453 builtin_memset_read_str (void *data
, HOST_WIDE_INT offset ATTRIBUTE_UNUSED
,
3454 enum machine_mode mode
)
3456 const char *c
= (const char *) data
;
3457 char *p
= XALLOCAVEC (char, GET_MODE_SIZE (mode
));
3459 memset (p
, *c
, GET_MODE_SIZE (mode
));
3461 return c_readstr (p
, mode
);
3464 /* Callback routine for store_by_pieces. Return the RTL of a register
3465 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3466 char value given in the RTL register data. For example, if mode is
3467 4 bytes wide, return the RTL for 0x01010101*data. */
3470 builtin_memset_gen_str (void *data
, HOST_WIDE_INT offset ATTRIBUTE_UNUSED
,
3471 enum machine_mode mode
)
3477 size
= GET_MODE_SIZE (mode
);
3481 p
= XALLOCAVEC (char, size
);
3482 memset (p
, 1, size
);
3483 coeff
= c_readstr (p
, mode
);
3485 target
= convert_to_mode (mode
, (rtx
) data
, 1);
3486 target
= expand_mult (mode
, target
, coeff
, NULL_RTX
, 1);
3487 return force_reg (mode
, target
);
3490 /* Expand expression EXP, which is a call to the memset builtin. Return
3491 NULL_RTX if we failed the caller should emit a normal call, otherwise
3492 try to get the result in TARGET, if convenient (and in mode MODE if that's
3496 expand_builtin_memset (tree exp
, rtx target
, enum machine_mode mode
)
3498 if (!validate_arglist (exp
,
3499 POINTER_TYPE
, INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3503 tree dest
= CALL_EXPR_ARG (exp
, 0);
3504 tree val
= CALL_EXPR_ARG (exp
, 1);
3505 tree len
= CALL_EXPR_ARG (exp
, 2);
3506 return expand_builtin_memset_args (dest
, val
, len
, target
, mode
, exp
);
3510 /* Helper function to do the actual work for expand_builtin_memset. The
3511 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3512 so that this can also be called without constructing an actual CALL_EXPR.
3513 The other arguments and return value are the same as for
3514 expand_builtin_memset. */
3517 expand_builtin_memset_args (tree dest
, tree val
, tree len
,
3518 rtx target
, enum machine_mode mode
, tree orig_exp
)
3521 enum built_in_function fcode
;
3522 enum machine_mode val_mode
;
3524 unsigned int dest_align
;
3525 rtx dest_mem
, dest_addr
, len_rtx
;
3526 HOST_WIDE_INT expected_size
= -1;
3527 unsigned int expected_align
= 0;
3529 dest_align
= get_pointer_alignment (dest
);
3531 /* If DEST is not a pointer type, don't do this operation in-line. */
3532 if (dest_align
== 0)
3535 if (currently_expanding_gimple_stmt
)
3536 stringop_block_profile (currently_expanding_gimple_stmt
,
3537 &expected_align
, &expected_size
);
3539 if (expected_align
< dest_align
)
3540 expected_align
= dest_align
;
3542 /* If the LEN parameter is zero, return DEST. */
3543 if (integer_zerop (len
))
3545 /* Evaluate and ignore VAL in case it has side-effects. */
3546 expand_expr (val
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3547 return expand_expr (dest
, target
, mode
, EXPAND_NORMAL
);
3550 /* Stabilize the arguments in case we fail. */
3551 dest
= builtin_save_expr (dest
);
3552 val
= builtin_save_expr (val
);
3553 len
= builtin_save_expr (len
);
3555 len_rtx
= expand_normal (len
);
3556 dest_mem
= get_memory_rtx (dest
, len
);
3557 val_mode
= TYPE_MODE (unsigned_char_type_node
);
3559 if (TREE_CODE (val
) != INTEGER_CST
)
3563 val_rtx
= expand_normal (val
);
3564 val_rtx
= convert_to_mode (val_mode
, val_rtx
, 0);
3566 /* Assume that we can memset by pieces if we can store
3567 * the coefficients by pieces (in the required modes).
3568 * We can't pass builtin_memset_gen_str as that emits RTL. */
3570 if (host_integerp (len
, 1)
3571 && can_store_by_pieces (tree_low_cst (len
, 1),
3572 builtin_memset_read_str
, &c
, dest_align
,
3575 val_rtx
= force_reg (val_mode
, val_rtx
);
3576 store_by_pieces (dest_mem
, tree_low_cst (len
, 1),
3577 builtin_memset_gen_str
, val_rtx
, dest_align
,
3580 else if (!set_storage_via_setmem (dest_mem
, len_rtx
, val_rtx
,
3581 dest_align
, expected_align
,
3585 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3586 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3590 if (target_char_cast (val
, &c
))
3595 if (host_integerp (len
, 1)
3596 && can_store_by_pieces (tree_low_cst (len
, 1),
3597 builtin_memset_read_str
, &c
, dest_align
,
3599 store_by_pieces (dest_mem
, tree_low_cst (len
, 1),
3600 builtin_memset_read_str
, &c
, dest_align
, true, 0);
3601 else if (!set_storage_via_setmem (dest_mem
, len_rtx
,
3602 gen_int_mode (c
, val_mode
),
3603 dest_align
, expected_align
,
3607 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3608 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3612 set_mem_align (dest_mem
, dest_align
);
3613 dest_addr
= clear_storage_hints (dest_mem
, len_rtx
,
3614 CALL_EXPR_TAILCALL (orig_exp
)
3615 ? BLOCK_OP_TAILCALL
: BLOCK_OP_NORMAL
,
3616 expected_align
, expected_size
);
3620 dest_addr
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3621 dest_addr
= convert_memory_address (ptr_mode
, dest_addr
);
3627 fndecl
= get_callee_fndecl (orig_exp
);
3628 fcode
= DECL_FUNCTION_CODE (fndecl
);
3629 if (fcode
== BUILT_IN_MEMSET
)
3630 fn
= build_call_nofold_loc (EXPR_LOCATION (orig_exp
), fndecl
, 3,
3632 else if (fcode
== BUILT_IN_BZERO
)
3633 fn
= build_call_nofold_loc (EXPR_LOCATION (orig_exp
), fndecl
, 2,
3637 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
3638 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (orig_exp
);
3639 return expand_call (fn
, target
, target
== const0_rtx
);
3642 /* Expand expression EXP, which is a call to the bzero builtin. Return
3643 NULL_RTX if we failed the caller should emit a normal call. */
3646 expand_builtin_bzero (tree exp
)
3649 location_t loc
= EXPR_LOCATION (exp
);
3651 if (!validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3654 dest
= CALL_EXPR_ARG (exp
, 0);
3655 size
= CALL_EXPR_ARG (exp
, 1);
3657 /* New argument list transforming bzero(ptr x, int y) to
3658 memset(ptr x, int 0, size_t y). This is done this way
3659 so that if it isn't expanded inline, we fallback to
3660 calling bzero instead of memset. */
3662 return expand_builtin_memset_args (dest
, integer_zero_node
,
3663 fold_convert_loc (loc
,
3664 size_type_node
, size
),
3665 const0_rtx
, VOIDmode
, exp
);
3668 /* Expand expression EXP, which is a call to the memcmp built-in function.
3669 Return NULL_RTX if we failed and the caller should emit a normal call,
3670 otherwise try to get the result in TARGET, if convenient (and in mode
3671 MODE, if that's convenient). */
3674 expand_builtin_memcmp (tree exp
, ATTRIBUTE_UNUSED rtx target
,
3675 ATTRIBUTE_UNUSED
enum machine_mode mode
)
3677 location_t loc ATTRIBUTE_UNUSED
= EXPR_LOCATION (exp
);
3679 if (!validate_arglist (exp
,
3680 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3683 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3684 implementing memcmp because it will stop if it encounters two
3686 #if defined HAVE_cmpmemsi
3688 rtx arg1_rtx
, arg2_rtx
, arg3_rtx
;
3691 tree arg1
= CALL_EXPR_ARG (exp
, 0);
3692 tree arg2
= CALL_EXPR_ARG (exp
, 1);
3693 tree len
= CALL_EXPR_ARG (exp
, 2);
3695 unsigned int arg1_align
= get_pointer_alignment (arg1
) / BITS_PER_UNIT
;
3696 unsigned int arg2_align
= get_pointer_alignment (arg2
) / BITS_PER_UNIT
;
3697 enum machine_mode insn_mode
;
3700 insn_mode
= insn_data
[(int) CODE_FOR_cmpmemsi
].operand
[0].mode
;
3704 /* If we don't have POINTER_TYPE, call the function. */
3705 if (arg1_align
== 0 || arg2_align
== 0)
3708 /* Make a place to write the result of the instruction. */
3711 && REG_P (result
) && GET_MODE (result
) == insn_mode
3712 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
3713 result
= gen_reg_rtx (insn_mode
);
3715 arg1_rtx
= get_memory_rtx (arg1
, len
);
3716 arg2_rtx
= get_memory_rtx (arg2
, len
);
3717 arg3_rtx
= expand_normal (fold_convert_loc (loc
, sizetype
, len
));
3719 /* Set MEM_SIZE as appropriate. */
3720 if (CONST_INT_P (arg3_rtx
))
3722 set_mem_size (arg1_rtx
, INTVAL (arg3_rtx
));
3723 set_mem_size (arg2_rtx
, INTVAL (arg3_rtx
));
3727 insn
= gen_cmpmemsi (result
, arg1_rtx
, arg2_rtx
, arg3_rtx
,
3728 GEN_INT (MIN (arg1_align
, arg2_align
)));
3735 emit_library_call_value (memcmp_libfunc
, result
, LCT_PURE
,
3736 TYPE_MODE (integer_type_node
), 3,
3737 XEXP (arg1_rtx
, 0), Pmode
,
3738 XEXP (arg2_rtx
, 0), Pmode
,
3739 convert_to_mode (TYPE_MODE (sizetype
), arg3_rtx
,
3740 TYPE_UNSIGNED (sizetype
)),
3741 TYPE_MODE (sizetype
));
3743 /* Return the value in the proper mode for this function. */
3744 mode
= TYPE_MODE (TREE_TYPE (exp
));
3745 if (GET_MODE (result
) == mode
)
3747 else if (target
!= 0)
3749 convert_move (target
, result
, 0);
3753 return convert_to_mode (mode
, result
, 0);
3755 #endif /* HAVE_cmpmemsi. */
3760 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
3761 if we failed the caller should emit a normal call, otherwise try to get
3762 the result in TARGET, if convenient. */
3765 expand_builtin_strcmp (tree exp
, ATTRIBUTE_UNUSED rtx target
)
3767 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3770 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
3771 if (direct_optab_handler (cmpstr_optab
, SImode
) != CODE_FOR_nothing
3772 || direct_optab_handler (cmpstrn_optab
, SImode
) != CODE_FOR_nothing
)
3774 rtx arg1_rtx
, arg2_rtx
;
3775 rtx result
, insn
= NULL_RTX
;
3777 tree arg1
= CALL_EXPR_ARG (exp
, 0);
3778 tree arg2
= CALL_EXPR_ARG (exp
, 1);
3780 unsigned int arg1_align
= get_pointer_alignment (arg1
) / BITS_PER_UNIT
;
3781 unsigned int arg2_align
= get_pointer_alignment (arg2
) / BITS_PER_UNIT
;
3783 /* If we don't have POINTER_TYPE, call the function. */
3784 if (arg1_align
== 0 || arg2_align
== 0)
3787 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
3788 arg1
= builtin_save_expr (arg1
);
3789 arg2
= builtin_save_expr (arg2
);
3791 arg1_rtx
= get_memory_rtx (arg1
, NULL
);
3792 arg2_rtx
= get_memory_rtx (arg2
, NULL
);
3794 #ifdef HAVE_cmpstrsi
3795 /* Try to call cmpstrsi. */
3798 enum machine_mode insn_mode
3799 = insn_data
[(int) CODE_FOR_cmpstrsi
].operand
[0].mode
;
3801 /* Make a place to write the result of the instruction. */
3804 && REG_P (result
) && GET_MODE (result
) == insn_mode
3805 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
3806 result
= gen_reg_rtx (insn_mode
);
3808 insn
= gen_cmpstrsi (result
, arg1_rtx
, arg2_rtx
,
3809 GEN_INT (MIN (arg1_align
, arg2_align
)));
3812 #ifdef HAVE_cmpstrnsi
3813 /* Try to determine at least one length and call cmpstrnsi. */
3814 if (!insn
&& HAVE_cmpstrnsi
)
3819 enum machine_mode insn_mode
3820 = insn_data
[(int) CODE_FOR_cmpstrnsi
].operand
[0].mode
;
3821 tree len1
= c_strlen (arg1
, 1);
3822 tree len2
= c_strlen (arg2
, 1);
3825 len1
= size_binop (PLUS_EXPR
, ssize_int (1), len1
);
3827 len2
= size_binop (PLUS_EXPR
, ssize_int (1), len2
);
3829 /* If we don't have a constant length for the first, use the length
3830 of the second, if we know it. We don't require a constant for
3831 this case; some cost analysis could be done if both are available
3832 but neither is constant. For now, assume they're equally cheap,
3833 unless one has side effects. If both strings have constant lengths,
3840 else if (TREE_SIDE_EFFECTS (len1
))
3842 else if (TREE_SIDE_EFFECTS (len2
))
3844 else if (TREE_CODE (len1
) != INTEGER_CST
)
3846 else if (TREE_CODE (len2
) != INTEGER_CST
)
3848 else if (tree_int_cst_lt (len1
, len2
))
3853 /* If both arguments have side effects, we cannot optimize. */
3854 if (!len
|| TREE_SIDE_EFFECTS (len
))
3857 arg3_rtx
= expand_normal (len
);
3859 /* Make a place to write the result of the instruction. */
3862 && REG_P (result
) && GET_MODE (result
) == insn_mode
3863 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
3864 result
= gen_reg_rtx (insn_mode
);
3866 insn
= gen_cmpstrnsi (result
, arg1_rtx
, arg2_rtx
, arg3_rtx
,
3867 GEN_INT (MIN (arg1_align
, arg2_align
)));
3873 enum machine_mode mode
;
3876 /* Return the value in the proper mode for this function. */
3877 mode
= TYPE_MODE (TREE_TYPE (exp
));
3878 if (GET_MODE (result
) == mode
)
3881 return convert_to_mode (mode
, result
, 0);
3882 convert_move (target
, result
, 0);
3886 /* Expand the library call ourselves using a stabilized argument
3887 list to avoid re-evaluating the function's arguments twice. */
3888 #ifdef HAVE_cmpstrnsi
3891 fndecl
= get_callee_fndecl (exp
);
3892 fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fndecl
, 2, arg1
, arg2
);
3893 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
3894 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
3895 return expand_call (fn
, target
, target
== const0_rtx
);
3901 /* Expand expression EXP, which is a call to the strncmp builtin. Return
3902 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
3903 the result in TARGET, if convenient. */
3906 expand_builtin_strncmp (tree exp
, ATTRIBUTE_UNUSED rtx target
,
3907 ATTRIBUTE_UNUSED
enum machine_mode mode
)
3909 location_t loc ATTRIBUTE_UNUSED
= EXPR_LOCATION (exp
);
3911 if (!validate_arglist (exp
,
3912 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3915 /* If c_strlen can determine an expression for one of the string
3916 lengths, and it doesn't have side effects, then emit cmpstrnsi
3917 using length MIN(strlen(string)+1, arg3). */
3918 #ifdef HAVE_cmpstrnsi
3921 tree len
, len1
, len2
;
3922 rtx arg1_rtx
, arg2_rtx
, arg3_rtx
;
3925 tree arg1
= CALL_EXPR_ARG (exp
, 0);
3926 tree arg2
= CALL_EXPR_ARG (exp
, 1);
3927 tree arg3
= CALL_EXPR_ARG (exp
, 2);
3929 unsigned int arg1_align
= get_pointer_alignment (arg1
) / BITS_PER_UNIT
;
3930 unsigned int arg2_align
= get_pointer_alignment (arg2
) / BITS_PER_UNIT
;
3931 enum machine_mode insn_mode
3932 = insn_data
[(int) CODE_FOR_cmpstrnsi
].operand
[0].mode
;
3934 len1
= c_strlen (arg1
, 1);
3935 len2
= c_strlen (arg2
, 1);
3938 len1
= size_binop_loc (loc
, PLUS_EXPR
, ssize_int (1), len1
);
3940 len2
= size_binop_loc (loc
, PLUS_EXPR
, ssize_int (1), len2
);
3942 /* If we don't have a constant length for the first, use the length
3943 of the second, if we know it. We don't require a constant for
3944 this case; some cost analysis could be done if both are available
3945 but neither is constant. For now, assume they're equally cheap,
3946 unless one has side effects. If both strings have constant lengths,
3953 else if (TREE_SIDE_EFFECTS (len1
))
3955 else if (TREE_SIDE_EFFECTS (len2
))
3957 else if (TREE_CODE (len1
) != INTEGER_CST
)
3959 else if (TREE_CODE (len2
) != INTEGER_CST
)
3961 else if (tree_int_cst_lt (len1
, len2
))
3966 /* If both arguments have side effects, we cannot optimize. */
3967 if (!len
|| TREE_SIDE_EFFECTS (len
))
3970 /* The actual new length parameter is MIN(len,arg3). */
3971 len
= fold_build2_loc (loc
, MIN_EXPR
, TREE_TYPE (len
), len
,
3972 fold_convert_loc (loc
, TREE_TYPE (len
), arg3
));
3974 /* If we don't have POINTER_TYPE, call the function. */
3975 if (arg1_align
== 0 || arg2_align
== 0)
3978 /* Make a place to write the result of the instruction. */
3981 && REG_P (result
) && GET_MODE (result
) == insn_mode
3982 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
3983 result
= gen_reg_rtx (insn_mode
);
3985 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
3986 arg1
= builtin_save_expr (arg1
);
3987 arg2
= builtin_save_expr (arg2
);
3988 len
= builtin_save_expr (len
);
3990 arg1_rtx
= get_memory_rtx (arg1
, len
);
3991 arg2_rtx
= get_memory_rtx (arg2
, len
);
3992 arg3_rtx
= expand_normal (len
);
3993 insn
= gen_cmpstrnsi (result
, arg1_rtx
, arg2_rtx
, arg3_rtx
,
3994 GEN_INT (MIN (arg1_align
, arg2_align
)));
3999 /* Return the value in the proper mode for this function. */
4000 mode
= TYPE_MODE (TREE_TYPE (exp
));
4001 if (GET_MODE (result
) == mode
)
4004 return convert_to_mode (mode
, result
, 0);
4005 convert_move (target
, result
, 0);
4009 /* Expand the library call ourselves using a stabilized argument
4010 list to avoid re-evaluating the function's arguments twice. */
4011 fndecl
= get_callee_fndecl (exp
);
4012 fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fndecl
, 3,
4014 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
4015 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
4016 return expand_call (fn
, target
, target
== const0_rtx
);
4022 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4023 if that's convenient. */
4026 expand_builtin_saveregs (void)
4030 /* Don't do __builtin_saveregs more than once in a function.
4031 Save the result of the first call and reuse it. */
4032 if (saveregs_value
!= 0)
4033 return saveregs_value
;
4035 /* When this function is called, it means that registers must be
4036 saved on entry to this function. So we migrate the call to the
4037 first insn of this function. */
4041 /* Do whatever the machine needs done in this case. */
4042 val
= targetm
.calls
.expand_builtin_saveregs ();
4047 saveregs_value
= val
;
4049 /* Put the insns after the NOTE that starts the function. If this
4050 is inside a start_sequence, make the outer-level insn chain current, so
4051 the code is placed at the start of the function. */
4052 push_topmost_sequence ();
4053 emit_insn_after (seq
, entry_of_function ());
4054 pop_topmost_sequence ();
4059 /* Expand a call to __builtin_next_arg. */
4062 expand_builtin_next_arg (void)
4064 /* Checking arguments is already done in fold_builtin_next_arg
4065 that must be called before this function. */
4066 return expand_binop (ptr_mode
, add_optab
,
4067 crtl
->args
.internal_arg_pointer
,
4068 crtl
->args
.arg_offset_rtx
,
4069 NULL_RTX
, 0, OPTAB_LIB_WIDEN
);
4072 /* Make it easier for the backends by protecting the valist argument
4073 from multiple evaluations. */
4076 stabilize_va_list_loc (location_t loc
, tree valist
, int needs_lvalue
)
4078 tree vatype
= targetm
.canonical_va_list_type (TREE_TYPE (valist
));
4080 /* The current way of determining the type of valist is completely
4081 bogus. We should have the information on the va builtin instead. */
4083 vatype
= targetm
.fn_abi_va_list (cfun
->decl
);
4085 if (TREE_CODE (vatype
) == ARRAY_TYPE
)
4087 if (TREE_SIDE_EFFECTS (valist
))
4088 valist
= save_expr (valist
);
4090 /* For this case, the backends will be expecting a pointer to
4091 vatype, but it's possible we've actually been given an array
4092 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4094 if (TREE_CODE (TREE_TYPE (valist
)) == ARRAY_TYPE
)
4096 tree p1
= build_pointer_type (TREE_TYPE (vatype
));
4097 valist
= build_fold_addr_expr_with_type_loc (loc
, valist
, p1
);
4102 tree pt
= build_pointer_type (vatype
);
4106 if (! TREE_SIDE_EFFECTS (valist
))
4109 valist
= fold_build1_loc (loc
, ADDR_EXPR
, pt
, valist
);
4110 TREE_SIDE_EFFECTS (valist
) = 1;
4113 if (TREE_SIDE_EFFECTS (valist
))
4114 valist
= save_expr (valist
);
4115 valist
= fold_build2_loc (loc
, MEM_REF
,
4116 vatype
, valist
, build_int_cst (pt
, 0));
4122 /* The "standard" definition of va_list is void*. */
4125 std_build_builtin_va_list (void)
4127 return ptr_type_node
;
4130 /* The "standard" abi va_list is va_list_type_node. */
4133 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED
)
4135 return va_list_type_node
;
4138 /* The "standard" type of va_list is va_list_type_node. */
4141 std_canonical_va_list_type (tree type
)
4145 if (INDIRECT_REF_P (type
))
4146 type
= TREE_TYPE (type
);
4147 else if (POINTER_TYPE_P (type
) && POINTER_TYPE_P (TREE_TYPE(type
)))
4148 type
= TREE_TYPE (type
);
4149 wtype
= va_list_type_node
;
4151 /* Treat structure va_list types. */
4152 if (TREE_CODE (wtype
) == RECORD_TYPE
&& POINTER_TYPE_P (htype
))
4153 htype
= TREE_TYPE (htype
);
4154 else if (TREE_CODE (wtype
) == ARRAY_TYPE
)
4156 /* If va_list is an array type, the argument may have decayed
4157 to a pointer type, e.g. by being passed to another function.
4158 In that case, unwrap both types so that we can compare the
4159 underlying records. */
4160 if (TREE_CODE (htype
) == ARRAY_TYPE
4161 || POINTER_TYPE_P (htype
))
4163 wtype
= TREE_TYPE (wtype
);
4164 htype
= TREE_TYPE (htype
);
4167 if (TYPE_MAIN_VARIANT (wtype
) == TYPE_MAIN_VARIANT (htype
))
4168 return va_list_type_node
;
4173 /* The "standard" implementation of va_start: just assign `nextarg' to
4177 std_expand_builtin_va_start (tree valist
, rtx nextarg
)
4179 rtx va_r
= expand_expr (valist
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
4180 convert_move (va_r
, nextarg
, 0);
4183 /* Expand EXP, a call to __builtin_va_start. */
4186 expand_builtin_va_start (tree exp
)
4190 location_t loc
= EXPR_LOCATION (exp
);
4192 if (call_expr_nargs (exp
) < 2)
4194 error_at (loc
, "too few arguments to function %<va_start%>");
4198 if (fold_builtin_next_arg (exp
, true))
4201 nextarg
= expand_builtin_next_arg ();
4202 valist
= stabilize_va_list_loc (loc
, CALL_EXPR_ARG (exp
, 0), 1);
4204 if (targetm
.expand_builtin_va_start
)
4205 targetm
.expand_builtin_va_start (valist
, nextarg
);
4207 std_expand_builtin_va_start (valist
, nextarg
);
4212 /* The "standard" implementation of va_arg: read the value from the
4213 current (padded) address and increment by the (padded) size. */
4216 std_gimplify_va_arg_expr (tree valist
, tree type
, gimple_seq
*pre_p
,
4219 tree addr
, t
, type_size
, rounded_size
, valist_tmp
;
4220 unsigned HOST_WIDE_INT align
, boundary
;
4223 #ifdef ARGS_GROW_DOWNWARD
4224 /* All of the alignment and movement below is for args-grow-up machines.
4225 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4226 implement their own specialized gimplify_va_arg_expr routines. */
4230 indirect
= pass_by_reference (NULL
, TYPE_MODE (type
), type
, false);
4232 type
= build_pointer_type (type
);
4234 align
= PARM_BOUNDARY
/ BITS_PER_UNIT
;
4235 boundary
= targetm
.calls
.function_arg_boundary (TYPE_MODE (type
), type
);
4237 /* When we align parameter on stack for caller, if the parameter
4238 alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
4239 aligned at MAX_SUPPORTED_STACK_ALIGNMENT. We will match callee
4240 here with caller. */
4241 if (boundary
> MAX_SUPPORTED_STACK_ALIGNMENT
)
4242 boundary
= MAX_SUPPORTED_STACK_ALIGNMENT
;
4244 boundary
/= BITS_PER_UNIT
;
4246 /* Hoist the valist value into a temporary for the moment. */
4247 valist_tmp
= get_initialized_tmp_var (valist
, pre_p
, NULL
);
4249 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4250 requires greater alignment, we must perform dynamic alignment. */
4251 if (boundary
> align
4252 && !integer_zerop (TYPE_SIZE (type
)))
4254 t
= build2 (MODIFY_EXPR
, TREE_TYPE (valist
), valist_tmp
,
4255 fold_build_pointer_plus_hwi (valist_tmp
, boundary
- 1));
4256 gimplify_and_add (t
, pre_p
);
4258 t
= build2 (MODIFY_EXPR
, TREE_TYPE (valist
), valist_tmp
,
4259 fold_build2 (BIT_AND_EXPR
, TREE_TYPE (valist
),
4261 build_int_cst (TREE_TYPE (valist
), -boundary
)));
4262 gimplify_and_add (t
, pre_p
);
4267 /* If the actual alignment is less than the alignment of the type,
4268 adjust the type accordingly so that we don't assume strict alignment
4269 when dereferencing the pointer. */
4270 boundary
*= BITS_PER_UNIT
;
4271 if (boundary
< TYPE_ALIGN (type
))
4273 type
= build_variant_type_copy (type
);
4274 TYPE_ALIGN (type
) = boundary
;
4277 /* Compute the rounded size of the type. */
4278 type_size
= size_in_bytes (type
);
4279 rounded_size
= round_up (type_size
, align
);
4281 /* Reduce rounded_size so it's sharable with the postqueue. */
4282 gimplify_expr (&rounded_size
, pre_p
, post_p
, is_gimple_val
, fb_rvalue
);
4286 if (PAD_VARARGS_DOWN
&& !integer_zerop (rounded_size
))
4288 /* Small args are padded downward. */
4289 t
= fold_build2_loc (input_location
, GT_EXPR
, sizetype
,
4290 rounded_size
, size_int (align
));
4291 t
= fold_build3 (COND_EXPR
, sizetype
, t
, size_zero_node
,
4292 size_binop (MINUS_EXPR
, rounded_size
, type_size
));
4293 addr
= fold_build_pointer_plus (addr
, t
);
4296 /* Compute new value for AP. */
4297 t
= fold_build_pointer_plus (valist_tmp
, rounded_size
);
4298 t
= build2 (MODIFY_EXPR
, TREE_TYPE (valist
), valist
, t
);
4299 gimplify_and_add (t
, pre_p
);
4301 addr
= fold_convert (build_pointer_type (type
), addr
);
4304 addr
= build_va_arg_indirect_ref (addr
);
4306 return build_va_arg_indirect_ref (addr
);
4309 /* Build an indirect-ref expression over the given TREE, which represents a
4310 piece of a va_arg() expansion. */
4312 build_va_arg_indirect_ref (tree addr
)
4314 addr
= build_simple_mem_ref_loc (EXPR_LOCATION (addr
), addr
);
4316 if (flag_mudflap
) /* Don't instrument va_arg INDIRECT_REF. */
4322 /* Return a dummy expression of type TYPE in order to keep going after an
4326 dummy_object (tree type
)
4328 tree t
= build_int_cst (build_pointer_type (type
), 0);
4329 return build2 (MEM_REF
, type
, t
, t
);
4332 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4333 builtin function, but a very special sort of operator. */
4335 enum gimplify_status
4336 gimplify_va_arg_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
4338 tree promoted_type
, have_va_type
;
4339 tree valist
= TREE_OPERAND (*expr_p
, 0);
4340 tree type
= TREE_TYPE (*expr_p
);
4342 location_t loc
= EXPR_LOCATION (*expr_p
);
4344 /* Verify that valist is of the proper type. */
4345 have_va_type
= TREE_TYPE (valist
);
4346 if (have_va_type
== error_mark_node
)
4348 have_va_type
= targetm
.canonical_va_list_type (have_va_type
);
4350 if (have_va_type
== NULL_TREE
)
4352 error_at (loc
, "first argument to %<va_arg%> not of type %<va_list%>");
4356 /* Generate a diagnostic for requesting data of a type that cannot
4357 be passed through `...' due to type promotion at the call site. */
4358 if ((promoted_type
= lang_hooks
.types
.type_promotes_to (type
))
4361 static bool gave_help
;
4364 /* Unfortunately, this is merely undefined, rather than a constraint
4365 violation, so we cannot make this an error. If this call is never
4366 executed, the program is still strictly conforming. */
4367 warned
= warning_at (loc
, 0,
4368 "%qT is promoted to %qT when passed through %<...%>",
4369 type
, promoted_type
);
4370 if (!gave_help
&& warned
)
4373 inform (loc
, "(so you should pass %qT not %qT to %<va_arg%>)",
4374 promoted_type
, type
);
4377 /* We can, however, treat "undefined" any way we please.
4378 Call abort to encourage the user to fix the program. */
4380 inform (loc
, "if this code is reached, the program will abort");
4381 /* Before the abort, allow the evaluation of the va_list
4382 expression to exit or longjmp. */
4383 gimplify_and_add (valist
, pre_p
);
4384 t
= build_call_expr_loc (loc
,
4385 builtin_decl_implicit (BUILT_IN_TRAP
), 0);
4386 gimplify_and_add (t
, pre_p
);
4388 /* This is dead code, but go ahead and finish so that the
4389 mode of the result comes out right. */
4390 *expr_p
= dummy_object (type
);
4395 /* Make it easier for the backends by protecting the valist argument
4396 from multiple evaluations. */
4397 if (TREE_CODE (have_va_type
) == ARRAY_TYPE
)
4399 /* For this case, the backends will be expecting a pointer to
4400 TREE_TYPE (abi), but it's possible we've
4401 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
4403 if (TREE_CODE (TREE_TYPE (valist
)) == ARRAY_TYPE
)
4405 tree p1
= build_pointer_type (TREE_TYPE (have_va_type
));
4406 valist
= fold_convert_loc (loc
, p1
,
4407 build_fold_addr_expr_loc (loc
, valist
));
4410 gimplify_expr (&valist
, pre_p
, post_p
, is_gimple_val
, fb_rvalue
);
4413 gimplify_expr (&valist
, pre_p
, post_p
, is_gimple_min_lval
, fb_lvalue
);
4415 if (!targetm
.gimplify_va_arg_expr
)
4416 /* FIXME: Once most targets are converted we should merely
4417 assert this is non-null. */
4420 *expr_p
= targetm
.gimplify_va_arg_expr (valist
, type
, pre_p
, post_p
);
4425 /* Expand EXP, a call to __builtin_va_end. */
4428 expand_builtin_va_end (tree exp
)
4430 tree valist
= CALL_EXPR_ARG (exp
, 0);
4432 /* Evaluate for side effects, if needed. I hate macros that don't
4434 if (TREE_SIDE_EFFECTS (valist
))
4435 expand_expr (valist
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4440 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4441 builtin rather than just as an assignment in stdarg.h because of the
4442 nastiness of array-type va_list types. */
4445 expand_builtin_va_copy (tree exp
)
4448 location_t loc
= EXPR_LOCATION (exp
);
4450 dst
= CALL_EXPR_ARG (exp
, 0);
4451 src
= CALL_EXPR_ARG (exp
, 1);
4453 dst
= stabilize_va_list_loc (loc
, dst
, 1);
4454 src
= stabilize_va_list_loc (loc
, src
, 0);
4456 gcc_assert (cfun
!= NULL
&& cfun
->decl
!= NULL_TREE
);
4458 if (TREE_CODE (targetm
.fn_abi_va_list (cfun
->decl
)) != ARRAY_TYPE
)
4460 t
= build2 (MODIFY_EXPR
, targetm
.fn_abi_va_list (cfun
->decl
), dst
, src
);
4461 TREE_SIDE_EFFECTS (t
) = 1;
4462 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4466 rtx dstb
, srcb
, size
;
4468 /* Evaluate to pointers. */
4469 dstb
= expand_expr (dst
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
4470 srcb
= expand_expr (src
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
4471 size
= expand_expr (TYPE_SIZE_UNIT (targetm
.fn_abi_va_list (cfun
->decl
)),
4472 NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
4474 dstb
= convert_memory_address (Pmode
, dstb
);
4475 srcb
= convert_memory_address (Pmode
, srcb
);
4477 /* "Dereference" to BLKmode memories. */
4478 dstb
= gen_rtx_MEM (BLKmode
, dstb
);
4479 set_mem_alias_set (dstb
, get_alias_set (TREE_TYPE (TREE_TYPE (dst
))));
4480 set_mem_align (dstb
, TYPE_ALIGN (targetm
.fn_abi_va_list (cfun
->decl
)));
4481 srcb
= gen_rtx_MEM (BLKmode
, srcb
);
4482 set_mem_alias_set (srcb
, get_alias_set (TREE_TYPE (TREE_TYPE (src
))));
4483 set_mem_align (srcb
, TYPE_ALIGN (targetm
.fn_abi_va_list (cfun
->decl
)));
4486 emit_block_move (dstb
, srcb
, size
, BLOCK_OP_NORMAL
);
4492 /* Expand a call to one of the builtin functions __builtin_frame_address or
4493 __builtin_return_address. */
4496 expand_builtin_frame_address (tree fndecl
, tree exp
)
4498 /* The argument must be a nonnegative integer constant.
4499 It counts the number of frames to scan up the stack.
4500 The value is the return address saved in that frame. */
4501 if (call_expr_nargs (exp
) == 0)
4502 /* Warning about missing arg was already issued. */
4504 else if (! host_integerp (CALL_EXPR_ARG (exp
, 0), 1))
4506 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
4507 error ("invalid argument to %<__builtin_frame_address%>");
4509 error ("invalid argument to %<__builtin_return_address%>");
4515 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl
),
4516 tree_low_cst (CALL_EXPR_ARG (exp
, 0), 1));
4518 /* Some ports cannot access arbitrary stack frames. */
4521 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
4522 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4524 warning (0, "unsupported argument to %<__builtin_return_address%>");
4528 /* For __builtin_frame_address, return what we've got. */
4529 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
4533 && ! CONSTANT_P (tem
))
4534 tem
= copy_addr_to_reg (tem
);
4539 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4540 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4541 is the same as for allocate_dynamic_stack_space. */
4544 expand_builtin_alloca (tree exp
, bool cannot_accumulate
)
4550 bool alloca_with_align
= (DECL_FUNCTION_CODE (get_callee_fndecl (exp
))
4551 == BUILT_IN_ALLOCA_WITH_ALIGN
);
4553 /* Emit normal call if we use mudflap. */
4558 = (alloca_with_align
4559 ? validate_arglist (exp
, INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
)
4560 : validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
));
4565 /* Compute the argument. */
4566 op0
= expand_normal (CALL_EXPR_ARG (exp
, 0));
4568 /* Compute the alignment. */
4569 align
= (alloca_with_align
4570 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp
, 1))
4571 : BIGGEST_ALIGNMENT
);
4573 /* Allocate the desired space. */
4574 result
= allocate_dynamic_stack_space (op0
, 0, align
, cannot_accumulate
);
4575 result
= convert_memory_address (ptr_mode
, result
);
4580 /* Expand a call to bswap builtin in EXP.
4581 Return NULL_RTX if a normal call should be emitted rather than expanding the
4582 function in-line. If convenient, the result should be placed in TARGET.
4583 SUBTARGET may be used as the target for computing one of EXP's operands. */
4586 expand_builtin_bswap (enum machine_mode target_mode
, tree exp
, rtx target
,
4592 if (!validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
))
4595 arg
= CALL_EXPR_ARG (exp
, 0);
4596 op0
= expand_expr (arg
,
4597 subtarget
&& GET_MODE (subtarget
) == target_mode
4598 ? subtarget
: NULL_RTX
,
4599 target_mode
, EXPAND_NORMAL
);
4600 if (GET_MODE (op0
) != target_mode
)
4601 op0
= convert_to_mode (target_mode
, op0
, 1);
4603 target
= expand_unop (target_mode
, bswap_optab
, op0
, target
, 1);
4605 gcc_assert (target
);
4607 return convert_to_mode (target_mode
, target
, 1);
4610 /* Expand a call to a unary builtin in EXP.
4611 Return NULL_RTX if a normal call should be emitted rather than expanding the
4612 function in-line. If convenient, the result should be placed in TARGET.
4613 SUBTARGET may be used as the target for computing one of EXP's operands. */
4616 expand_builtin_unop (enum machine_mode target_mode
, tree exp
, rtx target
,
4617 rtx subtarget
, optab op_optab
)
4621 if (!validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
))
4624 /* Compute the argument. */
4625 op0
= expand_expr (CALL_EXPR_ARG (exp
, 0),
4627 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 0)))
4628 == GET_MODE (subtarget
))) ? subtarget
: NULL_RTX
,
4629 VOIDmode
, EXPAND_NORMAL
);
4630 /* Compute op, into TARGET if possible.
4631 Set TARGET to wherever the result comes back. */
4632 target
= expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 0))),
4633 op_optab
, op0
, target
, op_optab
!= clrsb_optab
);
4634 gcc_assert (target
);
4636 return convert_to_mode (target_mode
, target
, 0);
4639 /* Expand a call to __builtin_expect. We just return our argument
4640 as the builtin_expect semantic should've been already executed by
4641 tree branch prediction pass. */
4644 expand_builtin_expect (tree exp
, rtx target
)
4648 if (call_expr_nargs (exp
) < 2)
4650 arg
= CALL_EXPR_ARG (exp
, 0);
4652 target
= expand_expr (arg
, target
, VOIDmode
, EXPAND_NORMAL
);
4653 /* When guessing was done, the hints should be already stripped away. */
4654 gcc_assert (!flag_guess_branch_prob
4655 || optimize
== 0 || seen_error ());
4659 /* Expand a call to __builtin_assume_aligned. We just return our first
4660 argument as the builtin_assume_aligned semantic should've been already
4664 expand_builtin_assume_aligned (tree exp
, rtx target
)
4666 if (call_expr_nargs (exp
) < 2)
4668 target
= expand_expr (CALL_EXPR_ARG (exp
, 0), target
, VOIDmode
,
4670 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp
, 1))
4671 && (call_expr_nargs (exp
) < 3
4672 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp
, 2))));
4677 expand_builtin_trap (void)
4681 emit_insn (gen_trap ());
4684 emit_library_call (abort_libfunc
, LCT_NORETURN
, VOIDmode
, 0);
/* Expand a call to __builtin_unreachable.  We do nothing except emit
   a barrier saying that control flow will not pass here.

   It is the responsibility of the program being compiled to ensure
   that control flow does never reach __builtin_unreachable.
   NOTE(review): reconstructed from a corrupted extraction; verify
   against upstream GCC builtins.c.  */

static void
expand_builtin_unreachable (void)
{
  emit_barrier ();
}
4699 /* Expand EXP, a call to fabs, fabsf or fabsl.
4700 Return NULL_RTX if a normal call should be emitted rather than expanding
4701 the function inline. If convenient, the result should be placed
4702 in TARGET. SUBTARGET may be used as the target for computing
4706 expand_builtin_fabs (tree exp
, rtx target
, rtx subtarget
)
4708 enum machine_mode mode
;
4712 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
4715 arg
= CALL_EXPR_ARG (exp
, 0);
4716 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
4717 mode
= TYPE_MODE (TREE_TYPE (arg
));
4718 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
4719 return expand_abs (mode
, op0
, target
, 0, safe_from_p (target
, arg
, 1));
4722 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4723 Return NULL is a normal call should be emitted rather than expanding the
4724 function inline. If convenient, the result should be placed in TARGET.
4725 SUBTARGET may be used as the target for computing the operand. */
4728 expand_builtin_copysign (tree exp
, rtx target
, rtx subtarget
)
4733 if (!validate_arglist (exp
, REAL_TYPE
, REAL_TYPE
, VOID_TYPE
))
4736 arg
= CALL_EXPR_ARG (exp
, 0);
4737 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
4739 arg
= CALL_EXPR_ARG (exp
, 1);
4740 op1
= expand_normal (arg
);
4742 return expand_copysign (op0
, op1
, target
);
4745 /* Create a new constant string literal and return a char* pointer to it.
4746 The STRING_CST value is the LEN characters at STR. */
4748 build_string_literal (int len
, const char *str
)
4750 tree t
, elem
, index
, type
;
4752 t
= build_string (len
, str
);
4753 elem
= build_type_variant (char_type_node
, 1, 0);
4754 index
= build_index_type (size_int (len
- 1));
4755 type
= build_array_type (elem
, index
);
4756 TREE_TYPE (t
) = type
;
4757 TREE_CONSTANT (t
) = 1;
4758 TREE_READONLY (t
) = 1;
4759 TREE_STATIC (t
) = 1;
4761 type
= build_pointer_type (elem
);
4762 t
= build1 (ADDR_EXPR
, type
,
4763 build4 (ARRAY_REF
, elem
,
4764 t
, integer_zero_node
, NULL_TREE
, NULL_TREE
));
4768 /* Expand a call to __builtin___clear_cache. */
4771 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED
)
4773 #ifndef HAVE_clear_cache
4774 #ifdef CLEAR_INSN_CACHE
4775 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4776 does something. Just do the default expansion to a call to
4780 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4781 does nothing. There is no need to call it. Do nothing. */
4783 #endif /* CLEAR_INSN_CACHE */
4785 /* We have a "clear_cache" insn, and it will handle everything. */
4787 rtx begin_rtx
, end_rtx
;
4789 /* We must not expand to a library call. If we did, any
4790 fallback library function in libgcc that might contain a call to
4791 __builtin___clear_cache() would recurse infinitely. */
4792 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
4794 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4798 if (HAVE_clear_cache
)
4800 struct expand_operand ops
[2];
4802 begin
= CALL_EXPR_ARG (exp
, 0);
4803 begin_rtx
= expand_expr (begin
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
4805 end
= CALL_EXPR_ARG (exp
, 1);
4806 end_rtx
= expand_expr (end
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
4808 create_address_operand (&ops
[0], begin_rtx
);
4809 create_address_operand (&ops
[1], end_rtx
);
4810 if (maybe_expand_insn (CODE_FOR_clear_cache
, 2, ops
))
4814 #endif /* HAVE_clear_cache */
4817 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4820 round_trampoline_addr (rtx tramp
)
4822 rtx temp
, addend
, mask
;
4824 /* If we don't need too much alignment, we'll have been guaranteed
4825 proper alignment by get_trampoline_type. */
4826 if (TRAMPOLINE_ALIGNMENT
<= STACK_BOUNDARY
)
4829 /* Round address up to desired boundary. */
4830 temp
= gen_reg_rtx (Pmode
);
4831 addend
= GEN_INT (TRAMPOLINE_ALIGNMENT
/ BITS_PER_UNIT
- 1);
4832 mask
= GEN_INT (-TRAMPOLINE_ALIGNMENT
/ BITS_PER_UNIT
);
4834 temp
= expand_simple_binop (Pmode
, PLUS
, tramp
, addend
,
4835 temp
, 0, OPTAB_LIB_WIDEN
);
4836 tramp
= expand_simple_binop (Pmode
, AND
, temp
, mask
,
4837 temp
, 0, OPTAB_LIB_WIDEN
);
4843 expand_builtin_init_trampoline (tree exp
, bool onstack
)
4845 tree t_tramp
, t_func
, t_chain
;
4846 rtx m_tramp
, r_tramp
, r_chain
, tmp
;
4848 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
,
4849 POINTER_TYPE
, VOID_TYPE
))
4852 t_tramp
= CALL_EXPR_ARG (exp
, 0);
4853 t_func
= CALL_EXPR_ARG (exp
, 1);
4854 t_chain
= CALL_EXPR_ARG (exp
, 2);
4856 r_tramp
= expand_normal (t_tramp
);
4857 m_tramp
= gen_rtx_MEM (BLKmode
, r_tramp
);
4858 MEM_NOTRAP_P (m_tramp
) = 1;
4860 /* If ONSTACK, the TRAMP argument should be the address of a field
4861 within the local function's FRAME decl. Either way, let's see if
4862 we can fill in the MEM_ATTRs for this memory. */
4863 if (TREE_CODE (t_tramp
) == ADDR_EXPR
)
4864 set_mem_attributes_minus_bitpos (m_tramp
, TREE_OPERAND (t_tramp
, 0),
4867 /* Creator of a heap trampoline is responsible for making sure the
4868 address is aligned to at least STACK_BOUNDARY. Normally malloc
4869 will ensure this anyhow. */
4870 tmp
= round_trampoline_addr (r_tramp
);
4873 m_tramp
= change_address (m_tramp
, BLKmode
, tmp
);
4874 set_mem_align (m_tramp
, TRAMPOLINE_ALIGNMENT
);
4875 set_mem_size (m_tramp
, TRAMPOLINE_SIZE
);
4878 /* The FUNC argument should be the address of the nested function.
4879 Extract the actual function decl to pass to the hook. */
4880 gcc_assert (TREE_CODE (t_func
) == ADDR_EXPR
);
4881 t_func
= TREE_OPERAND (t_func
, 0);
4882 gcc_assert (TREE_CODE (t_func
) == FUNCTION_DECL
);
4884 r_chain
= expand_normal (t_chain
);
4886 /* Generate insns to initialize the trampoline. */
4887 targetm
.calls
.trampoline_init (m_tramp
, t_func
, r_chain
);
4891 trampolines_created
= 1;
4893 warning_at (DECL_SOURCE_LOCATION (t_func
), OPT_Wtrampolines
,
4894 "trampoline generated for nested function %qD", t_func
);
4901 expand_builtin_adjust_trampoline (tree exp
)
4905 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
4908 tramp
= expand_normal (CALL_EXPR_ARG (exp
, 0));
4909 tramp
= round_trampoline_addr (tramp
);
4910 if (targetm
.calls
.trampoline_adjust_address
)
4911 tramp
= targetm
.calls
.trampoline_adjust_address (tramp
);
4916 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4917 function. The function first checks whether the back end provides
4918 an insn to implement signbit for the respective mode. If not, it
4919 checks whether the floating point format of the value is such that
4920 the sign bit can be extracted. If that is not the case, the
4921 function returns NULL_RTX to indicate that a normal call should be
4922 emitted rather than expanding the function in-line. EXP is the
4923 expression that is a call to the builtin function; if convenient,
4924 the result should be placed in TARGET. */
4926 expand_builtin_signbit (tree exp
, rtx target
)
4928 const struct real_format
*fmt
;
4929 enum machine_mode fmode
, imode
, rmode
;
4932 enum insn_code icode
;
4934 location_t loc
= EXPR_LOCATION (exp
);
4936 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
4939 arg
= CALL_EXPR_ARG (exp
, 0);
4940 fmode
= TYPE_MODE (TREE_TYPE (arg
));
4941 rmode
= TYPE_MODE (TREE_TYPE (exp
));
4942 fmt
= REAL_MODE_FORMAT (fmode
);
4944 arg
= builtin_save_expr (arg
);
4946 /* Expand the argument yielding a RTX expression. */
4947 temp
= expand_normal (arg
);
4949 /* Check if the back end provides an insn that handles signbit for the
4951 icode
= optab_handler (signbit_optab
, fmode
);
4952 if (icode
!= CODE_FOR_nothing
)
4954 rtx last
= get_last_insn ();
4955 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
4956 if (maybe_emit_unop_insn (icode
, target
, temp
, UNKNOWN
))
4958 delete_insns_since (last
);
4961 /* For floating point formats without a sign bit, implement signbit
4963 bitpos
= fmt
->signbit_ro
;
4966 /* But we can't do this if the format supports signed zero. */
4967 if (fmt
->has_signed_zero
&& HONOR_SIGNED_ZEROS (fmode
))
4970 arg
= fold_build2_loc (loc
, LT_EXPR
, TREE_TYPE (exp
), arg
,
4971 build_real (TREE_TYPE (arg
), dconst0
));
4972 return expand_expr (arg
, target
, VOIDmode
, EXPAND_NORMAL
);
4975 if (GET_MODE_SIZE (fmode
) <= UNITS_PER_WORD
)
4977 imode
= int_mode_for_mode (fmode
);
4978 if (imode
== BLKmode
)
4980 temp
= gen_lowpart (imode
, temp
);
4985 /* Handle targets with different FP word orders. */
4986 if (FLOAT_WORDS_BIG_ENDIAN
)
4987 word
= (GET_MODE_BITSIZE (fmode
) - bitpos
) / BITS_PER_WORD
;
4989 word
= bitpos
/ BITS_PER_WORD
;
4990 temp
= operand_subword_force (temp
, word
, fmode
);
4991 bitpos
= bitpos
% BITS_PER_WORD
;
4994 /* Force the intermediate word_mode (or narrower) result into a
4995 register. This avoids attempting to create paradoxical SUBREGs
4996 of floating point modes below. */
4997 temp
= force_reg (imode
, temp
);
4999 /* If the bitpos is within the "result mode" lowpart, the operation
5000 can be implement with a single bitwise AND. Otherwise, we need
5001 a right shift and an AND. */
5003 if (bitpos
< GET_MODE_BITSIZE (rmode
))
5005 double_int mask
= double_int_zero
.set_bit (bitpos
);
5007 if (GET_MODE_SIZE (imode
) > GET_MODE_SIZE (rmode
))
5008 temp
= gen_lowpart (rmode
, temp
);
5009 temp
= expand_binop (rmode
, and_optab
, temp
,
5010 immed_double_int_const (mask
, rmode
),
5011 NULL_RTX
, 1, OPTAB_LIB_WIDEN
);
5015 /* Perform a logical right shift to place the signbit in the least
5016 significant bit, then truncate the result to the desired mode
5017 and mask just this bit. */
5018 temp
= expand_shift (RSHIFT_EXPR
, imode
, temp
, bitpos
, NULL_RTX
, 1);
5019 temp
= gen_lowpart (rmode
, temp
);
5020 temp
= expand_binop (rmode
, and_optab
, temp
, const1_rtx
,
5021 NULL_RTX
, 1, OPTAB_LIB_WIDEN
);
5027 /* Expand fork or exec calls. TARGET is the desired target of the
5028 call. EXP is the call. FN is the
5029 identificator of the actual function. IGNORE is nonzero if the
5030 value is to be ignored. */
5033 expand_builtin_fork_or_exec (tree fn
, tree exp
, rtx target
, int ignore
)
5038 /* If we are not profiling, just call the function. */
5039 if (!profile_arc_flag
)
5042 /* Otherwise call the wrapper. This should be equivalent for the rest of
5043 compiler, so the code does not diverge, and the wrapper may run the
5044 code necessary for keeping the profiling sane. */
5046 switch (DECL_FUNCTION_CODE (fn
))
5049 id
= get_identifier ("__gcov_fork");
5052 case BUILT_IN_EXECL
:
5053 id
= get_identifier ("__gcov_execl");
5056 case BUILT_IN_EXECV
:
5057 id
= get_identifier ("__gcov_execv");
5060 case BUILT_IN_EXECLP
:
5061 id
= get_identifier ("__gcov_execlp");
5064 case BUILT_IN_EXECLE
:
5065 id
= get_identifier ("__gcov_execle");
5068 case BUILT_IN_EXECVP
:
5069 id
= get_identifier ("__gcov_execvp");
5072 case BUILT_IN_EXECVE
:
5073 id
= get_identifier ("__gcov_execve");
5080 decl
= build_decl (DECL_SOURCE_LOCATION (fn
),
5081 FUNCTION_DECL
, id
, TREE_TYPE (fn
));
5082 DECL_EXTERNAL (decl
) = 1;
5083 TREE_PUBLIC (decl
) = 1;
5084 DECL_ARTIFICIAL (decl
) = 1;
5085 TREE_NOTHROW (decl
) = 1;
5086 DECL_VISIBILITY (decl
) = VISIBILITY_DEFAULT
;
5087 DECL_VISIBILITY_SPECIFIED (decl
) = 1;
5088 call
= rewrite_call_expr (EXPR_LOCATION (exp
), exp
, 0, decl
, 0);
5089 return expand_call (call
, target
, ignore
);
5094 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5095 the pointer in these functions is void*, the tree optimizers may remove
5096 casts. The mode computed in expand_builtin isn't reliable either, due
5097 to __sync_bool_compare_and_swap.
5099 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5100 group of builtins. This gives us log2 of the mode size. */
5102 static inline enum machine_mode
5103 get_builtin_sync_mode (int fcode_diff
)
5105 /* The size is not negotiable, so ask not to get BLKmode in return
5106 if the target indicates that a smaller size would be better. */
5107 return mode_for_size (BITS_PER_UNIT
<< fcode_diff
, MODE_INT
, 0);
5110 /* Expand the memory expression LOC and return the appropriate memory operand
5111 for the builtin_sync operations. */
5114 get_builtin_sync_mem (tree loc
, enum machine_mode mode
)
5118 addr
= expand_expr (loc
, NULL_RTX
, ptr_mode
, EXPAND_SUM
);
5119 addr
= convert_memory_address (Pmode
, addr
);
5121 /* Note that we explicitly do not want any alias information for this
5122 memory, so that we kill all other live memories. Otherwise we don't
5123 satisfy the full barrier semantics of the intrinsic. */
5124 mem
= validize_mem (gen_rtx_MEM (mode
, addr
));
5126 /* The alignment needs to be at least according to that of the mode. */
5127 set_mem_align (mem
, MAX (GET_MODE_ALIGNMENT (mode
),
5128 get_pointer_alignment (loc
)));
5129 set_mem_alias_set (mem
, ALIAS_SET_MEMORY_BARRIER
);
5130 MEM_VOLATILE_P (mem
) = 1;
5135 /* Make sure an argument is in the right mode.
5136 EXP is the tree argument.
5137 MODE is the mode it should be in. */
5140 expand_expr_force_mode (tree exp
, enum machine_mode mode
)
5143 enum machine_mode old_mode
;
5145 val
= expand_expr (exp
, NULL_RTX
, mode
, EXPAND_NORMAL
);
5146 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5147 of CONST_INTs, where we know the old_mode only from the call argument. */
5149 old_mode
= GET_MODE (val
);
5150 if (old_mode
== VOIDmode
)
5151 old_mode
= TYPE_MODE (TREE_TYPE (exp
));
5152 val
= convert_modes (mode
, old_mode
, val
, 1);
5157 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5158 EXP is the CALL_EXPR. CODE is the rtx code
5159 that corresponds to the arithmetic or logical operation from the name;
5160 an exception here is that NOT actually means NAND. TARGET is an optional
5161 place for us to store the results; AFTER is true if this is the
5162 fetch_and_xxx form. */
5165 expand_builtin_sync_operation (enum machine_mode mode
, tree exp
,
5166 enum rtx_code code
, bool after
,
5170 location_t loc
= EXPR_LOCATION (exp
);
5172 if (code
== NOT
&& warn_sync_nand
)
5174 tree fndecl
= get_callee_fndecl (exp
);
5175 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
5177 static bool warned_f_a_n
, warned_n_a_f
;
5181 case BUILT_IN_SYNC_FETCH_AND_NAND_1
:
5182 case BUILT_IN_SYNC_FETCH_AND_NAND_2
:
5183 case BUILT_IN_SYNC_FETCH_AND_NAND_4
:
5184 case BUILT_IN_SYNC_FETCH_AND_NAND_8
:
5185 case BUILT_IN_SYNC_FETCH_AND_NAND_16
:
5189 fndecl
= builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N
);
5190 inform (loc
, "%qD changed semantics in GCC 4.4", fndecl
);
5191 warned_f_a_n
= true;
5194 case BUILT_IN_SYNC_NAND_AND_FETCH_1
:
5195 case BUILT_IN_SYNC_NAND_AND_FETCH_2
:
5196 case BUILT_IN_SYNC_NAND_AND_FETCH_4
:
5197 case BUILT_IN_SYNC_NAND_AND_FETCH_8
:
5198 case BUILT_IN_SYNC_NAND_AND_FETCH_16
:
5202 fndecl
= builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N
);
5203 inform (loc
, "%qD changed semantics in GCC 4.4", fndecl
);
5204 warned_n_a_f
= true;
5212 /* Expand the operands. */
5213 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5214 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
5216 return expand_atomic_fetch_op (target
, mem
, val
, code
, MEMMODEL_SEQ_CST
,
5220 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5221 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5222 true if this is the boolean form. TARGET is a place for us to store the
5223 results; this is NOT optional if IS_BOOL is true. */
5226 expand_builtin_compare_and_swap (enum machine_mode mode
, tree exp
,
5227 bool is_bool
, rtx target
)
5229 rtx old_val
, new_val
, mem
;
5232 /* Expand the operands. */
5233 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5234 old_val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
5235 new_val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 2), mode
);
5237 pbool
= poval
= NULL
;
5238 if (target
!= const0_rtx
)
5245 if (!expand_atomic_compare_and_swap (pbool
, poval
, mem
, old_val
, new_val
,
5246 false, MEMMODEL_SEQ_CST
,
5253 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5254 general form is actually an atomic exchange, and some targets only
5255 support a reduced form with the second argument being a constant 1.
5256 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5260 expand_builtin_sync_lock_test_and_set (enum machine_mode mode
, tree exp
,
5265 /* Expand the operands. */
5266 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5267 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
5269 return expand_sync_lock_test_and_set (target
, mem
, val
);
5272 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5275 expand_builtin_sync_lock_release (enum machine_mode mode
, tree exp
)
5279 /* Expand the operands. */
5280 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5282 expand_atomic_store (mem
, const0_rtx
, MEMMODEL_RELEASE
, true);
5285 /* Given an integer representing an ``enum memmodel'', verify its
5286 correctness and return the memory model enum. */
5288 static enum memmodel
5289 get_memmodel (tree exp
)
5292 unsigned HOST_WIDE_INT val
;
5294 /* If the parameter is not a constant, it's a run time value so we'll just
5295 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5296 if (TREE_CODE (exp
) != INTEGER_CST
)
5297 return MEMMODEL_SEQ_CST
;
5299 op
= expand_normal (exp
);
5302 if (targetm
.memmodel_check
)
5303 val
= targetm
.memmodel_check (val
);
5304 else if (val
& ~MEMMODEL_MASK
)
5306 warning (OPT_Winvalid_memory_model
,
5307 "Unknown architecture specifier in memory model to builtin.");
5308 return MEMMODEL_SEQ_CST
;
5311 if ((INTVAL(op
) & MEMMODEL_MASK
) >= MEMMODEL_LAST
)
5313 warning (OPT_Winvalid_memory_model
,
5314 "invalid memory model argument to builtin");
5315 return MEMMODEL_SEQ_CST
;
5318 return (enum memmodel
) val
;
5321 /* Expand the __atomic_exchange intrinsic:
5322 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5323 EXP is the CALL_EXPR.
5324 TARGET is an optional place for us to store the results. */
5327 expand_builtin_atomic_exchange (enum machine_mode mode
, tree exp
, rtx target
)
5330 enum memmodel model
;
5332 model
= get_memmodel (CALL_EXPR_ARG (exp
, 2));
5333 if ((model
& MEMMODEL_MASK
) == MEMMODEL_CONSUME
)
5335 error ("invalid memory model for %<__atomic_exchange%>");
5339 if (!flag_inline_atomics
)
5342 /* Expand the operands. */
5343 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5344 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
5346 return expand_atomic_exchange (target
, mem
, val
, model
);
5349 /* Expand the __atomic_compare_exchange intrinsic:
5350 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5351 TYPE desired, BOOL weak,
5352 enum memmodel success,
5353 enum memmodel failure)
5354 EXP is the CALL_EXPR.
5355 TARGET is an optional place for us to store the results. */
5358 expand_builtin_atomic_compare_exchange (enum machine_mode mode
, tree exp
,
5361 rtx expect
, desired
, mem
, oldval
;
5362 enum memmodel success
, failure
;
5366 success
= get_memmodel (CALL_EXPR_ARG (exp
, 4));
5367 failure
= get_memmodel (CALL_EXPR_ARG (exp
, 5));
5369 if ((failure
& MEMMODEL_MASK
) == MEMMODEL_RELEASE
5370 || (failure
& MEMMODEL_MASK
) == MEMMODEL_ACQ_REL
)
5372 error ("invalid failure memory model for %<__atomic_compare_exchange%>");
5376 if (failure
> success
)
5378 error ("failure memory model cannot be stronger than success "
5379 "memory model for %<__atomic_compare_exchange%>");
5383 if (!flag_inline_atomics
)
5386 /* Expand the operands. */
5387 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5389 expect
= expand_normal (CALL_EXPR_ARG (exp
, 1));
5390 expect
= convert_memory_address (Pmode
, expect
);
5391 expect
= gen_rtx_MEM (mode
, expect
);
5392 desired
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 2), mode
);
5394 weak
= CALL_EXPR_ARG (exp
, 3);
5396 if (host_integerp (weak
, 0) && tree_low_cst (weak
, 0) != 0)
5400 if (!expand_atomic_compare_and_swap ((target
== const0_rtx
? NULL
: &target
),
5401 &oldval
, mem
, oldval
, desired
,
5402 is_weak
, success
, failure
))
5405 if (oldval
!= expect
)
5406 emit_move_insn (expect
, oldval
);
5411 /* Expand the __atomic_load intrinsic:
5412 TYPE __atomic_load (TYPE *object, enum memmodel)
5413 EXP is the CALL_EXPR.
5414 TARGET is an optional place for us to store the results. */
5417 expand_builtin_atomic_load (enum machine_mode mode
, tree exp
, rtx target
)
5420 enum memmodel model
;
5422 model
= get_memmodel (CALL_EXPR_ARG (exp
, 1));
5423 if ((model
& MEMMODEL_MASK
) == MEMMODEL_RELEASE
5424 || (model
& MEMMODEL_MASK
) == MEMMODEL_ACQ_REL
)
5426 error ("invalid memory model for %<__atomic_load%>");
5430 if (!flag_inline_atomics
)
5433 /* Expand the operand. */
5434 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5436 return expand_atomic_load (target
, mem
, model
);
5440 /* Expand the __atomic_store intrinsic:
5441 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5442 EXP is the CALL_EXPR.
5443 TARGET is an optional place for us to store the results. */
5446 expand_builtin_atomic_store (enum machine_mode mode
, tree exp
)
5449 enum memmodel model
;
5451 model
= get_memmodel (CALL_EXPR_ARG (exp
, 2));
5452 if ((model
& MEMMODEL_MASK
) != MEMMODEL_RELAXED
5453 && (model
& MEMMODEL_MASK
) != MEMMODEL_SEQ_CST
5454 && (model
& MEMMODEL_MASK
) != MEMMODEL_RELEASE
)
5456 error ("invalid memory model for %<__atomic_store%>");
5460 if (!flag_inline_atomics
)
5463 /* Expand the operands. */
5464 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5465 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
5467 return expand_atomic_store (mem
, val
, model
, false);
5470 /* Expand the __atomic_fetch_XXX intrinsic:
5471 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5472 EXP is the CALL_EXPR.
5473 TARGET is an optional place for us to store the results.
5474 CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
5475 FETCH_AFTER is true if returning the result of the operation.
5476 FETCH_AFTER is false if returning the value before the operation.
5477 IGNORE is true if the result is not used.
5478 EXT_CALL is the correct builtin for an external call if this cannot be
5479 resolved to an instruction sequence. */
5482 expand_builtin_atomic_fetch_op (enum machine_mode mode
, tree exp
, rtx target
,
5483 enum rtx_code code
, bool fetch_after
,
5484 bool ignore
, enum built_in_function ext_call
)
5487 enum memmodel model
;
5491 model
= get_memmodel (CALL_EXPR_ARG (exp
, 2));
5493 /* Expand the operands. */
5494 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5495 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
5497 /* Only try generating instructions if inlining is turned on. */
5498 if (flag_inline_atomics
)
5500 ret
= expand_atomic_fetch_op (target
, mem
, val
, code
, model
, fetch_after
);
5505 /* Return if a different routine isn't needed for the library call. */
5506 if (ext_call
== BUILT_IN_NONE
)
5509 /* Change the call to the specified function. */
5510 fndecl
= get_callee_fndecl (exp
);
5511 addr
= CALL_EXPR_FN (exp
);
5514 gcc_assert (TREE_OPERAND (addr
, 0) == fndecl
);
5515 TREE_OPERAND (addr
, 0) = builtin_decl_explicit(ext_call
);
5517 /* Expand the call here so we can emit trailing code. */
5518 ret
= expand_call (exp
, target
, ignore
);
5520 /* Replace the original function just in case it matters. */
5521 TREE_OPERAND (addr
, 0) = fndecl
;
5523 /* Then issue the arithmetic correction to return the right result. */
5528 ret
= expand_simple_binop (mode
, AND
, ret
, val
, NULL_RTX
, true,
5530 ret
= expand_simple_unop (mode
, NOT
, ret
, target
, true);
5533 ret
= expand_simple_binop (mode
, code
, ret
, val
, target
, true,
5540 #ifndef HAVE_atomic_clear
5541 # define HAVE_atomic_clear 0
5542 # define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
5545 /* Expand an atomic clear operation.
5546 void _atomic_clear (BOOL *obj, enum memmodel)
5547 EXP is the call expression. */
5550 expand_builtin_atomic_clear (tree exp
)
5552 enum machine_mode mode
;
5554 enum memmodel model
;
5556 mode
= mode_for_size (BOOL_TYPE_SIZE
, MODE_INT
, 0);
5557 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5558 model
= get_memmodel (CALL_EXPR_ARG (exp
, 1));
5560 if ((model
& MEMMODEL_MASK
) == MEMMODEL_ACQUIRE
5561 || (model
& MEMMODEL_MASK
) == MEMMODEL_ACQ_REL
)
5563 error ("invalid memory model for %<__atomic_store%>");
5567 if (HAVE_atomic_clear
)
5569 emit_insn (gen_atomic_clear (mem
, model
));
5573 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
5574 Failing that, a store is issued by __atomic_store. The only way this can
5575 fail is if the bool type is larger than a word size. Unlikely, but
5576 handle it anyway for completeness. Assume a single threaded model since
5577 there is no atomic support in this case, and no barriers are required. */
5578 ret
= expand_atomic_store (mem
, const0_rtx
, model
, true);
5580 emit_move_insn (mem
, const0_rtx
);
5584 /* Expand an atomic test_and_set operation.
5585 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
5586 EXP is the call expression. */
5589 expand_builtin_atomic_test_and_set (tree exp
, rtx target
)
5592 enum memmodel model
;
5593 enum machine_mode mode
;
5595 mode
= mode_for_size (BOOL_TYPE_SIZE
, MODE_INT
, 0);
5596 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5597 model
= get_memmodel (CALL_EXPR_ARG (exp
, 1));
5599 return expand_atomic_test_and_set (target
, mem
, model
);
5603 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5604 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5607 fold_builtin_atomic_always_lock_free (tree arg0
, tree arg1
)
5610 enum machine_mode mode
;
5611 unsigned int mode_align
, type_align
;
5613 if (TREE_CODE (arg0
) != INTEGER_CST
)
5616 size
= INTVAL (expand_normal (arg0
)) * BITS_PER_UNIT
;
5617 mode
= mode_for_size (size
, MODE_INT
, 0);
5618 mode_align
= GET_MODE_ALIGNMENT (mode
);
5620 if (TREE_CODE (arg1
) == INTEGER_CST
&& INTVAL (expand_normal (arg1
)) == 0)
5621 type_align
= mode_align
;
5624 tree ttype
= TREE_TYPE (arg1
);
5626 /* This function is usually invoked and folded immediately by the front
5627 end before anything else has a chance to look at it. The pointer
5628 parameter at this point is usually cast to a void *, so check for that
5629 and look past the cast. */
5630 if (TREE_CODE (arg1
) == NOP_EXPR
&& POINTER_TYPE_P (ttype
)
5631 && VOID_TYPE_P (TREE_TYPE (ttype
)))
5632 arg1
= TREE_OPERAND (arg1
, 0);
5634 ttype
= TREE_TYPE (arg1
);
5635 gcc_assert (POINTER_TYPE_P (ttype
));
5637 /* Get the underlying type of the object. */
5638 ttype
= TREE_TYPE (ttype
);
5639 type_align
= TYPE_ALIGN (ttype
);
5642 /* If the object has smaller alignment, the the lock free routines cannot
5644 if (type_align
< mode_align
)
5645 return boolean_false_node
;
5647 /* Check if a compare_and_swap pattern exists for the mode which represents
5648 the required size. The pattern is not allowed to fail, so the existence
5649 of the pattern indicates support is present. */
5650 if (can_compare_and_swap_p (mode
, true))
5651 return boolean_true_node
;
5653 return boolean_false_node
;
5656 /* Return true if the parameters to call EXP represent an object which will
5657 always generate lock free instructions. The first argument represents the
5658 size of the object, and the second parameter is a pointer to the object
5659 itself. If NULL is passed for the object, then the result is based on
5660 typical alignment for an object of the specified size. Otherwise return
5664 expand_builtin_atomic_always_lock_free (tree exp
)
5667 tree arg0
= CALL_EXPR_ARG (exp
, 0);
5668 tree arg1
= CALL_EXPR_ARG (exp
, 1);
5670 if (TREE_CODE (arg0
) != INTEGER_CST
)
5672 error ("non-constant argument 1 to __atomic_always_lock_free");
5676 size
= fold_builtin_atomic_always_lock_free (arg0
, arg1
);
5677 if (size
== boolean_true_node
)
5682 /* Return a one or zero if it can be determined that object ARG1 of size ARG
5683 is lock free on this architecture. */
5686 fold_builtin_atomic_is_lock_free (tree arg0
, tree arg1
)
5688 if (!flag_inline_atomics
)
5691 /* If it isn't always lock free, don't generate a result. */
5692 if (fold_builtin_atomic_always_lock_free (arg0
, arg1
) == boolean_true_node
)
5693 return boolean_true_node
;
5698 /* Return true if the parameters to call EXP represent an object which will
5699 always generate lock free instructions. The first argument represents the
5700 size of the object, and the second parameter is a pointer to the object
5701 itself. If NULL is passed for the object, then the result is based on
5702 typical alignment for an object of the specified size. Otherwise return
5706 expand_builtin_atomic_is_lock_free (tree exp
)
5709 tree arg0
= CALL_EXPR_ARG (exp
, 0);
5710 tree arg1
= CALL_EXPR_ARG (exp
, 1);
5712 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0
)))
5714 error ("non-integer argument 1 to __atomic_is_lock_free");
5718 if (!flag_inline_atomics
)
5721 /* If the value is known at compile time, return the RTX for it. */
5722 size
= fold_builtin_atomic_is_lock_free (arg0
, arg1
);
5723 if (size
== boolean_true_node
)
5729 /* Expand the __atomic_thread_fence intrinsic:
5730 void __atomic_thread_fence (enum memmodel)
5731 EXP is the CALL_EXPR. */
5734 expand_builtin_atomic_thread_fence (tree exp
)
5736 enum memmodel model
= get_memmodel (CALL_EXPR_ARG (exp
, 0));
5737 expand_mem_thread_fence (model
);
5740 /* Expand the __atomic_signal_fence intrinsic:
5741 void __atomic_signal_fence (enum memmodel)
5742 EXP is the CALL_EXPR. */
5745 expand_builtin_atomic_signal_fence (tree exp
)
5747 enum memmodel model
= get_memmodel (CALL_EXPR_ARG (exp
, 0));
5748 expand_mem_signal_fence (model
);
5751 /* Expand the __sync_synchronize intrinsic. */
5754 expand_builtin_sync_synchronize (void)
5756 expand_mem_thread_fence (MEMMODEL_SEQ_CST
);
5760 expand_builtin_thread_pointer (tree exp
, rtx target
)
5762 enum insn_code icode
;
5763 if (!validate_arglist (exp
, VOID_TYPE
))
5765 icode
= direct_optab_handler (get_thread_pointer_optab
, Pmode
);
5766 if (icode
!= CODE_FOR_nothing
)
5768 struct expand_operand op
;
5769 if (!REG_P (target
) || GET_MODE (target
) != Pmode
)
5770 target
= gen_reg_rtx (Pmode
);
5771 create_output_operand (&op
, target
, Pmode
);
5772 expand_insn (icode
, 1, &op
);
5775 error ("__builtin_thread_pointer is not supported on this target");
5780 expand_builtin_set_thread_pointer (tree exp
)
5782 enum insn_code icode
;
5783 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
5785 icode
= direct_optab_handler (set_thread_pointer_optab
, Pmode
);
5786 if (icode
!= CODE_FOR_nothing
)
5788 struct expand_operand op
;
5789 rtx val
= expand_expr (CALL_EXPR_ARG (exp
, 0), NULL_RTX
,
5790 Pmode
, EXPAND_NORMAL
);
5791 create_input_operand (&op
, val
, Pmode
);
5792 expand_insn (icode
, 1, &op
);
5795 error ("__builtin_set_thread_pointer is not supported on this target");
5799 /* Expand an expression EXP that calls a built-in function,
5800 with result going to TARGET if that's convenient
5801 (and in mode MODE if that's convenient).
5802 SUBTARGET may be used as the target for computing one of EXP's operands.
5803 IGNORE is nonzero if the value is to be ignored. */
5806 expand_builtin (tree exp
, rtx target
, rtx subtarget
, enum machine_mode mode
,
5809 tree fndecl
= get_callee_fndecl (exp
);
5810 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
5811 enum machine_mode target_mode
= TYPE_MODE (TREE_TYPE (exp
));
5814 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
5815 return targetm
.expand_builtin (exp
, target
, subtarget
, mode
, ignore
);
5817 /* When not optimizing, generate calls to library functions for a certain
5820 && !called_as_built_in (fndecl
)
5821 && fcode
!= BUILT_IN_ALLOCA
5822 && fcode
!= BUILT_IN_ALLOCA_WITH_ALIGN
5823 && fcode
!= BUILT_IN_FREE
)
5824 return expand_call (exp
, target
, ignore
);
5826 /* The built-in function expanders test for target == const0_rtx
5827 to determine whether the function's result will be ignored. */
5829 target
= const0_rtx
;
5831 /* If the result of a pure or const built-in function is ignored, and
5832 none of its arguments are volatile, we can avoid expanding the
5833 built-in call and just evaluate the arguments for side-effects. */
5834 if (target
== const0_rtx
5835 && ((flags
= flags_from_decl_or_type (fndecl
)) & (ECF_CONST
| ECF_PURE
))
5836 && !(flags
& ECF_LOOPING_CONST_OR_PURE
))
5838 bool volatilep
= false;
5840 call_expr_arg_iterator iter
;
5842 FOR_EACH_CALL_EXPR_ARG (arg
, iter
, exp
)
5843 if (TREE_THIS_VOLATILE (arg
))
5851 FOR_EACH_CALL_EXPR_ARG (arg
, iter
, exp
)
5852 expand_expr (arg
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
5859 CASE_FLT_FN (BUILT_IN_FABS
):
5860 target
= expand_builtin_fabs (exp
, target
, subtarget
);
5865 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
5866 target
= expand_builtin_copysign (exp
, target
, subtarget
);
5871 /* Just do a normal library call if we were unable to fold
5873 CASE_FLT_FN (BUILT_IN_CABS
):
5876 CASE_FLT_FN (BUILT_IN_EXP
):
5877 CASE_FLT_FN (BUILT_IN_EXP10
):
5878 CASE_FLT_FN (BUILT_IN_POW10
):
5879 CASE_FLT_FN (BUILT_IN_EXP2
):
5880 CASE_FLT_FN (BUILT_IN_EXPM1
):
5881 CASE_FLT_FN (BUILT_IN_LOGB
):
5882 CASE_FLT_FN (BUILT_IN_LOG
):
5883 CASE_FLT_FN (BUILT_IN_LOG10
):
5884 CASE_FLT_FN (BUILT_IN_LOG2
):
5885 CASE_FLT_FN (BUILT_IN_LOG1P
):
5886 CASE_FLT_FN (BUILT_IN_TAN
):
5887 CASE_FLT_FN (BUILT_IN_ASIN
):
5888 CASE_FLT_FN (BUILT_IN_ACOS
):
5889 CASE_FLT_FN (BUILT_IN_ATAN
):
5890 CASE_FLT_FN (BUILT_IN_SIGNIFICAND
):
5891 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5892 because of possible accuracy problems. */
5893 if (! flag_unsafe_math_optimizations
)
5895 CASE_FLT_FN (BUILT_IN_SQRT
):
5896 CASE_FLT_FN (BUILT_IN_FLOOR
):
5897 CASE_FLT_FN (BUILT_IN_CEIL
):
5898 CASE_FLT_FN (BUILT_IN_TRUNC
):
5899 CASE_FLT_FN (BUILT_IN_ROUND
):
5900 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
5901 CASE_FLT_FN (BUILT_IN_RINT
):
5902 target
= expand_builtin_mathfn (exp
, target
, subtarget
);
5907 CASE_FLT_FN (BUILT_IN_FMA
):
5908 target
= expand_builtin_mathfn_ternary (exp
, target
, subtarget
);
5913 CASE_FLT_FN (BUILT_IN_ILOGB
):
5914 if (! flag_unsafe_math_optimizations
)
5916 CASE_FLT_FN (BUILT_IN_ISINF
):
5917 CASE_FLT_FN (BUILT_IN_FINITE
):
5918 case BUILT_IN_ISFINITE
:
5919 case BUILT_IN_ISNORMAL
:
5920 target
= expand_builtin_interclass_mathfn (exp
, target
);
5925 CASE_FLT_FN (BUILT_IN_ICEIL
):
5926 CASE_FLT_FN (BUILT_IN_LCEIL
):
5927 CASE_FLT_FN (BUILT_IN_LLCEIL
):
5928 CASE_FLT_FN (BUILT_IN_LFLOOR
):
5929 CASE_FLT_FN (BUILT_IN_IFLOOR
):
5930 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
5931 target
= expand_builtin_int_roundingfn (exp
, target
);
5936 CASE_FLT_FN (BUILT_IN_IRINT
):
5937 CASE_FLT_FN (BUILT_IN_LRINT
):
5938 CASE_FLT_FN (BUILT_IN_LLRINT
):
5939 CASE_FLT_FN (BUILT_IN_IROUND
):
5940 CASE_FLT_FN (BUILT_IN_LROUND
):
5941 CASE_FLT_FN (BUILT_IN_LLROUND
):
5942 target
= expand_builtin_int_roundingfn_2 (exp
, target
);
5947 CASE_FLT_FN (BUILT_IN_POWI
):
5948 target
= expand_builtin_powi (exp
, target
);
5953 CASE_FLT_FN (BUILT_IN_ATAN2
):
5954 CASE_FLT_FN (BUILT_IN_LDEXP
):
5955 CASE_FLT_FN (BUILT_IN_SCALB
):
5956 CASE_FLT_FN (BUILT_IN_SCALBN
):
5957 CASE_FLT_FN (BUILT_IN_SCALBLN
):
5958 if (! flag_unsafe_math_optimizations
)
5961 CASE_FLT_FN (BUILT_IN_FMOD
):
5962 CASE_FLT_FN (BUILT_IN_REMAINDER
):
5963 CASE_FLT_FN (BUILT_IN_DREM
):
5964 CASE_FLT_FN (BUILT_IN_POW
):
5965 target
= expand_builtin_mathfn_2 (exp
, target
, subtarget
);
5970 CASE_FLT_FN (BUILT_IN_CEXPI
):
5971 target
= expand_builtin_cexpi (exp
, target
);
5972 gcc_assert (target
);
5975 CASE_FLT_FN (BUILT_IN_SIN
):
5976 CASE_FLT_FN (BUILT_IN_COS
):
5977 if (! flag_unsafe_math_optimizations
)
5979 target
= expand_builtin_mathfn_3 (exp
, target
, subtarget
);
5984 CASE_FLT_FN (BUILT_IN_SINCOS
):
5985 if (! flag_unsafe_math_optimizations
)
5987 target
= expand_builtin_sincos (exp
);
5992 case BUILT_IN_APPLY_ARGS
:
5993 return expand_builtin_apply_args ();
5995 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5996 FUNCTION with a copy of the parameters described by
5997 ARGUMENTS, and ARGSIZE. It returns a block of memory
5998 allocated on the stack into which is stored all the registers
5999 that might possibly be used for returning the result of a
6000 function. ARGUMENTS is the value returned by
6001 __builtin_apply_args. ARGSIZE is the number of bytes of
6002 arguments that must be copied. ??? How should this value be
6003 computed? We'll also need a safe worst case value for varargs
6005 case BUILT_IN_APPLY
:
6006 if (!validate_arglist (exp
, POINTER_TYPE
,
6007 POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
)
6008 && !validate_arglist (exp
, REFERENCE_TYPE
,
6009 POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
6015 ops
[0] = expand_normal (CALL_EXPR_ARG (exp
, 0));
6016 ops
[1] = expand_normal (CALL_EXPR_ARG (exp
, 1));
6017 ops
[2] = expand_normal (CALL_EXPR_ARG (exp
, 2));
6019 return expand_builtin_apply (ops
[0], ops
[1], ops
[2]);
6022 /* __builtin_return (RESULT) causes the function to return the
6023 value described by RESULT. RESULT is address of the block of
6024 memory returned by __builtin_apply. */
6025 case BUILT_IN_RETURN
:
6026 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6027 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp
, 0)));
6030 case BUILT_IN_SAVEREGS
:
6031 return expand_builtin_saveregs ();
6033 case BUILT_IN_VA_ARG_PACK
:
6034 /* All valid uses of __builtin_va_arg_pack () are removed during
6036 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp
);
6039 case BUILT_IN_VA_ARG_PACK_LEN
:
6040 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6042 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp
);
6045 /* Return the address of the first anonymous stack arg. */
6046 case BUILT_IN_NEXT_ARG
:
6047 if (fold_builtin_next_arg (exp
, false))
6049 return expand_builtin_next_arg ();
6051 case BUILT_IN_CLEAR_CACHE
:
6052 target
= expand_builtin___clear_cache (exp
);
6057 case BUILT_IN_CLASSIFY_TYPE
:
6058 return expand_builtin_classify_type (exp
);
6060 case BUILT_IN_CONSTANT_P
:
6063 case BUILT_IN_FRAME_ADDRESS
:
6064 case BUILT_IN_RETURN_ADDRESS
:
6065 return expand_builtin_frame_address (fndecl
, exp
);
6067 /* Returns the address of the area where the structure is returned.
6069 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS
:
6070 if (call_expr_nargs (exp
) != 0
6071 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl
)))
6072 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl
))))
6075 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl
)), 0);
6077 case BUILT_IN_ALLOCA
:
6078 case BUILT_IN_ALLOCA_WITH_ALIGN
:
6079 /* If the allocation stems from the declaration of a variable-sized
6080 object, it cannot accumulate. */
6081 target
= expand_builtin_alloca (exp
, CALL_ALLOCA_FOR_VAR_P (exp
));
6086 case BUILT_IN_STACK_SAVE
:
6087 return expand_stack_save ();
6089 case BUILT_IN_STACK_RESTORE
:
6090 expand_stack_restore (CALL_EXPR_ARG (exp
, 0));
6093 case BUILT_IN_BSWAP16
:
6094 case BUILT_IN_BSWAP32
:
6095 case BUILT_IN_BSWAP64
:
6096 target
= expand_builtin_bswap (target_mode
, exp
, target
, subtarget
);
6101 CASE_INT_FN (BUILT_IN_FFS
):
6102 case BUILT_IN_FFSIMAX
:
6103 target
= expand_builtin_unop (target_mode
, exp
, target
,
6104 subtarget
, ffs_optab
);
6109 CASE_INT_FN (BUILT_IN_CLZ
):
6110 case BUILT_IN_CLZIMAX
:
6111 target
= expand_builtin_unop (target_mode
, exp
, target
,
6112 subtarget
, clz_optab
);
6117 CASE_INT_FN (BUILT_IN_CTZ
):
6118 case BUILT_IN_CTZIMAX
:
6119 target
= expand_builtin_unop (target_mode
, exp
, target
,
6120 subtarget
, ctz_optab
);
6125 CASE_INT_FN (BUILT_IN_CLRSB
):
6126 case BUILT_IN_CLRSBIMAX
:
6127 target
= expand_builtin_unop (target_mode
, exp
, target
,
6128 subtarget
, clrsb_optab
);
6133 CASE_INT_FN (BUILT_IN_POPCOUNT
):
6134 case BUILT_IN_POPCOUNTIMAX
:
6135 target
= expand_builtin_unop (target_mode
, exp
, target
,
6136 subtarget
, popcount_optab
);
6141 CASE_INT_FN (BUILT_IN_PARITY
):
6142 case BUILT_IN_PARITYIMAX
:
6143 target
= expand_builtin_unop (target_mode
, exp
, target
,
6144 subtarget
, parity_optab
);
6149 case BUILT_IN_STRLEN
:
6150 target
= expand_builtin_strlen (exp
, target
, target_mode
);
6155 case BUILT_IN_STRCPY
:
6156 target
= expand_builtin_strcpy (exp
, target
);
6161 case BUILT_IN_STRNCPY
:
6162 target
= expand_builtin_strncpy (exp
, target
);
6167 case BUILT_IN_STPCPY
:
6168 target
= expand_builtin_stpcpy (exp
, target
, mode
);
6173 case BUILT_IN_MEMCPY
:
6174 target
= expand_builtin_memcpy (exp
, target
);
6179 case BUILT_IN_MEMPCPY
:
6180 target
= expand_builtin_mempcpy (exp
, target
, mode
);
6185 case BUILT_IN_MEMSET
:
6186 target
= expand_builtin_memset (exp
, target
, mode
);
6191 case BUILT_IN_BZERO
:
6192 target
= expand_builtin_bzero (exp
);
6197 case BUILT_IN_STRCMP
:
6198 target
= expand_builtin_strcmp (exp
, target
);
6203 case BUILT_IN_STRNCMP
:
6204 target
= expand_builtin_strncmp (exp
, target
, mode
);
6210 case BUILT_IN_MEMCMP
:
6211 target
= expand_builtin_memcmp (exp
, target
, mode
);
6216 case BUILT_IN_SETJMP
:
6217 /* This should have been lowered to the builtins below. */
6220 case BUILT_IN_SETJMP_SETUP
:
6221 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6222 and the receiver label. */
6223 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
6225 rtx buf_addr
= expand_expr (CALL_EXPR_ARG (exp
, 0), subtarget
,
6226 VOIDmode
, EXPAND_NORMAL
);
6227 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 1), 0);
6228 rtx label_r
= label_rtx (label
);
6230 /* This is copied from the handling of non-local gotos. */
6231 expand_builtin_setjmp_setup (buf_addr
, label_r
);
6232 nonlocal_goto_handler_labels
6233 = gen_rtx_EXPR_LIST (VOIDmode
, label_r
,
6234 nonlocal_goto_handler_labels
);
6235 /* ??? Do not let expand_label treat us as such since we would
6236 not want to be both on the list of non-local labels and on
6237 the list of forced labels. */
6238 FORCED_LABEL (label
) = 0;
6243 case BUILT_IN_SETJMP_DISPATCHER
:
6244 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6245 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6247 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 0), 0);
6248 rtx label_r
= label_rtx (label
);
6250 /* Remove the dispatcher label from the list of non-local labels
6251 since the receiver labels have been added to it above. */
6252 remove_node_from_expr_list (label_r
, &nonlocal_goto_handler_labels
);
6257 case BUILT_IN_SETJMP_RECEIVER
:
6258 /* __builtin_setjmp_receiver is passed the receiver label. */
6259 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6261 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 0), 0);
6262 rtx label_r
= label_rtx (label
);
6264 expand_builtin_setjmp_receiver (label_r
);
6269 /* __builtin_longjmp is passed a pointer to an array of five words.
6270 It's similar to the C library longjmp function but works with
6271 __builtin_setjmp above. */
6272 case BUILT_IN_LONGJMP
:
6273 if (validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
6275 rtx buf_addr
= expand_expr (CALL_EXPR_ARG (exp
, 0), subtarget
,
6276 VOIDmode
, EXPAND_NORMAL
);
6277 rtx value
= expand_normal (CALL_EXPR_ARG (exp
, 1));
6279 if (value
!= const1_rtx
)
6281 error ("%<__builtin_longjmp%> second argument must be 1");
6285 expand_builtin_longjmp (buf_addr
, value
);
6290 case BUILT_IN_NONLOCAL_GOTO
:
6291 target
= expand_builtin_nonlocal_goto (exp
);
6296 /* This updates the setjmp buffer that is its argument with the value
6297 of the current stack pointer. */
6298 case BUILT_IN_UPDATE_SETJMP_BUF
:
6299 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6302 = expand_normal (CALL_EXPR_ARG (exp
, 0));
6304 expand_builtin_update_setjmp_buf (buf_addr
);
6310 expand_builtin_trap ();
6313 case BUILT_IN_UNREACHABLE
:
6314 expand_builtin_unreachable ();
6317 CASE_FLT_FN (BUILT_IN_SIGNBIT
):
6318 case BUILT_IN_SIGNBITD32
:
6319 case BUILT_IN_SIGNBITD64
:
6320 case BUILT_IN_SIGNBITD128
:
6321 target
= expand_builtin_signbit (exp
, target
);
6326 /* Various hooks for the DWARF 2 __throw routine. */
6327 case BUILT_IN_UNWIND_INIT
:
6328 expand_builtin_unwind_init ();
6330 case BUILT_IN_DWARF_CFA
:
6331 return virtual_cfa_rtx
;
6332 #ifdef DWARF2_UNWIND_INFO
6333 case BUILT_IN_DWARF_SP_COLUMN
:
6334 return expand_builtin_dwarf_sp_column ();
6335 case BUILT_IN_INIT_DWARF_REG_SIZES
:
6336 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp
, 0));
6339 case BUILT_IN_FROB_RETURN_ADDR
:
6340 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp
, 0));
6341 case BUILT_IN_EXTRACT_RETURN_ADDR
:
6342 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp
, 0));
6343 case BUILT_IN_EH_RETURN
:
6344 expand_builtin_eh_return (CALL_EXPR_ARG (exp
, 0),
6345 CALL_EXPR_ARG (exp
, 1));
6347 #ifdef EH_RETURN_DATA_REGNO
6348 case BUILT_IN_EH_RETURN_DATA_REGNO
:
6349 return expand_builtin_eh_return_data_regno (exp
);
6351 case BUILT_IN_EXTEND_POINTER
:
6352 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp
, 0));
6353 case BUILT_IN_EH_POINTER
:
6354 return expand_builtin_eh_pointer (exp
);
6355 case BUILT_IN_EH_FILTER
:
6356 return expand_builtin_eh_filter (exp
);
6357 case BUILT_IN_EH_COPY_VALUES
:
6358 return expand_builtin_eh_copy_values (exp
);
6360 case BUILT_IN_VA_START
:
6361 return expand_builtin_va_start (exp
);
6362 case BUILT_IN_VA_END
:
6363 return expand_builtin_va_end (exp
);
6364 case BUILT_IN_VA_COPY
:
6365 return expand_builtin_va_copy (exp
);
6366 case BUILT_IN_EXPECT
:
6367 return expand_builtin_expect (exp
, target
);
6368 case BUILT_IN_ASSUME_ALIGNED
:
6369 return expand_builtin_assume_aligned (exp
, target
);
6370 case BUILT_IN_PREFETCH
:
6371 expand_builtin_prefetch (exp
);
6374 case BUILT_IN_INIT_TRAMPOLINE
:
6375 return expand_builtin_init_trampoline (exp
, true);
6376 case BUILT_IN_INIT_HEAP_TRAMPOLINE
:
6377 return expand_builtin_init_trampoline (exp
, false);
6378 case BUILT_IN_ADJUST_TRAMPOLINE
:
6379 return expand_builtin_adjust_trampoline (exp
);
6382 case BUILT_IN_EXECL
:
6383 case BUILT_IN_EXECV
:
6384 case BUILT_IN_EXECLP
:
6385 case BUILT_IN_EXECLE
:
6386 case BUILT_IN_EXECVP
:
6387 case BUILT_IN_EXECVE
:
6388 target
= expand_builtin_fork_or_exec (fndecl
, exp
, target
, ignore
);
6393 case BUILT_IN_SYNC_FETCH_AND_ADD_1
:
6394 case BUILT_IN_SYNC_FETCH_AND_ADD_2
:
6395 case BUILT_IN_SYNC_FETCH_AND_ADD_4
:
6396 case BUILT_IN_SYNC_FETCH_AND_ADD_8
:
6397 case BUILT_IN_SYNC_FETCH_AND_ADD_16
:
6398 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_ADD_1
);
6399 target
= expand_builtin_sync_operation (mode
, exp
, PLUS
, false, target
);
6404 case BUILT_IN_SYNC_FETCH_AND_SUB_1
:
6405 case BUILT_IN_SYNC_FETCH_AND_SUB_2
:
6406 case BUILT_IN_SYNC_FETCH_AND_SUB_4
:
6407 case BUILT_IN_SYNC_FETCH_AND_SUB_8
:
6408 case BUILT_IN_SYNC_FETCH_AND_SUB_16
:
6409 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_SUB_1
);
6410 target
= expand_builtin_sync_operation (mode
, exp
, MINUS
, false, target
);
6415 case BUILT_IN_SYNC_FETCH_AND_OR_1
:
6416 case BUILT_IN_SYNC_FETCH_AND_OR_2
:
6417 case BUILT_IN_SYNC_FETCH_AND_OR_4
:
6418 case BUILT_IN_SYNC_FETCH_AND_OR_8
:
6419 case BUILT_IN_SYNC_FETCH_AND_OR_16
:
6420 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_OR_1
);
6421 target
= expand_builtin_sync_operation (mode
, exp
, IOR
, false, target
);
6426 case BUILT_IN_SYNC_FETCH_AND_AND_1
:
6427 case BUILT_IN_SYNC_FETCH_AND_AND_2
:
6428 case BUILT_IN_SYNC_FETCH_AND_AND_4
:
6429 case BUILT_IN_SYNC_FETCH_AND_AND_8
:
6430 case BUILT_IN_SYNC_FETCH_AND_AND_16
:
6431 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_AND_1
);
6432 target
= expand_builtin_sync_operation (mode
, exp
, AND
, false, target
);
6437 case BUILT_IN_SYNC_FETCH_AND_XOR_1
:
6438 case BUILT_IN_SYNC_FETCH_AND_XOR_2
:
6439 case BUILT_IN_SYNC_FETCH_AND_XOR_4
:
6440 case BUILT_IN_SYNC_FETCH_AND_XOR_8
:
6441 case BUILT_IN_SYNC_FETCH_AND_XOR_16
:
6442 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_XOR_1
);
6443 target
= expand_builtin_sync_operation (mode
, exp
, XOR
, false, target
);
6448 case BUILT_IN_SYNC_FETCH_AND_NAND_1
:
6449 case BUILT_IN_SYNC_FETCH_AND_NAND_2
:
6450 case BUILT_IN_SYNC_FETCH_AND_NAND_4
:
6451 case BUILT_IN_SYNC_FETCH_AND_NAND_8
:
6452 case BUILT_IN_SYNC_FETCH_AND_NAND_16
:
6453 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_NAND_1
);
6454 target
= expand_builtin_sync_operation (mode
, exp
, NOT
, false, target
);
6459 case BUILT_IN_SYNC_ADD_AND_FETCH_1
:
6460 case BUILT_IN_SYNC_ADD_AND_FETCH_2
:
6461 case BUILT_IN_SYNC_ADD_AND_FETCH_4
:
6462 case BUILT_IN_SYNC_ADD_AND_FETCH_8
:
6463 case BUILT_IN_SYNC_ADD_AND_FETCH_16
:
6464 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_ADD_AND_FETCH_1
);
6465 target
= expand_builtin_sync_operation (mode
, exp
, PLUS
, true, target
);
6470 case BUILT_IN_SYNC_SUB_AND_FETCH_1
:
6471 case BUILT_IN_SYNC_SUB_AND_FETCH_2
:
6472 case BUILT_IN_SYNC_SUB_AND_FETCH_4
:
6473 case BUILT_IN_SYNC_SUB_AND_FETCH_8
:
6474 case BUILT_IN_SYNC_SUB_AND_FETCH_16
:
6475 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_SUB_AND_FETCH_1
);
6476 target
= expand_builtin_sync_operation (mode
, exp
, MINUS
, true, target
);
6481 case BUILT_IN_SYNC_OR_AND_FETCH_1
:
6482 case BUILT_IN_SYNC_OR_AND_FETCH_2
:
6483 case BUILT_IN_SYNC_OR_AND_FETCH_4
:
6484 case BUILT_IN_SYNC_OR_AND_FETCH_8
:
6485 case BUILT_IN_SYNC_OR_AND_FETCH_16
:
6486 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_OR_AND_FETCH_1
);
6487 target
= expand_builtin_sync_operation (mode
, exp
, IOR
, true, target
);
6492 case BUILT_IN_SYNC_AND_AND_FETCH_1
:
6493 case BUILT_IN_SYNC_AND_AND_FETCH_2
:
6494 case BUILT_IN_SYNC_AND_AND_FETCH_4
:
6495 case BUILT_IN_SYNC_AND_AND_FETCH_8
:
6496 case BUILT_IN_SYNC_AND_AND_FETCH_16
:
6497 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_AND_AND_FETCH_1
);
6498 target
= expand_builtin_sync_operation (mode
, exp
, AND
, true, target
);
6503 case BUILT_IN_SYNC_XOR_AND_FETCH_1
:
6504 case BUILT_IN_SYNC_XOR_AND_FETCH_2
:
6505 case BUILT_IN_SYNC_XOR_AND_FETCH_4
:
6506 case BUILT_IN_SYNC_XOR_AND_FETCH_8
:
6507 case BUILT_IN_SYNC_XOR_AND_FETCH_16
:
6508 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_XOR_AND_FETCH_1
);
6509 target
= expand_builtin_sync_operation (mode
, exp
, XOR
, true, target
);
6514 case BUILT_IN_SYNC_NAND_AND_FETCH_1
:
6515 case BUILT_IN_SYNC_NAND_AND_FETCH_2
:
6516 case BUILT_IN_SYNC_NAND_AND_FETCH_4
:
6517 case BUILT_IN_SYNC_NAND_AND_FETCH_8
:
6518 case BUILT_IN_SYNC_NAND_AND_FETCH_16
:
6519 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_NAND_AND_FETCH_1
);
6520 target
= expand_builtin_sync_operation (mode
, exp
, NOT
, true, target
);
6525 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1
:
6526 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2
:
6527 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4
:
6528 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8
:
6529 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16
:
6530 if (mode
== VOIDmode
)
6531 mode
= TYPE_MODE (boolean_type_node
);
6532 if (!target
|| !register_operand (target
, mode
))
6533 target
= gen_reg_rtx (mode
);
6535 mode
= get_builtin_sync_mode
6536 (fcode
- BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1
);
6537 target
= expand_builtin_compare_and_swap (mode
, exp
, true, target
);
6542 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1
:
6543 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2
:
6544 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4
:
6545 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8
:
6546 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16
:
6547 mode
= get_builtin_sync_mode
6548 (fcode
- BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1
);
6549 target
= expand_builtin_compare_and_swap (mode
, exp
, false, target
);
6554 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1
:
6555 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2
:
6556 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4
:
6557 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8
:
6558 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16
:
6559 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_LOCK_TEST_AND_SET_1
);
6560 target
= expand_builtin_sync_lock_test_and_set (mode
, exp
, target
);
6565 case BUILT_IN_SYNC_LOCK_RELEASE_1
:
6566 case BUILT_IN_SYNC_LOCK_RELEASE_2
:
6567 case BUILT_IN_SYNC_LOCK_RELEASE_4
:
6568 case BUILT_IN_SYNC_LOCK_RELEASE_8
:
6569 case BUILT_IN_SYNC_LOCK_RELEASE_16
:
6570 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_LOCK_RELEASE_1
);
6571 expand_builtin_sync_lock_release (mode
, exp
);
6574 case BUILT_IN_SYNC_SYNCHRONIZE
:
6575 expand_builtin_sync_synchronize ();
6578 case BUILT_IN_ATOMIC_EXCHANGE_1
:
6579 case BUILT_IN_ATOMIC_EXCHANGE_2
:
6580 case BUILT_IN_ATOMIC_EXCHANGE_4
:
6581 case BUILT_IN_ATOMIC_EXCHANGE_8
:
6582 case BUILT_IN_ATOMIC_EXCHANGE_16
:
6583 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_EXCHANGE_1
);
6584 target
= expand_builtin_atomic_exchange (mode
, exp
, target
);
6589 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
:
6590 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2
:
6591 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4
:
6592 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8
:
6593 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16
:
6595 unsigned int nargs
, z
;
6599 get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
);
6600 target
= expand_builtin_atomic_compare_exchange (mode
, exp
, target
);
6604 /* If this is turned into an external library call, the weak parameter
6605 must be dropped to match the expected parameter list. */
6606 nargs
= call_expr_nargs (exp
);
6607 vec
= VEC_alloc (tree
, gc
, nargs
- 1);
6608 for (z
= 0; z
< 3; z
++)
6609 VEC_quick_push (tree
, vec
, CALL_EXPR_ARG (exp
, z
));
6610 /* Skip the boolean weak parameter. */
6611 for (z
= 4; z
< 6; z
++)
6612 VEC_quick_push (tree
, vec
, CALL_EXPR_ARG (exp
, z
));
6613 exp
= build_call_vec (TREE_TYPE (exp
), CALL_EXPR_FN (exp
), vec
);
6617 case BUILT_IN_ATOMIC_LOAD_1
:
6618 case BUILT_IN_ATOMIC_LOAD_2
:
6619 case BUILT_IN_ATOMIC_LOAD_4
:
6620 case BUILT_IN_ATOMIC_LOAD_8
:
6621 case BUILT_IN_ATOMIC_LOAD_16
:
6622 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_LOAD_1
);
6623 target
= expand_builtin_atomic_load (mode
, exp
, target
);
6628 case BUILT_IN_ATOMIC_STORE_1
:
6629 case BUILT_IN_ATOMIC_STORE_2
:
6630 case BUILT_IN_ATOMIC_STORE_4
:
6631 case BUILT_IN_ATOMIC_STORE_8
:
6632 case BUILT_IN_ATOMIC_STORE_16
:
6633 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_STORE_1
);
6634 target
= expand_builtin_atomic_store (mode
, exp
);
6639 case BUILT_IN_ATOMIC_ADD_FETCH_1
:
6640 case BUILT_IN_ATOMIC_ADD_FETCH_2
:
6641 case BUILT_IN_ATOMIC_ADD_FETCH_4
:
6642 case BUILT_IN_ATOMIC_ADD_FETCH_8
:
6643 case BUILT_IN_ATOMIC_ADD_FETCH_16
:
6645 enum built_in_function lib
;
6646 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_ADD_FETCH_1
);
6647 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_ADD_1
+
6648 (fcode
- BUILT_IN_ATOMIC_ADD_FETCH_1
));
6649 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, PLUS
, true,
6655 case BUILT_IN_ATOMIC_SUB_FETCH_1
:
6656 case BUILT_IN_ATOMIC_SUB_FETCH_2
:
6657 case BUILT_IN_ATOMIC_SUB_FETCH_4
:
6658 case BUILT_IN_ATOMIC_SUB_FETCH_8
:
6659 case BUILT_IN_ATOMIC_SUB_FETCH_16
:
6661 enum built_in_function lib
;
6662 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_SUB_FETCH_1
);
6663 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_SUB_1
+
6664 (fcode
- BUILT_IN_ATOMIC_SUB_FETCH_1
));
6665 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, MINUS
, true,
6671 case BUILT_IN_ATOMIC_AND_FETCH_1
:
6672 case BUILT_IN_ATOMIC_AND_FETCH_2
:
6673 case BUILT_IN_ATOMIC_AND_FETCH_4
:
6674 case BUILT_IN_ATOMIC_AND_FETCH_8
:
6675 case BUILT_IN_ATOMIC_AND_FETCH_16
:
6677 enum built_in_function lib
;
6678 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_AND_FETCH_1
);
6679 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_AND_1
+
6680 (fcode
- BUILT_IN_ATOMIC_AND_FETCH_1
));
6681 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, AND
, true,
6687 case BUILT_IN_ATOMIC_NAND_FETCH_1
:
6688 case BUILT_IN_ATOMIC_NAND_FETCH_2
:
6689 case BUILT_IN_ATOMIC_NAND_FETCH_4
:
6690 case BUILT_IN_ATOMIC_NAND_FETCH_8
:
6691 case BUILT_IN_ATOMIC_NAND_FETCH_16
:
6693 enum built_in_function lib
;
6694 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_NAND_FETCH_1
);
6695 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_NAND_1
+
6696 (fcode
- BUILT_IN_ATOMIC_NAND_FETCH_1
));
6697 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, NOT
, true,
6703 case BUILT_IN_ATOMIC_XOR_FETCH_1
:
6704 case BUILT_IN_ATOMIC_XOR_FETCH_2
:
6705 case BUILT_IN_ATOMIC_XOR_FETCH_4
:
6706 case BUILT_IN_ATOMIC_XOR_FETCH_8
:
6707 case BUILT_IN_ATOMIC_XOR_FETCH_16
:
6709 enum built_in_function lib
;
6710 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_XOR_FETCH_1
);
6711 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_XOR_1
+
6712 (fcode
- BUILT_IN_ATOMIC_XOR_FETCH_1
));
6713 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, XOR
, true,
6719 case BUILT_IN_ATOMIC_OR_FETCH_1
:
6720 case BUILT_IN_ATOMIC_OR_FETCH_2
:
6721 case BUILT_IN_ATOMIC_OR_FETCH_4
:
6722 case BUILT_IN_ATOMIC_OR_FETCH_8
:
6723 case BUILT_IN_ATOMIC_OR_FETCH_16
:
6725 enum built_in_function lib
;
6726 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_OR_FETCH_1
);
6727 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_OR_1
+
6728 (fcode
- BUILT_IN_ATOMIC_OR_FETCH_1
));
6729 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, IOR
, true,
6735 case BUILT_IN_ATOMIC_FETCH_ADD_1
:
6736 case BUILT_IN_ATOMIC_FETCH_ADD_2
:
6737 case BUILT_IN_ATOMIC_FETCH_ADD_4
:
6738 case BUILT_IN_ATOMIC_FETCH_ADD_8
:
6739 case BUILT_IN_ATOMIC_FETCH_ADD_16
:
6740 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_ADD_1
);
6741 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, PLUS
, false,
6742 ignore
, BUILT_IN_NONE
);
6747 case BUILT_IN_ATOMIC_FETCH_SUB_1
:
6748 case BUILT_IN_ATOMIC_FETCH_SUB_2
:
6749 case BUILT_IN_ATOMIC_FETCH_SUB_4
:
6750 case BUILT_IN_ATOMIC_FETCH_SUB_8
:
6751 case BUILT_IN_ATOMIC_FETCH_SUB_16
:
6752 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_SUB_1
);
6753 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, MINUS
, false,
6754 ignore
, BUILT_IN_NONE
);
6759 case BUILT_IN_ATOMIC_FETCH_AND_1
:
6760 case BUILT_IN_ATOMIC_FETCH_AND_2
:
6761 case BUILT_IN_ATOMIC_FETCH_AND_4
:
6762 case BUILT_IN_ATOMIC_FETCH_AND_8
:
6763 case BUILT_IN_ATOMIC_FETCH_AND_16
:
6764 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_AND_1
);
6765 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, AND
, false,
6766 ignore
, BUILT_IN_NONE
);
6771 case BUILT_IN_ATOMIC_FETCH_NAND_1
:
6772 case BUILT_IN_ATOMIC_FETCH_NAND_2
:
6773 case BUILT_IN_ATOMIC_FETCH_NAND_4
:
6774 case BUILT_IN_ATOMIC_FETCH_NAND_8
:
6775 case BUILT_IN_ATOMIC_FETCH_NAND_16
:
6776 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_NAND_1
);
6777 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, NOT
, false,
6778 ignore
, BUILT_IN_NONE
);
6783 case BUILT_IN_ATOMIC_FETCH_XOR_1
:
6784 case BUILT_IN_ATOMIC_FETCH_XOR_2
:
6785 case BUILT_IN_ATOMIC_FETCH_XOR_4
:
6786 case BUILT_IN_ATOMIC_FETCH_XOR_8
:
6787 case BUILT_IN_ATOMIC_FETCH_XOR_16
:
6788 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_XOR_1
);
6789 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, XOR
, false,
6790 ignore
, BUILT_IN_NONE
);
6795 case BUILT_IN_ATOMIC_FETCH_OR_1
:
6796 case BUILT_IN_ATOMIC_FETCH_OR_2
:
6797 case BUILT_IN_ATOMIC_FETCH_OR_4
:
6798 case BUILT_IN_ATOMIC_FETCH_OR_8
:
6799 case BUILT_IN_ATOMIC_FETCH_OR_16
:
6800 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_OR_1
);
6801 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, IOR
, false,
6802 ignore
, BUILT_IN_NONE
);
6807 case BUILT_IN_ATOMIC_TEST_AND_SET
:
6808 return expand_builtin_atomic_test_and_set (exp
, target
);
6810 case BUILT_IN_ATOMIC_CLEAR
:
6811 return expand_builtin_atomic_clear (exp
);
6813 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE
:
6814 return expand_builtin_atomic_always_lock_free (exp
);
6816 case BUILT_IN_ATOMIC_IS_LOCK_FREE
:
6817 target
= expand_builtin_atomic_is_lock_free (exp
);
6822 case BUILT_IN_ATOMIC_THREAD_FENCE
:
6823 expand_builtin_atomic_thread_fence (exp
);
6826 case BUILT_IN_ATOMIC_SIGNAL_FENCE
:
6827 expand_builtin_atomic_signal_fence (exp
);
6830 case BUILT_IN_OBJECT_SIZE
:
6831 return expand_builtin_object_size (exp
);
6833 case BUILT_IN_MEMCPY_CHK
:
6834 case BUILT_IN_MEMPCPY_CHK
:
6835 case BUILT_IN_MEMMOVE_CHK
:
6836 case BUILT_IN_MEMSET_CHK
:
6837 target
= expand_builtin_memory_chk (exp
, target
, mode
, fcode
);
6842 case BUILT_IN_STRCPY_CHK
:
6843 case BUILT_IN_STPCPY_CHK
:
6844 case BUILT_IN_STRNCPY_CHK
:
6845 case BUILT_IN_STPNCPY_CHK
:
6846 case BUILT_IN_STRCAT_CHK
:
6847 case BUILT_IN_STRNCAT_CHK
:
6848 case BUILT_IN_SNPRINTF_CHK
:
6849 case BUILT_IN_VSNPRINTF_CHK
:
6850 maybe_emit_chk_warning (exp
, fcode
);
6853 case BUILT_IN_SPRINTF_CHK
:
6854 case BUILT_IN_VSPRINTF_CHK
:
6855 maybe_emit_sprintf_chk_warning (exp
, fcode
);
6859 if (warn_free_nonheap_object
)
6860 maybe_emit_free_warning (exp
);
6863 case BUILT_IN_THREAD_POINTER
:
6864 return expand_builtin_thread_pointer (exp
, target
);
6866 case BUILT_IN_SET_THREAD_POINTER
:
6867 expand_builtin_set_thread_pointer (exp
);
6870 default: /* just do library call, if unknown builtin */
6874 /* The switch statement above can drop through to cause the function
6875 to be called normally. */
6876 return expand_call (exp
, target
, ignore
);
6879 /* Determine whether a tree node represents a call to a built-in
6880 function. If the tree T is a call to a built-in function with
6881 the right number of arguments of the appropriate types, return
6882 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6883 Otherwise the return value is END_BUILTINS. */
6885 enum built_in_function
6886 builtin_mathfn_code (const_tree t
)
6888 const_tree fndecl
, arg
, parmlist
;
6889 const_tree argtype
, parmtype
;
6890 const_call_expr_arg_iterator iter
;
6892 if (TREE_CODE (t
) != CALL_EXPR
6893 || TREE_CODE (CALL_EXPR_FN (t
)) != ADDR_EXPR
)
6894 return END_BUILTINS
;
6896 fndecl
= get_callee_fndecl (t
);
6897 if (fndecl
== NULL_TREE
6898 || TREE_CODE (fndecl
) != FUNCTION_DECL
6899 || ! DECL_BUILT_IN (fndecl
)
6900 || DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
6901 return END_BUILTINS
;
6903 parmlist
= TYPE_ARG_TYPES (TREE_TYPE (fndecl
));
6904 init_const_call_expr_arg_iterator (t
, &iter
);
6905 for (; parmlist
; parmlist
= TREE_CHAIN (parmlist
))
6907 /* If a function doesn't take a variable number of arguments,
6908 the last element in the list will have type `void'. */
6909 parmtype
= TREE_VALUE (parmlist
);
6910 if (VOID_TYPE_P (parmtype
))
6912 if (more_const_call_expr_args_p (&iter
))
6913 return END_BUILTINS
;
6914 return DECL_FUNCTION_CODE (fndecl
);
6917 if (! more_const_call_expr_args_p (&iter
))
6918 return END_BUILTINS
;
6920 arg
= next_const_call_expr_arg (&iter
);
6921 argtype
= TREE_TYPE (arg
);
6923 if (SCALAR_FLOAT_TYPE_P (parmtype
))
6925 if (! SCALAR_FLOAT_TYPE_P (argtype
))
6926 return END_BUILTINS
;
6928 else if (COMPLEX_FLOAT_TYPE_P (parmtype
))
6930 if (! COMPLEX_FLOAT_TYPE_P (argtype
))
6931 return END_BUILTINS
;
6933 else if (POINTER_TYPE_P (parmtype
))
6935 if (! POINTER_TYPE_P (argtype
))
6936 return END_BUILTINS
;
6938 else if (INTEGRAL_TYPE_P (parmtype
))
6940 if (! INTEGRAL_TYPE_P (argtype
))
6941 return END_BUILTINS
;
6944 return END_BUILTINS
;
6947 /* Variable-length argument list. */
6948 return DECL_FUNCTION_CODE (fndecl
);
6951 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6952 evaluate to a constant. */
6955 fold_builtin_constant_p (tree arg
)
6957 /* We return 1 for a numeric type that's known to be a constant
6958 value at compile-time or for an aggregate type that's a
6959 literal constant. */
6962 /* If we know this is a constant, emit the constant of one. */
6963 if (CONSTANT_CLASS_P (arg
)
6964 || (TREE_CODE (arg
) == CONSTRUCTOR
6965 && TREE_CONSTANT (arg
)))
6966 return integer_one_node
;
6967 if (TREE_CODE (arg
) == ADDR_EXPR
)
6969 tree op
= TREE_OPERAND (arg
, 0);
6970 if (TREE_CODE (op
) == STRING_CST
6971 || (TREE_CODE (op
) == ARRAY_REF
6972 && integer_zerop (TREE_OPERAND (op
, 1))
6973 && TREE_CODE (TREE_OPERAND (op
, 0)) == STRING_CST
))
6974 return integer_one_node
;
6977 /* If this expression has side effects, show we don't know it to be a
6978 constant. Likewise if it's a pointer or aggregate type since in
6979 those case we only want literals, since those are only optimized
6980 when generating RTL, not later.
6981 And finally, if we are compiling an initializer, not code, we
6982 need to return a definite result now; there's not going to be any
6983 more optimization done. */
6984 if (TREE_SIDE_EFFECTS (arg
)
6985 || AGGREGATE_TYPE_P (TREE_TYPE (arg
))
6986 || POINTER_TYPE_P (TREE_TYPE (arg
))
6988 || folding_initializer
)
6989 return integer_zero_node
;
6994 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6995 return it as a truthvalue. */
6998 build_builtin_expect_predicate (location_t loc
, tree pred
, tree expected
)
7000 tree fn
, arg_types
, pred_type
, expected_type
, call_expr
, ret_type
;
7002 fn
= builtin_decl_explicit (BUILT_IN_EXPECT
);
7003 arg_types
= TYPE_ARG_TYPES (TREE_TYPE (fn
));
7004 ret_type
= TREE_TYPE (TREE_TYPE (fn
));
7005 pred_type
= TREE_VALUE (arg_types
);
7006 expected_type
= TREE_VALUE (TREE_CHAIN (arg_types
));
7008 pred
= fold_convert_loc (loc
, pred_type
, pred
);
7009 expected
= fold_convert_loc (loc
, expected_type
, expected
);
7010 call_expr
= build_call_expr_loc (loc
, fn
, 2, pred
, expected
);
7012 return build2 (NE_EXPR
, TREE_TYPE (pred
), call_expr
,
7013 build_int_cst (ret_type
, 0));
7016 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7017 NULL_TREE if no simplification is possible. */
7020 fold_builtin_expect (location_t loc
, tree arg0
, tree arg1
)
7022 tree inner
, fndecl
, inner_arg0
;
7023 enum tree_code code
;
7025 /* Distribute the expected value over short-circuiting operators.
7026 See through the cast from truthvalue_type_node to long. */
7028 while (TREE_CODE (inner_arg0
) == NOP_EXPR
7029 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0
))
7030 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0
, 0))))
7031 inner_arg0
= TREE_OPERAND (inner_arg0
, 0);
7033 /* If this is a builtin_expect within a builtin_expect keep the
7034 inner one. See through a comparison against a constant. It
7035 might have been added to create a thruthvalue. */
7038 if (COMPARISON_CLASS_P (inner
)
7039 && TREE_CODE (TREE_OPERAND (inner
, 1)) == INTEGER_CST
)
7040 inner
= TREE_OPERAND (inner
, 0);
7042 if (TREE_CODE (inner
) == CALL_EXPR
7043 && (fndecl
= get_callee_fndecl (inner
))
7044 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
7045 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_EXPECT
)
7049 code
= TREE_CODE (inner
);
7050 if (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
)
7052 tree op0
= TREE_OPERAND (inner
, 0);
7053 tree op1
= TREE_OPERAND (inner
, 1);
7055 op0
= build_builtin_expect_predicate (loc
, op0
, arg1
);
7056 op1
= build_builtin_expect_predicate (loc
, op1
, arg1
);
7057 inner
= build2 (code
, TREE_TYPE (inner
), op0
, op1
);
7059 return fold_convert_loc (loc
, TREE_TYPE (arg0
), inner
);
7062 /* If the argument isn't invariant then there's nothing else we can do. */
7063 if (!TREE_CONSTANT (inner_arg0
))
7066 /* If we expect that a comparison against the argument will fold to
7067 a constant return the constant. In practice, this means a true
7068 constant or the address of a non-weak symbol. */
7071 if (TREE_CODE (inner
) == ADDR_EXPR
)
7075 inner
= TREE_OPERAND (inner
, 0);
7077 while (TREE_CODE (inner
) == COMPONENT_REF
7078 || TREE_CODE (inner
) == ARRAY_REF
);
7079 if ((TREE_CODE (inner
) == VAR_DECL
7080 || TREE_CODE (inner
) == FUNCTION_DECL
)
7081 && DECL_WEAK (inner
))
7085 /* Otherwise, ARG0 already has the proper type for the return value. */
7089 /* Fold a call to __builtin_classify_type with argument ARG. */
7092 fold_builtin_classify_type (tree arg
)
7095 return build_int_cst (integer_type_node
, no_type_class
);
7097 return build_int_cst (integer_type_node
, type_to_class (TREE_TYPE (arg
)));
7100 /* Fold a call to __builtin_strlen with argument ARG. */
7103 fold_builtin_strlen (location_t loc
, tree type
, tree arg
)
7105 if (!validate_arg (arg
, POINTER_TYPE
))
7109 tree len
= c_strlen (arg
, 0);
7112 return fold_convert_loc (loc
, type
, len
);
7118 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7121 fold_builtin_inf (location_t loc
, tree type
, int warn
)
7123 REAL_VALUE_TYPE real
;
7125 /* __builtin_inff is intended to be usable to define INFINITY on all
7126 targets. If an infinity is not available, INFINITY expands "to a
7127 positive constant of type float that overflows at translation
7128 time", footnote "In this case, using INFINITY will violate the
7129 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7130 Thus we pedwarn to ensure this constraint violation is
7132 if (!MODE_HAS_INFINITIES (TYPE_MODE (type
)) && warn
)
7133 pedwarn (loc
, 0, "target format does not support infinity");
7136 return build_real (type
, real
);
7139 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7142 fold_builtin_nan (tree arg
, tree type
, int quiet
)
7144 REAL_VALUE_TYPE real
;
7147 if (!validate_arg (arg
, POINTER_TYPE
))
7149 str
= c_getstr (arg
);
7153 if (!real_nan (&real
, str
, quiet
, TYPE_MODE (type
)))
7156 return build_real (type
, real
);
7159 /* Return true if the floating point expression T has an integer value.
7160 We also allow +Inf, -Inf and NaN to be considered integer values. */
7163 integer_valued_real_p (tree t
)
7165 switch (TREE_CODE (t
))
7172 return integer_valued_real_p (TREE_OPERAND (t
, 0));
7177 return integer_valued_real_p (TREE_OPERAND (t
, 1));
7184 return integer_valued_real_p (TREE_OPERAND (t
, 0))
7185 && integer_valued_real_p (TREE_OPERAND (t
, 1));
7188 return integer_valued_real_p (TREE_OPERAND (t
, 1))
7189 && integer_valued_real_p (TREE_OPERAND (t
, 2));
7192 return real_isinteger (TREE_REAL_CST_PTR (t
), TYPE_MODE (TREE_TYPE (t
)));
7196 tree type
= TREE_TYPE (TREE_OPERAND (t
, 0));
7197 if (TREE_CODE (type
) == INTEGER_TYPE
)
7199 if (TREE_CODE (type
) == REAL_TYPE
)
7200 return integer_valued_real_p (TREE_OPERAND (t
, 0));
7205 switch (builtin_mathfn_code (t
))
7207 CASE_FLT_FN (BUILT_IN_CEIL
):
7208 CASE_FLT_FN (BUILT_IN_FLOOR
):
7209 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
7210 CASE_FLT_FN (BUILT_IN_RINT
):
7211 CASE_FLT_FN (BUILT_IN_ROUND
):
7212 CASE_FLT_FN (BUILT_IN_TRUNC
):
7215 CASE_FLT_FN (BUILT_IN_FMIN
):
7216 CASE_FLT_FN (BUILT_IN_FMAX
):
7217 return integer_valued_real_p (CALL_EXPR_ARG (t
, 0))
7218 && integer_valued_real_p (CALL_EXPR_ARG (t
, 1));
7231 /* FNDECL is assumed to be a builtin where truncation can be propagated
7232 across (for instance floor((double)f) == (double)floorf (f).
7233 Do the transformation for a call with argument ARG. */
7236 fold_trunc_transparent_mathfn (location_t loc
, tree fndecl
, tree arg
)
7238 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
7240 if (!validate_arg (arg
, REAL_TYPE
))
7243 /* Integer rounding functions are idempotent. */
7244 if (fcode
== builtin_mathfn_code (arg
))
7247 /* If argument is already integer valued, and we don't need to worry
7248 about setting errno, there's no need to perform rounding. */
7249 if (! flag_errno_math
&& integer_valued_real_p (arg
))
7254 tree arg0
= strip_float_extensions (arg
);
7255 tree ftype
= TREE_TYPE (TREE_TYPE (fndecl
));
7256 tree newtype
= TREE_TYPE (arg0
);
7259 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (ftype
)
7260 && (decl
= mathfn_built_in (newtype
, fcode
)))
7261 return fold_convert_loc (loc
, ftype
,
7262 build_call_expr_loc (loc
, decl
, 1,
7263 fold_convert_loc (loc
,
7270 /* FNDECL is assumed to be builtin which can narrow the FP type of
7271 the argument, for instance lround((double)f) -> lroundf (f).
7272 Do the transformation for a call with argument ARG. */
7275 fold_fixed_mathfn (location_t loc
, tree fndecl
, tree arg
)
7277 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
7279 if (!validate_arg (arg
, REAL_TYPE
))
7282 /* If argument is already integer valued, and we don't need to worry
7283 about setting errno, there's no need to perform rounding. */
7284 if (! flag_errno_math
&& integer_valued_real_p (arg
))
7285 return fold_build1_loc (loc
, FIX_TRUNC_EXPR
,
7286 TREE_TYPE (TREE_TYPE (fndecl
)), arg
);
7290 tree ftype
= TREE_TYPE (arg
);
7291 tree arg0
= strip_float_extensions (arg
);
7292 tree newtype
= TREE_TYPE (arg0
);
7295 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (ftype
)
7296 && (decl
= mathfn_built_in (newtype
, fcode
)))
7297 return build_call_expr_loc (loc
, decl
, 1,
7298 fold_convert_loc (loc
, newtype
, arg0
));
7301 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7302 sizeof (int) == sizeof (long). */
7303 if (TYPE_PRECISION (integer_type_node
)
7304 == TYPE_PRECISION (long_integer_type_node
))
7306 tree newfn
= NULL_TREE
;
7309 CASE_FLT_FN (BUILT_IN_ICEIL
):
7310 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LCEIL
);
7313 CASE_FLT_FN (BUILT_IN_IFLOOR
):
7314 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LFLOOR
);
7317 CASE_FLT_FN (BUILT_IN_IROUND
):
7318 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LROUND
);
7321 CASE_FLT_FN (BUILT_IN_IRINT
):
7322 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LRINT
);
7331 tree newcall
= build_call_expr_loc (loc
, newfn
, 1, arg
);
7332 return fold_convert_loc (loc
,
7333 TREE_TYPE (TREE_TYPE (fndecl
)), newcall
);
7337 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7338 sizeof (long long) == sizeof (long). */
7339 if (TYPE_PRECISION (long_long_integer_type_node
)
7340 == TYPE_PRECISION (long_integer_type_node
))
7342 tree newfn
= NULL_TREE
;
7345 CASE_FLT_FN (BUILT_IN_LLCEIL
):
7346 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LCEIL
);
7349 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
7350 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LFLOOR
);
7353 CASE_FLT_FN (BUILT_IN_LLROUND
):
7354 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LROUND
);
7357 CASE_FLT_FN (BUILT_IN_LLRINT
):
7358 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LRINT
);
7367 tree newcall
= build_call_expr_loc (loc
, newfn
, 1, arg
);
7368 return fold_convert_loc (loc
,
7369 TREE_TYPE (TREE_TYPE (fndecl
)), newcall
);
7376 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7377 return type. Return NULL_TREE if no simplification can be made. */
7380 fold_builtin_cabs (location_t loc
, tree arg
, tree type
, tree fndecl
)
7384 if (!validate_arg (arg
, COMPLEX_TYPE
)
7385 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) != REAL_TYPE
)
7388 /* Calculate the result when the argument is a constant. */
7389 if (TREE_CODE (arg
) == COMPLEX_CST
7390 && (res
= do_mpfr_arg2 (TREE_REALPART (arg
), TREE_IMAGPART (arg
),
7394 if (TREE_CODE (arg
) == COMPLEX_EXPR
)
7396 tree real
= TREE_OPERAND (arg
, 0);
7397 tree imag
= TREE_OPERAND (arg
, 1);
7399 /* If either part is zero, cabs is fabs of the other. */
7400 if (real_zerop (real
))
7401 return fold_build1_loc (loc
, ABS_EXPR
, type
, imag
);
7402 if (real_zerop (imag
))
7403 return fold_build1_loc (loc
, ABS_EXPR
, type
, real
);
7405 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7406 if (flag_unsafe_math_optimizations
7407 && operand_equal_p (real
, imag
, OEP_PURE_SAME
))
7409 const REAL_VALUE_TYPE sqrt2_trunc
7410 = real_value_truncate (TYPE_MODE (type
), dconst_sqrt2 ());
7412 return fold_build2_loc (loc
, MULT_EXPR
, type
,
7413 fold_build1_loc (loc
, ABS_EXPR
, type
, real
),
7414 build_real (type
, sqrt2_trunc
));
7418 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7419 if (TREE_CODE (arg
) == NEGATE_EXPR
7420 || TREE_CODE (arg
) == CONJ_EXPR
)
7421 return build_call_expr_loc (loc
, fndecl
, 1, TREE_OPERAND (arg
, 0));
7423 /* Don't do this when optimizing for size. */
7424 if (flag_unsafe_math_optimizations
7425 && optimize
&& optimize_function_for_speed_p (cfun
))
7427 tree sqrtfn
= mathfn_built_in (type
, BUILT_IN_SQRT
);
7429 if (sqrtfn
!= NULL_TREE
)
7431 tree rpart
, ipart
, result
;
7433 arg
= builtin_save_expr (arg
);
7435 rpart
= fold_build1_loc (loc
, REALPART_EXPR
, type
, arg
);
7436 ipart
= fold_build1_loc (loc
, IMAGPART_EXPR
, type
, arg
);
7438 rpart
= builtin_save_expr (rpart
);
7439 ipart
= builtin_save_expr (ipart
);
7441 result
= fold_build2_loc (loc
, PLUS_EXPR
, type
,
7442 fold_build2_loc (loc
, MULT_EXPR
, type
,
7444 fold_build2_loc (loc
, MULT_EXPR
, type
,
7447 return build_call_expr_loc (loc
, sqrtfn
, 1, result
);
7454 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7455 complex tree type of the result. If NEG is true, the imaginary
7456 zero is negative. */
7459 build_complex_cproj (tree type
, bool neg
)
7461 REAL_VALUE_TYPE rinf
, rzero
= dconst0
;
7465 return build_complex (type
, build_real (TREE_TYPE (type
), rinf
),
7466 build_real (TREE_TYPE (type
), rzero
));
7469 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7470 return type. Return NULL_TREE if no simplification can be made. */
7473 fold_builtin_cproj (location_t loc
, tree arg
, tree type
)
7475 if (!validate_arg (arg
, COMPLEX_TYPE
)
7476 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) != REAL_TYPE
)
7479 /* If there are no infinities, return arg. */
7480 if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type
))))
7481 return non_lvalue_loc (loc
, arg
);
7483 /* Calculate the result when the argument is a constant. */
7484 if (TREE_CODE (arg
) == COMPLEX_CST
)
7486 const REAL_VALUE_TYPE
*real
= TREE_REAL_CST_PTR (TREE_REALPART (arg
));
7487 const REAL_VALUE_TYPE
*imag
= TREE_REAL_CST_PTR (TREE_IMAGPART (arg
));
7489 if (real_isinf (real
) || real_isinf (imag
))
7490 return build_complex_cproj (type
, imag
->sign
);
7494 else if (TREE_CODE (arg
) == COMPLEX_EXPR
)
7496 tree real
= TREE_OPERAND (arg
, 0);
7497 tree imag
= TREE_OPERAND (arg
, 1);
7502 /* If the real part is inf and the imag part is known to be
7503 nonnegative, return (inf + 0i). Remember side-effects are
7504 possible in the imag part. */
7505 if (TREE_CODE (real
) == REAL_CST
7506 && real_isinf (TREE_REAL_CST_PTR (real
))
7507 && tree_expr_nonnegative_p (imag
))
7508 return omit_one_operand_loc (loc
, type
,
7509 build_complex_cproj (type
, false),
7512 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7513 Remember side-effects are possible in the real part. */
7514 if (TREE_CODE (imag
) == REAL_CST
7515 && real_isinf (TREE_REAL_CST_PTR (imag
)))
7517 omit_one_operand_loc (loc
, type
,
7518 build_complex_cproj (type
, TREE_REAL_CST_PTR
7519 (imag
)->sign
), arg
);
7525 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7526 Return NULL_TREE if no simplification can be made. */
7529 fold_builtin_sqrt (location_t loc
, tree arg
, tree type
)
7532 enum built_in_function fcode
;
7535 if (!validate_arg (arg
, REAL_TYPE
))
7538 /* Calculate the result when the argument is a constant. */
7539 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_sqrt
, &dconst0
, NULL
, true)))
7542 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7543 fcode
= builtin_mathfn_code (arg
);
7544 if (flag_unsafe_math_optimizations
&& BUILTIN_EXPONENT_P (fcode
))
7546 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg
), 0);
7547 arg
= fold_build2_loc (loc
, MULT_EXPR
, type
,
7548 CALL_EXPR_ARG (arg
, 0),
7549 build_real (type
, dconsthalf
));
7550 return build_call_expr_loc (loc
, expfn
, 1, arg
);
7553 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7554 if (flag_unsafe_math_optimizations
&& BUILTIN_ROOT_P (fcode
))
7556 tree powfn
= mathfn_built_in (type
, BUILT_IN_POW
);
7560 tree arg0
= CALL_EXPR_ARG (arg
, 0);
7562 /* The inner root was either sqrt or cbrt. */
7563 /* This was a conditional expression but it triggered a bug
7565 REAL_VALUE_TYPE dconstroot
;
7566 if (BUILTIN_SQRT_P (fcode
))
7567 dconstroot
= dconsthalf
;
7569 dconstroot
= dconst_third ();
7571 /* Adjust for the outer root. */
7572 SET_REAL_EXP (&dconstroot
, REAL_EXP (&dconstroot
) - 1);
7573 dconstroot
= real_value_truncate (TYPE_MODE (type
), dconstroot
);
7574 tree_root
= build_real (type
, dconstroot
);
7575 return build_call_expr_loc (loc
, powfn
, 2, arg0
, tree_root
);
7579 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7580 if (flag_unsafe_math_optimizations
7581 && (fcode
== BUILT_IN_POW
7582 || fcode
== BUILT_IN_POWF
7583 || fcode
== BUILT_IN_POWL
))
7585 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg
), 0);
7586 tree arg0
= CALL_EXPR_ARG (arg
, 0);
7587 tree arg1
= CALL_EXPR_ARG (arg
, 1);
7589 if (!tree_expr_nonnegative_p (arg0
))
7590 arg0
= build1 (ABS_EXPR
, type
, arg0
);
7591 narg1
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg1
,
7592 build_real (type
, dconsthalf
));
7593 return build_call_expr_loc (loc
, powfn
, 2, arg0
, narg1
);
7599 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7600 Return NULL_TREE if no simplification can be made. */
7603 fold_builtin_cbrt (location_t loc
, tree arg
, tree type
)
7605 const enum built_in_function fcode
= builtin_mathfn_code (arg
);
7608 if (!validate_arg (arg
, REAL_TYPE
))
7611 /* Calculate the result when the argument is a constant. */
7612 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_cbrt
, NULL
, NULL
, 0)))
7615 if (flag_unsafe_math_optimizations
)
7617 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7618 if (BUILTIN_EXPONENT_P (fcode
))
7620 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg
), 0);
7621 const REAL_VALUE_TYPE third_trunc
=
7622 real_value_truncate (TYPE_MODE (type
), dconst_third ());
7623 arg
= fold_build2_loc (loc
, MULT_EXPR
, type
,
7624 CALL_EXPR_ARG (arg
, 0),
7625 build_real (type
, third_trunc
));
7626 return build_call_expr_loc (loc
, expfn
, 1, arg
);
7629 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7630 if (BUILTIN_SQRT_P (fcode
))
7632 tree powfn
= mathfn_built_in (type
, BUILT_IN_POW
);
7636 tree arg0
= CALL_EXPR_ARG (arg
, 0);
7638 REAL_VALUE_TYPE dconstroot
= dconst_third ();
7640 SET_REAL_EXP (&dconstroot
, REAL_EXP (&dconstroot
) - 1);
7641 dconstroot
= real_value_truncate (TYPE_MODE (type
), dconstroot
);
7642 tree_root
= build_real (type
, dconstroot
);
7643 return build_call_expr_loc (loc
, powfn
, 2, arg0
, tree_root
);
7647 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7648 if (BUILTIN_CBRT_P (fcode
))
7650 tree arg0
= CALL_EXPR_ARG (arg
, 0);
7651 if (tree_expr_nonnegative_p (arg0
))
7653 tree powfn
= mathfn_built_in (type
, BUILT_IN_POW
);
7658 REAL_VALUE_TYPE dconstroot
;
7660 real_arithmetic (&dconstroot
, MULT_EXPR
,
7661 dconst_third_ptr (), dconst_third_ptr ());
7662 dconstroot
= real_value_truncate (TYPE_MODE (type
), dconstroot
);
7663 tree_root
= build_real (type
, dconstroot
);
7664 return build_call_expr_loc (loc
, powfn
, 2, arg0
, tree_root
);
7669 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7670 if (fcode
== BUILT_IN_POW
7671 || fcode
== BUILT_IN_POWF
7672 || fcode
== BUILT_IN_POWL
)
7674 tree arg00
= CALL_EXPR_ARG (arg
, 0);
7675 tree arg01
= CALL_EXPR_ARG (arg
, 1);
7676 if (tree_expr_nonnegative_p (arg00
))
7678 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg
), 0);
7679 const REAL_VALUE_TYPE dconstroot
7680 = real_value_truncate (TYPE_MODE (type
), dconst_third ());
7681 tree narg01
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg01
,
7682 build_real (type
, dconstroot
));
7683 return build_call_expr_loc (loc
, powfn
, 2, arg00
, narg01
);
7690 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7691 TYPE is the type of the return value. Return NULL_TREE if no
7692 simplification can be made. */
7695 fold_builtin_cos (location_t loc
,
7696 tree arg
, tree type
, tree fndecl
)
7700 if (!validate_arg (arg
, REAL_TYPE
))
7703 /* Calculate the result when the argument is a constant. */
7704 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_cos
, NULL
, NULL
, 0)))
7707 /* Optimize cos(-x) into cos (x). */
7708 if ((narg
= fold_strip_sign_ops (arg
)))
7709 return build_call_expr_loc (loc
, fndecl
, 1, narg
);
7714 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7715 Return NULL_TREE if no simplification can be made. */
7718 fold_builtin_cosh (location_t loc
, tree arg
, tree type
, tree fndecl
)
7720 if (validate_arg (arg
, REAL_TYPE
))
7724 /* Calculate the result when the argument is a constant. */
7725 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_cosh
, NULL
, NULL
, 0)))
7728 /* Optimize cosh(-x) into cosh (x). */
7729 if ((narg
= fold_strip_sign_ops (arg
)))
7730 return build_call_expr_loc (loc
, fndecl
, 1, narg
);
7736 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7737 argument ARG. TYPE is the type of the return value. Return
7738 NULL_TREE if no simplification can be made. */
7741 fold_builtin_ccos (location_t loc
, tree arg
, tree type
, tree fndecl
,
7744 if (validate_arg (arg
, COMPLEX_TYPE
)
7745 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) == REAL_TYPE
)
7749 /* Calculate the result when the argument is a constant. */
7750 if ((tmp
= do_mpc_arg1 (arg
, type
, (hyper
? mpc_cosh
: mpc_cos
))))
7753 /* Optimize fn(-x) into fn(x). */
7754 if ((tmp
= fold_strip_sign_ops (arg
)))
7755 return build_call_expr_loc (loc
, fndecl
, 1, tmp
);
7761 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7762 Return NULL_TREE if no simplification can be made. */
7765 fold_builtin_tan (tree arg
, tree type
)
7767 enum built_in_function fcode
;
7770 if (!validate_arg (arg
, REAL_TYPE
))
7773 /* Calculate the result when the argument is a constant. */
7774 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_tan
, NULL
, NULL
, 0)))
7777 /* Optimize tan(atan(x)) = x. */
7778 fcode
= builtin_mathfn_code (arg
);
7779 if (flag_unsafe_math_optimizations
7780 && (fcode
== BUILT_IN_ATAN
7781 || fcode
== BUILT_IN_ATANF
7782 || fcode
== BUILT_IN_ATANL
))
7783 return CALL_EXPR_ARG (arg
, 0);
7788 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7789 NULL_TREE if no simplification can be made. */
7792 fold_builtin_sincos (location_t loc
,
7793 tree arg0
, tree arg1
, tree arg2
)
7798 if (!validate_arg (arg0
, REAL_TYPE
)
7799 || !validate_arg (arg1
, POINTER_TYPE
)
7800 || !validate_arg (arg2
, POINTER_TYPE
))
7803 type
= TREE_TYPE (arg0
);
7805 /* Calculate the result when the argument is a constant. */
7806 if ((res
= do_mpfr_sincos (arg0
, arg1
, arg2
)))
7809 /* Canonicalize sincos to cexpi. */
7810 if (!TARGET_C99_FUNCTIONS
)
7812 fn
= mathfn_built_in (type
, BUILT_IN_CEXPI
);
7816 call
= build_call_expr_loc (loc
, fn
, 1, arg0
);
7817 call
= builtin_save_expr (call
);
7819 return build2 (COMPOUND_EXPR
, void_type_node
,
7820 build2 (MODIFY_EXPR
, void_type_node
,
7821 build_fold_indirect_ref_loc (loc
, arg1
),
7822 build1 (IMAGPART_EXPR
, type
, call
)),
7823 build2 (MODIFY_EXPR
, void_type_node
,
7824 build_fold_indirect_ref_loc (loc
, arg2
),
7825 build1 (REALPART_EXPR
, type
, call
)));
7828 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7829 NULL_TREE if no simplification can be made. */
7832 fold_builtin_cexp (location_t loc
, tree arg0
, tree type
)
7835 tree realp
, imagp
, ifn
;
7838 if (!validate_arg (arg0
, COMPLEX_TYPE
)
7839 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) != REAL_TYPE
)
7842 /* Calculate the result when the argument is a constant. */
7843 if ((res
= do_mpc_arg1 (arg0
, type
, mpc_exp
)))
7846 rtype
= TREE_TYPE (TREE_TYPE (arg0
));
7848 /* In case we can figure out the real part of arg0 and it is constant zero
7850 if (!TARGET_C99_FUNCTIONS
)
7852 ifn
= mathfn_built_in (rtype
, BUILT_IN_CEXPI
);
7856 if ((realp
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg0
))
7857 && real_zerop (realp
))
7859 tree narg
= fold_build1_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
7860 return build_call_expr_loc (loc
, ifn
, 1, narg
);
7863 /* In case we can easily decompose real and imaginary parts split cexp
7864 to exp (r) * cexpi (i). */
7865 if (flag_unsafe_math_optimizations
7868 tree rfn
, rcall
, icall
;
7870 rfn
= mathfn_built_in (rtype
, BUILT_IN_EXP
);
7874 imagp
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
7878 icall
= build_call_expr_loc (loc
, ifn
, 1, imagp
);
7879 icall
= builtin_save_expr (icall
);
7880 rcall
= build_call_expr_loc (loc
, rfn
, 1, realp
);
7881 rcall
= builtin_save_expr (rcall
);
7882 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
7883 fold_build2_loc (loc
, MULT_EXPR
, rtype
,
7885 fold_build1_loc (loc
, REALPART_EXPR
,
7887 fold_build2_loc (loc
, MULT_EXPR
, rtype
,
7889 fold_build1_loc (loc
, IMAGPART_EXPR
,
7896 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7897 Return NULL_TREE if no simplification can be made. */
7900 fold_builtin_trunc (location_t loc
, tree fndecl
, tree arg
)
7902 if (!validate_arg (arg
, REAL_TYPE
))
7905 /* Optimize trunc of constant value. */
7906 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7908 REAL_VALUE_TYPE r
, x
;
7909 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7911 x
= TREE_REAL_CST (arg
);
7912 real_trunc (&r
, TYPE_MODE (type
), &x
);
7913 return build_real (type
, r
);
7916 return fold_trunc_transparent_mathfn (loc
, fndecl
, arg
);
7919 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7920 Return NULL_TREE if no simplification can be made. */
7923 fold_builtin_floor (location_t loc
, tree fndecl
, tree arg
)
7925 if (!validate_arg (arg
, REAL_TYPE
))
7928 /* Optimize floor of constant value. */
7929 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7933 x
= TREE_REAL_CST (arg
);
7934 if (! REAL_VALUE_ISNAN (x
) || ! flag_errno_math
)
7936 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7939 real_floor (&r
, TYPE_MODE (type
), &x
);
7940 return build_real (type
, r
);
7944 /* Fold floor (x) where x is nonnegative to trunc (x). */
7945 if (tree_expr_nonnegative_p (arg
))
7947 tree truncfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_TRUNC
);
7949 return build_call_expr_loc (loc
, truncfn
, 1, arg
);
7952 return fold_trunc_transparent_mathfn (loc
, fndecl
, arg
);
7955 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7956 Return NULL_TREE if no simplification can be made. */
7959 fold_builtin_ceil (location_t loc
, tree fndecl
, tree arg
)
7961 if (!validate_arg (arg
, REAL_TYPE
))
7964 /* Optimize ceil of constant value. */
7965 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7969 x
= TREE_REAL_CST (arg
);
7970 if (! REAL_VALUE_ISNAN (x
) || ! flag_errno_math
)
7972 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7975 real_ceil (&r
, TYPE_MODE (type
), &x
);
7976 return build_real (type
, r
);
7980 return fold_trunc_transparent_mathfn (loc
, fndecl
, arg
);
7983 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7984 Return NULL_TREE if no simplification can be made. */
7987 fold_builtin_round (location_t loc
, tree fndecl
, tree arg
)
7989 if (!validate_arg (arg
, REAL_TYPE
))
7992 /* Optimize round of constant value. */
7993 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7997 x
= TREE_REAL_CST (arg
);
7998 if (! REAL_VALUE_ISNAN (x
) || ! flag_errno_math
)
8000 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8003 real_round (&r
, TYPE_MODE (type
), &x
);
8004 return build_real (type
, r
);
8008 return fold_trunc_transparent_mathfn (loc
, fndecl
, arg
);
8011 /* Fold function call to builtin lround, lroundf or lroundl (or the
8012 corresponding long long versions) and other rounding functions. ARG
8013 is the argument to the call. Return NULL_TREE if no simplification
8017 fold_builtin_int_roundingfn (location_t loc
, tree fndecl
, tree arg
)
8019 if (!validate_arg (arg
, REAL_TYPE
))
8022 /* Optimize lround of constant value. */
8023 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
8025 const REAL_VALUE_TYPE x
= TREE_REAL_CST (arg
);
8027 if (real_isfinite (&x
))
8029 tree itype
= TREE_TYPE (TREE_TYPE (fndecl
));
8030 tree ftype
= TREE_TYPE (arg
);
8034 switch (DECL_FUNCTION_CODE (fndecl
))
8036 CASE_FLT_FN (BUILT_IN_IFLOOR
):
8037 CASE_FLT_FN (BUILT_IN_LFLOOR
):
8038 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
8039 real_floor (&r
, TYPE_MODE (ftype
), &x
);
8042 CASE_FLT_FN (BUILT_IN_ICEIL
):
8043 CASE_FLT_FN (BUILT_IN_LCEIL
):
8044 CASE_FLT_FN (BUILT_IN_LLCEIL
):
8045 real_ceil (&r
, TYPE_MODE (ftype
), &x
);
8048 CASE_FLT_FN (BUILT_IN_IROUND
):
8049 CASE_FLT_FN (BUILT_IN_LROUND
):
8050 CASE_FLT_FN (BUILT_IN_LLROUND
):
8051 real_round (&r
, TYPE_MODE (ftype
), &x
);
8058 real_to_integer2 ((HOST_WIDE_INT
*)&val
.low
, &val
.high
, &r
);
8059 if (double_int_fits_to_tree_p (itype
, val
))
8060 return double_int_to_tree (itype
, val
);
8064 switch (DECL_FUNCTION_CODE (fndecl
))
8066 CASE_FLT_FN (BUILT_IN_LFLOOR
):
8067 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
8068 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8069 if (tree_expr_nonnegative_p (arg
))
8070 return fold_build1_loc (loc
, FIX_TRUNC_EXPR
,
8071 TREE_TYPE (TREE_TYPE (fndecl
)), arg
);
8076 return fold_fixed_mathfn (loc
, fndecl
, arg
);
8079 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8080 and their long and long long variants (i.e. ffsl and ffsll). ARG is
8081 the argument to the call. Return NULL_TREE if no simplification can
8085 fold_builtin_bitop (tree fndecl
, tree arg
)
8087 if (!validate_arg (arg
, INTEGER_TYPE
))
8090 /* Optimize for constant argument. */
8091 if (TREE_CODE (arg
) == INTEGER_CST
&& !TREE_OVERFLOW (arg
))
8093 HOST_WIDE_INT hi
, width
, result
;
8094 unsigned HOST_WIDE_INT lo
;
8097 type
= TREE_TYPE (arg
);
8098 width
= TYPE_PRECISION (type
);
8099 lo
= TREE_INT_CST_LOW (arg
);
8101 /* Clear all the bits that are beyond the type's precision. */
8102 if (width
> HOST_BITS_PER_WIDE_INT
)
8104 hi
= TREE_INT_CST_HIGH (arg
);
8105 if (width
< HOST_BITS_PER_DOUBLE_INT
)
8106 hi
&= ~((unsigned HOST_WIDE_INT
) (-1)
8107 << (width
- HOST_BITS_PER_WIDE_INT
));
8112 if (width
< HOST_BITS_PER_WIDE_INT
)
8113 lo
&= ~((unsigned HOST_WIDE_INT
) (-1) << width
);
8116 switch (DECL_FUNCTION_CODE (fndecl
))
8118 CASE_INT_FN (BUILT_IN_FFS
):
8120 result
= ffs_hwi (lo
);
8122 result
= HOST_BITS_PER_WIDE_INT
+ ffs_hwi (hi
);
8127 CASE_INT_FN (BUILT_IN_CLZ
):
8129 result
= width
- floor_log2 (hi
) - 1 - HOST_BITS_PER_WIDE_INT
;
8131 result
= width
- floor_log2 (lo
) - 1;
8132 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type
), result
))
8136 CASE_INT_FN (BUILT_IN_CTZ
):
8138 result
= ctz_hwi (lo
);
8140 result
= HOST_BITS_PER_WIDE_INT
+ ctz_hwi (hi
);
8141 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type
), result
))
8145 CASE_INT_FN (BUILT_IN_CLRSB
):
8146 if (width
> HOST_BITS_PER_WIDE_INT
8147 && (hi
& ((unsigned HOST_WIDE_INT
) 1
8148 << (width
- HOST_BITS_PER_WIDE_INT
- 1))) != 0)
8150 hi
= ~hi
& ~((unsigned HOST_WIDE_INT
) (-1)
8151 << (width
- HOST_BITS_PER_WIDE_INT
- 1));
8154 else if (width
<= HOST_BITS_PER_WIDE_INT
8155 && (lo
& ((unsigned HOST_WIDE_INT
) 1 << (width
- 1))) != 0)
8156 lo
= ~lo
& ~((unsigned HOST_WIDE_INT
) (-1) << (width
- 1));
8158 result
= width
- floor_log2 (hi
) - 2 - HOST_BITS_PER_WIDE_INT
;
8160 result
= width
- floor_log2 (lo
) - 2;
8165 CASE_INT_FN (BUILT_IN_POPCOUNT
):
8168 result
++, lo
&= lo
- 1;
8170 result
++, hi
&= (unsigned HOST_WIDE_INT
) hi
- 1;
8173 CASE_INT_FN (BUILT_IN_PARITY
):
8176 result
++, lo
&= lo
- 1;
8178 result
++, hi
&= (unsigned HOST_WIDE_INT
) hi
- 1;
8186 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl
)), result
);
8192 /* Fold function call to builtin_bswap and the short, long and long long
8193 variants. Return NULL_TREE if no simplification can be made. */
8195 fold_builtin_bswap (tree fndecl
, tree arg
)
8197 if (! validate_arg (arg
, INTEGER_TYPE
))
8200 /* Optimize constant value. */
8201 if (TREE_CODE (arg
) == INTEGER_CST
&& !TREE_OVERFLOW (arg
))
8203 HOST_WIDE_INT hi
, width
, r_hi
= 0;
8204 unsigned HOST_WIDE_INT lo
, r_lo
= 0;
8205 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8207 width
= TYPE_PRECISION (type
);
8208 lo
= TREE_INT_CST_LOW (arg
);
8209 hi
= TREE_INT_CST_HIGH (arg
);
8211 switch (DECL_FUNCTION_CODE (fndecl
))
8213 case BUILT_IN_BSWAP16
:
8214 case BUILT_IN_BSWAP32
:
8215 case BUILT_IN_BSWAP64
:
8219 for (s
= 0; s
< width
; s
+= 8)
8221 int d
= width
- s
- 8;
8222 unsigned HOST_WIDE_INT byte
;
8224 if (s
< HOST_BITS_PER_WIDE_INT
)
8225 byte
= (lo
>> s
) & 0xff;
8227 byte
= (hi
>> (s
- HOST_BITS_PER_WIDE_INT
)) & 0xff;
8229 if (d
< HOST_BITS_PER_WIDE_INT
)
8232 r_hi
|= byte
<< (d
- HOST_BITS_PER_WIDE_INT
);
8242 if (width
< HOST_BITS_PER_WIDE_INT
)
8243 return build_int_cst (type
, r_lo
);
8245 return build_int_cst_wide (type
, r_lo
, r_hi
);
8251 /* A subroutine of fold_builtin to fold the various logarithmic
8252 functions. Return NULL_TREE if no simplification can me made.
8253 FUNC is the corresponding MPFR logarithm function. */
8256 fold_builtin_logarithm (location_t loc
, tree fndecl
, tree arg
,
8257 int (*func
)(mpfr_ptr
, mpfr_srcptr
, mp_rnd_t
))
8259 if (validate_arg (arg
, REAL_TYPE
))
8261 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8263 const enum built_in_function fcode
= builtin_mathfn_code (arg
);
8265 /* Calculate the result when the argument is a constant. */
8266 if ((res
= do_mpfr_arg1 (arg
, type
, func
, &dconst0
, NULL
, false)))
8269 /* Special case, optimize logN(expN(x)) = x. */
8270 if (flag_unsafe_math_optimizations
8271 && ((func
== mpfr_log
8272 && (fcode
== BUILT_IN_EXP
8273 || fcode
== BUILT_IN_EXPF
8274 || fcode
== BUILT_IN_EXPL
))
8275 || (func
== mpfr_log2
8276 && (fcode
== BUILT_IN_EXP2
8277 || fcode
== BUILT_IN_EXP2F
8278 || fcode
== BUILT_IN_EXP2L
))
8279 || (func
== mpfr_log10
&& (BUILTIN_EXP10_P (fcode
)))))
8280 return fold_convert_loc (loc
, type
, CALL_EXPR_ARG (arg
, 0));
8282 /* Optimize logN(func()) for various exponential functions. We
8283 want to determine the value "x" and the power "exponent" in
8284 order to transform logN(x**exponent) into exponent*logN(x). */
8285 if (flag_unsafe_math_optimizations
)
8287 tree exponent
= 0, x
= 0;
8291 CASE_FLT_FN (BUILT_IN_EXP
):
8292 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
8293 x
= build_real (type
, real_value_truncate (TYPE_MODE (type
),
8295 exponent
= CALL_EXPR_ARG (arg
, 0);
8297 CASE_FLT_FN (BUILT_IN_EXP2
):
8298 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
8299 x
= build_real (type
, dconst2
);
8300 exponent
= CALL_EXPR_ARG (arg
, 0);
8302 CASE_FLT_FN (BUILT_IN_EXP10
):
8303 CASE_FLT_FN (BUILT_IN_POW10
):
8304 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
8306 REAL_VALUE_TYPE dconst10
;
8307 real_from_integer (&dconst10
, VOIDmode
, 10, 0, 0);
8308 x
= build_real (type
, dconst10
);
8310 exponent
= CALL_EXPR_ARG (arg
, 0);
8312 CASE_FLT_FN (BUILT_IN_SQRT
):
8313 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
8314 x
= CALL_EXPR_ARG (arg
, 0);
8315 exponent
= build_real (type
, dconsthalf
);
8317 CASE_FLT_FN (BUILT_IN_CBRT
):
8318 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
8319 x
= CALL_EXPR_ARG (arg
, 0);
8320 exponent
= build_real (type
, real_value_truncate (TYPE_MODE (type
),
8323 CASE_FLT_FN (BUILT_IN_POW
):
8324 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
8325 x
= CALL_EXPR_ARG (arg
, 0);
8326 exponent
= CALL_EXPR_ARG (arg
, 1);
8332 /* Now perform the optimization. */
8335 tree logfn
= build_call_expr_loc (loc
, fndecl
, 1, x
);
8336 return fold_build2_loc (loc
, MULT_EXPR
, type
, exponent
, logfn
);
8344 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8345 NULL_TREE if no simplification can be made. */
8348 fold_builtin_hypot (location_t loc
, tree fndecl
,
8349 tree arg0
, tree arg1
, tree type
)
8351 tree res
, narg0
, narg1
;
8353 if (!validate_arg (arg0
, REAL_TYPE
)
8354 || !validate_arg (arg1
, REAL_TYPE
))
8357 /* Calculate the result when the argument is a constant. */
8358 if ((res
= do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_hypot
)))
8361 /* If either argument to hypot has a negate or abs, strip that off.
8362 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8363 narg0
= fold_strip_sign_ops (arg0
);
8364 narg1
= fold_strip_sign_ops (arg1
);
8367 return build_call_expr_loc (loc
, fndecl
, 2, narg0
? narg0
: arg0
,
8368 narg1
? narg1
: arg1
);
8371 /* If either argument is zero, hypot is fabs of the other. */
8372 if (real_zerop (arg0
))
8373 return fold_build1_loc (loc
, ABS_EXPR
, type
, arg1
);
8374 else if (real_zerop (arg1
))
8375 return fold_build1_loc (loc
, ABS_EXPR
, type
, arg0
);
8377 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8378 if (flag_unsafe_math_optimizations
8379 && operand_equal_p (arg0
, arg1
, OEP_PURE_SAME
))
8381 const REAL_VALUE_TYPE sqrt2_trunc
8382 = real_value_truncate (TYPE_MODE (type
), dconst_sqrt2 ());
8383 return fold_build2_loc (loc
, MULT_EXPR
, type
,
8384 fold_build1_loc (loc
, ABS_EXPR
, type
, arg0
),
8385 build_real (type
, sqrt2_trunc
));
8392 /* Fold a builtin function call to pow, powf, or powl. Return
8393 NULL_TREE if no simplification can be made. */
8395 fold_builtin_pow (location_t loc
, tree fndecl
, tree arg0
, tree arg1
, tree type
)
8399 if (!validate_arg (arg0
, REAL_TYPE
)
8400 || !validate_arg (arg1
, REAL_TYPE
))
8403 /* Calculate the result when the argument is a constant. */
8404 if ((res
= do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_pow
)))
8407 /* Optimize pow(1.0,y) = 1.0. */
8408 if (real_onep (arg0
))
8409 return omit_one_operand_loc (loc
, type
, build_real (type
, dconst1
), arg1
);
8411 if (TREE_CODE (arg1
) == REAL_CST
8412 && !TREE_OVERFLOW (arg1
))
8414 REAL_VALUE_TYPE cint
;
8418 c
= TREE_REAL_CST (arg1
);
8420 /* Optimize pow(x,0.0) = 1.0. */
8421 if (REAL_VALUES_EQUAL (c
, dconst0
))
8422 return omit_one_operand_loc (loc
, type
, build_real (type
, dconst1
),
8425 /* Optimize pow(x,1.0) = x. */
8426 if (REAL_VALUES_EQUAL (c
, dconst1
))
8429 /* Optimize pow(x,-1.0) = 1.0/x. */
8430 if (REAL_VALUES_EQUAL (c
, dconstm1
))
8431 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
8432 build_real (type
, dconst1
), arg0
);
8434 /* Optimize pow(x,0.5) = sqrt(x). */
8435 if (flag_unsafe_math_optimizations
8436 && REAL_VALUES_EQUAL (c
, dconsthalf
))
8438 tree sqrtfn
= mathfn_built_in (type
, BUILT_IN_SQRT
);
8440 if (sqrtfn
!= NULL_TREE
)
8441 return build_call_expr_loc (loc
, sqrtfn
, 1, arg0
);
8444 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8445 if (flag_unsafe_math_optimizations
)
8447 const REAL_VALUE_TYPE dconstroot
8448 = real_value_truncate (TYPE_MODE (type
), dconst_third ());
8450 if (REAL_VALUES_EQUAL (c
, dconstroot
))
8452 tree cbrtfn
= mathfn_built_in (type
, BUILT_IN_CBRT
);
8453 if (cbrtfn
!= NULL_TREE
)
8454 return build_call_expr_loc (loc
, cbrtfn
, 1, arg0
);
8458 /* Check for an integer exponent. */
8459 n
= real_to_integer (&c
);
8460 real_from_integer (&cint
, VOIDmode
, n
, n
< 0 ? -1 : 0, 0);
8461 if (real_identical (&c
, &cint
))
8463 /* Attempt to evaluate pow at compile-time, unless this should
8464 raise an exception. */
8465 if (TREE_CODE (arg0
) == REAL_CST
8466 && !TREE_OVERFLOW (arg0
)
8468 || (!flag_trapping_math
&& !flag_errno_math
)
8469 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0
), dconst0
)))
8474 x
= TREE_REAL_CST (arg0
);
8475 inexact
= real_powi (&x
, TYPE_MODE (type
), &x
, n
);
8476 if (flag_unsafe_math_optimizations
|| !inexact
)
8477 return build_real (type
, x
);
8480 /* Strip sign ops from even integer powers. */
8481 if ((n
& 1) == 0 && flag_unsafe_math_optimizations
)
8483 tree narg0
= fold_strip_sign_ops (arg0
);
8485 return build_call_expr_loc (loc
, fndecl
, 2, narg0
, arg1
);
8490 if (flag_unsafe_math_optimizations
)
8492 const enum built_in_function fcode
= builtin_mathfn_code (arg0
);
8494 /* Optimize pow(expN(x),y) = expN(x*y). */
8495 if (BUILTIN_EXPONENT_P (fcode
))
8497 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
8498 tree arg
= CALL_EXPR_ARG (arg0
, 0);
8499 arg
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg
, arg1
);
8500 return build_call_expr_loc (loc
, expfn
, 1, arg
);
8503 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8504 if (BUILTIN_SQRT_P (fcode
))
8506 tree narg0
= CALL_EXPR_ARG (arg0
, 0);
8507 tree narg1
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg1
,
8508 build_real (type
, dconsthalf
));
8509 return build_call_expr_loc (loc
, fndecl
, 2, narg0
, narg1
);
8512 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8513 if (BUILTIN_CBRT_P (fcode
))
8515 tree arg
= CALL_EXPR_ARG (arg0
, 0);
8516 if (tree_expr_nonnegative_p (arg
))
8518 const REAL_VALUE_TYPE dconstroot
8519 = real_value_truncate (TYPE_MODE (type
), dconst_third ());
8520 tree narg1
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg1
,
8521 build_real (type
, dconstroot
));
8522 return build_call_expr_loc (loc
, fndecl
, 2, arg
, narg1
);
8526 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8527 if (fcode
== BUILT_IN_POW
8528 || fcode
== BUILT_IN_POWF
8529 || fcode
== BUILT_IN_POWL
)
8531 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
8532 if (tree_expr_nonnegative_p (arg00
))
8534 tree arg01
= CALL_EXPR_ARG (arg0
, 1);
8535 tree narg1
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg01
, arg1
);
8536 return build_call_expr_loc (loc
, fndecl
, 2, arg00
, narg1
);
8544 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8545 Return NULL_TREE if no simplification can be made. */
8547 fold_builtin_powi (location_t loc
, tree fndecl ATTRIBUTE_UNUSED
,
8548 tree arg0
, tree arg1
, tree type
)
8550 if (!validate_arg (arg0
, REAL_TYPE
)
8551 || !validate_arg (arg1
, INTEGER_TYPE
))
8554 /* Optimize pow(1.0,y) = 1.0. */
8555 if (real_onep (arg0
))
8556 return omit_one_operand_loc (loc
, type
, build_real (type
, dconst1
), arg1
);
8558 if (host_integerp (arg1
, 0))
8560 HOST_WIDE_INT c
= TREE_INT_CST_LOW (arg1
);
8562 /* Evaluate powi at compile-time. */
8563 if (TREE_CODE (arg0
) == REAL_CST
8564 && !TREE_OVERFLOW (arg0
))
8567 x
= TREE_REAL_CST (arg0
);
8568 real_powi (&x
, TYPE_MODE (type
), &x
, c
);
8569 return build_real (type
, x
);
8572 /* Optimize pow(x,0) = 1.0. */
8574 return omit_one_operand_loc (loc
, type
, build_real (type
, dconst1
),
8577 /* Optimize pow(x,1) = x. */
8581 /* Optimize pow(x,-1) = 1.0/x. */
8583 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
8584 build_real (type
, dconst1
), arg0
);
8590 /* A subroutine of fold_builtin to fold the various exponent
8591 functions. Return NULL_TREE if no simplification can be made.
8592 FUNC is the corresponding MPFR exponent function. */
8595 fold_builtin_exponent (location_t loc
, tree fndecl
, tree arg
,
8596 int (*func
)(mpfr_ptr
, mpfr_srcptr
, mp_rnd_t
))
8598 if (validate_arg (arg
, REAL_TYPE
))
8600 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8603 /* Calculate the result when the argument is a constant. */
8604 if ((res
= do_mpfr_arg1 (arg
, type
, func
, NULL
, NULL
, 0)))
8607 /* Optimize expN(logN(x)) = x. */
8608 if (flag_unsafe_math_optimizations
)
8610 const enum built_in_function fcode
= builtin_mathfn_code (arg
);
8612 if ((func
== mpfr_exp
8613 && (fcode
== BUILT_IN_LOG
8614 || fcode
== BUILT_IN_LOGF
8615 || fcode
== BUILT_IN_LOGL
))
8616 || (func
== mpfr_exp2
8617 && (fcode
== BUILT_IN_LOG2
8618 || fcode
== BUILT_IN_LOG2F
8619 || fcode
== BUILT_IN_LOG2L
))
8620 || (func
== mpfr_exp10
8621 && (fcode
== BUILT_IN_LOG10
8622 || fcode
== BUILT_IN_LOG10F
8623 || fcode
== BUILT_IN_LOG10L
)))
8624 return fold_convert_loc (loc
, type
, CALL_EXPR_ARG (arg
, 0));
8631 /* Return true if VAR is a VAR_DECL or a component thereof. */
8634 var_decl_component_p (tree var
)
8637 while (handled_component_p (inner
))
8638 inner
= TREE_OPERAND (inner
, 0);
8639 return SSA_VAR_P (inner
);
8642 /* Fold function call to builtin memset. Return
8643 NULL_TREE if no simplification can be made. */
8646 fold_builtin_memset (location_t loc
, tree dest
, tree c
, tree len
,
8647 tree type
, bool ignore
)
8649 tree var
, ret
, etype
;
8650 unsigned HOST_WIDE_INT length
, cval
;
8652 if (! validate_arg (dest
, POINTER_TYPE
)
8653 || ! validate_arg (c
, INTEGER_TYPE
)
8654 || ! validate_arg (len
, INTEGER_TYPE
))
8657 if (! host_integerp (len
, 1))
8660 /* If the LEN parameter is zero, return DEST. */
8661 if (integer_zerop (len
))
8662 return omit_one_operand_loc (loc
, type
, dest
, c
);
8664 if (TREE_CODE (c
) != INTEGER_CST
|| TREE_SIDE_EFFECTS (dest
))
8669 if (TREE_CODE (var
) != ADDR_EXPR
)
8672 var
= TREE_OPERAND (var
, 0);
8673 if (TREE_THIS_VOLATILE (var
))
8676 etype
= TREE_TYPE (var
);
8677 if (TREE_CODE (etype
) == ARRAY_TYPE
)
8678 etype
= TREE_TYPE (etype
);
8680 if (!INTEGRAL_TYPE_P (etype
)
8681 && !POINTER_TYPE_P (etype
))
8684 if (! var_decl_component_p (var
))
8687 length
= tree_low_cst (len
, 1);
8688 if (GET_MODE_SIZE (TYPE_MODE (etype
)) != length
8689 || get_pointer_alignment (dest
) / BITS_PER_UNIT
< length
)
8692 if (length
> HOST_BITS_PER_WIDE_INT
/ BITS_PER_UNIT
)
8695 if (integer_zerop (c
))
8699 if (CHAR_BIT
!= 8 || BITS_PER_UNIT
!= 8 || HOST_BITS_PER_WIDE_INT
> 64)
8702 cval
= TREE_INT_CST_LOW (c
);
8706 cval
|= (cval
<< 31) << 1;
8709 ret
= build_int_cst_type (etype
, cval
);
8710 var
= build_fold_indirect_ref_loc (loc
,
8711 fold_convert_loc (loc
,
8712 build_pointer_type (etype
),
8714 ret
= build2 (MODIFY_EXPR
, etype
, var
, ret
);
8718 return omit_one_operand_loc (loc
, type
, dest
, ret
);
8721 /* Fold function call to builtin memset. Return
8722 NULL_TREE if no simplification can be made. */
8725 fold_builtin_bzero (location_t loc
, tree dest
, tree size
, bool ignore
)
8727 if (! validate_arg (dest
, POINTER_TYPE
)
8728 || ! validate_arg (size
, INTEGER_TYPE
))
8734 /* New argument list transforming bzero(ptr x, int y) to
8735 memset(ptr x, int 0, size_t y). This is done this way
8736 so that if it isn't expanded inline, we fallback to
8737 calling bzero instead of memset. */
8739 return fold_builtin_memset (loc
, dest
, integer_zero_node
,
8740 fold_convert_loc (loc
, size_type_node
, size
),
8741 void_type_node
, ignore
);
8744 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8745 NULL_TREE if no simplification can be made.
8746 If ENDP is 0, return DEST (like memcpy).
8747 If ENDP is 1, return DEST+LEN (like mempcpy).
8748 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8749 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8753 fold_builtin_memory_op (location_t loc
, tree dest
, tree src
,
8754 tree len
, tree type
, bool ignore
, int endp
)
8756 tree destvar
, srcvar
, expr
;
8758 if (! validate_arg (dest
, POINTER_TYPE
)
8759 || ! validate_arg (src
, POINTER_TYPE
)
8760 || ! validate_arg (len
, INTEGER_TYPE
))
8763 /* If the LEN parameter is zero, return DEST. */
8764 if (integer_zerop (len
))
8765 return omit_one_operand_loc (loc
, type
, dest
, src
);
8767 /* If SRC and DEST are the same (and not volatile), return
8768 DEST{,+LEN,+LEN-1}. */
8769 if (operand_equal_p (src
, dest
, 0))
8773 tree srctype
, desttype
;
8774 unsigned int src_align
, dest_align
;
8779 src_align
= get_pointer_alignment (src
);
8780 dest_align
= get_pointer_alignment (dest
);
8782 /* Both DEST and SRC must be pointer types.
8783 ??? This is what old code did. Is the testing for pointer types
8786 If either SRC is readonly or length is 1, we can use memcpy. */
8787 if (!dest_align
|| !src_align
)
8789 if (readonly_data_expr (src
)
8790 || (host_integerp (len
, 1)
8791 && (MIN (src_align
, dest_align
) / BITS_PER_UNIT
8792 >= (unsigned HOST_WIDE_INT
) tree_low_cst (len
, 1))))
8794 tree fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
8797 return build_call_expr_loc (loc
, fn
, 3, dest
, src
, len
);
8800 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8801 if (TREE_CODE (src
) == ADDR_EXPR
8802 && TREE_CODE (dest
) == ADDR_EXPR
)
8804 tree src_base
, dest_base
, fn
;
8805 HOST_WIDE_INT src_offset
= 0, dest_offset
= 0;
8806 HOST_WIDE_INT size
= -1;
8807 HOST_WIDE_INT maxsize
= -1;
8809 srcvar
= TREE_OPERAND (src
, 0);
8810 src_base
= get_ref_base_and_extent (srcvar
, &src_offset
,
8812 destvar
= TREE_OPERAND (dest
, 0);
8813 dest_base
= get_ref_base_and_extent (destvar
, &dest_offset
,
8815 if (host_integerp (len
, 1))
8816 maxsize
= tree_low_cst (len
, 1);
8819 src_offset
/= BITS_PER_UNIT
;
8820 dest_offset
/= BITS_PER_UNIT
;
8821 if (SSA_VAR_P (src_base
)
8822 && SSA_VAR_P (dest_base
))
8824 if (operand_equal_p (src_base
, dest_base
, 0)
8825 && ranges_overlap_p (src_offset
, maxsize
,
8826 dest_offset
, maxsize
))
8829 else if (TREE_CODE (src_base
) == MEM_REF
8830 && TREE_CODE (dest_base
) == MEM_REF
)
8833 if (! operand_equal_p (TREE_OPERAND (src_base
, 0),
8834 TREE_OPERAND (dest_base
, 0), 0))
8836 off
= mem_ref_offset (src_base
) +
8837 double_int::from_shwi (src_offset
);
8838 if (!off
.fits_shwi ())
8840 src_offset
= off
.low
;
8841 off
= mem_ref_offset (dest_base
) +
8842 double_int::from_shwi (dest_offset
);
8843 if (!off
.fits_shwi ())
8845 dest_offset
= off
.low
;
8846 if (ranges_overlap_p (src_offset
, maxsize
,
8847 dest_offset
, maxsize
))
8853 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
8856 return build_call_expr_loc (loc
, fn
, 3, dest
, src
, len
);
8859 /* If the destination and source do not alias optimize into
8861 if ((is_gimple_min_invariant (dest
)
8862 || TREE_CODE (dest
) == SSA_NAME
)
8863 && (is_gimple_min_invariant (src
)
8864 || TREE_CODE (src
) == SSA_NAME
))
8867 ao_ref_init_from_ptr_and_size (&destr
, dest
, len
);
8868 ao_ref_init_from_ptr_and_size (&srcr
, src
, len
);
8869 if (!refs_may_alias_p_1 (&destr
, &srcr
, false))
8872 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
8875 return build_call_expr_loc (loc
, fn
, 3, dest
, src
, len
);
8882 if (!host_integerp (len
, 0))
8885 This logic lose for arguments like (type *)malloc (sizeof (type)),
8886 since we strip the casts of up to VOID return value from malloc.
8887 Perhaps we ought to inherit type from non-VOID argument here? */
8890 if (!POINTER_TYPE_P (TREE_TYPE (src
))
8891 || !POINTER_TYPE_P (TREE_TYPE (dest
)))
8893 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
8894 if (TREE_CODE (src
) == POINTER_PLUS_EXPR
)
8896 tree tem
= TREE_OPERAND (src
, 0);
8898 if (tem
!= TREE_OPERAND (src
, 0))
8899 src
= build1 (NOP_EXPR
, TREE_TYPE (tem
), src
);
8901 if (TREE_CODE (dest
) == POINTER_PLUS_EXPR
)
8903 tree tem
= TREE_OPERAND (dest
, 0);
8905 if (tem
!= TREE_OPERAND (dest
, 0))
8906 dest
= build1 (NOP_EXPR
, TREE_TYPE (tem
), dest
);
8908 srctype
= TREE_TYPE (TREE_TYPE (src
));
8909 if (TREE_CODE (srctype
) == ARRAY_TYPE
8910 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype
), len
))
8912 srctype
= TREE_TYPE (srctype
);
8914 src
= build1 (NOP_EXPR
, build_pointer_type (srctype
), src
);
8916 desttype
= TREE_TYPE (TREE_TYPE (dest
));
8917 if (TREE_CODE (desttype
) == ARRAY_TYPE
8918 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype
), len
))
8920 desttype
= TREE_TYPE (desttype
);
8922 dest
= build1 (NOP_EXPR
, build_pointer_type (desttype
), dest
);
8924 if (TREE_ADDRESSABLE (srctype
)
8925 || TREE_ADDRESSABLE (desttype
))
8928 src_align
= get_pointer_alignment (src
);
8929 dest_align
= get_pointer_alignment (dest
);
8930 if (dest_align
< TYPE_ALIGN (desttype
)
8931 || src_align
< TYPE_ALIGN (srctype
))
8935 dest
= builtin_save_expr (dest
);
8937 /* Build accesses at offset zero with a ref-all character type. */
8938 off0
= build_int_cst (build_pointer_type_for_mode (char_type_node
,
8939 ptr_mode
, true), 0);
8942 STRIP_NOPS (destvar
);
8943 if (TREE_CODE (destvar
) == ADDR_EXPR
8944 && var_decl_component_p (TREE_OPERAND (destvar
, 0))
8945 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype
), len
))
8946 destvar
= fold_build2 (MEM_REF
, desttype
, destvar
, off0
);
8948 destvar
= NULL_TREE
;
8951 STRIP_NOPS (srcvar
);
8952 if (TREE_CODE (srcvar
) == ADDR_EXPR
8953 && var_decl_component_p (TREE_OPERAND (srcvar
, 0))
8954 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype
), len
))
8957 || src_align
>= TYPE_ALIGN (desttype
))
8958 srcvar
= fold_build2 (MEM_REF
, destvar
? desttype
: srctype
,
8960 else if (!STRICT_ALIGNMENT
)
8962 srctype
= build_aligned_type (TYPE_MAIN_VARIANT (desttype
),
8964 srcvar
= fold_build2 (MEM_REF
, srctype
, srcvar
, off0
);
8972 if (srcvar
== NULL_TREE
&& destvar
== NULL_TREE
)
8975 if (srcvar
== NULL_TREE
)
8978 if (src_align
>= TYPE_ALIGN (desttype
))
8979 srcvar
= fold_build2 (MEM_REF
, desttype
, src
, off0
);
8982 if (STRICT_ALIGNMENT
)
8984 srctype
= build_aligned_type (TYPE_MAIN_VARIANT (desttype
),
8986 srcvar
= fold_build2 (MEM_REF
, srctype
, src
, off0
);
8989 else if (destvar
== NULL_TREE
)
8992 if (dest_align
>= TYPE_ALIGN (srctype
))
8993 destvar
= fold_build2 (MEM_REF
, srctype
, dest
, off0
);
8996 if (STRICT_ALIGNMENT
)
8998 desttype
= build_aligned_type (TYPE_MAIN_VARIANT (srctype
),
9000 destvar
= fold_build2 (MEM_REF
, desttype
, dest
, off0
);
9004 expr
= build2 (MODIFY_EXPR
, TREE_TYPE (destvar
), destvar
, srcvar
);
9010 if (endp
== 0 || endp
== 3)
9011 return omit_one_operand_loc (loc
, type
, dest
, expr
);
9017 len
= fold_build2_loc (loc
, MINUS_EXPR
, TREE_TYPE (len
), len
,
9020 dest
= fold_build_pointer_plus_loc (loc
, dest
, len
);
9021 dest
= fold_convert_loc (loc
, type
, dest
);
9023 dest
= omit_one_operand_loc (loc
, type
, dest
, expr
);
9027 /* Fold function call to builtin strcpy with arguments DEST and SRC.
9028 If LEN is not NULL, it represents the length of the string to be
9029 copied. Return NULL_TREE if no simplification can be made. */
9032 fold_builtin_strcpy (location_t loc
, tree fndecl
, tree dest
, tree src
, tree len
)
9036 if (!validate_arg (dest
, POINTER_TYPE
)
9037 || !validate_arg (src
, POINTER_TYPE
))
9040 /* If SRC and DEST are the same (and not volatile), return DEST. */
9041 if (operand_equal_p (src
, dest
, 0))
9042 return fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), dest
);
9044 if (optimize_function_for_size_p (cfun
))
9047 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
9053 len
= c_strlen (src
, 1);
9054 if (! len
|| TREE_SIDE_EFFECTS (len
))
9058 len
= fold_convert_loc (loc
, size_type_node
, len
);
9059 len
= size_binop_loc (loc
, PLUS_EXPR
, len
, build_int_cst (size_type_node
, 1));
9060 return fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)),
9061 build_call_expr_loc (loc
, fn
, 3, dest
, src
, len
));
9064 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
9065 Return NULL_TREE if no simplification can be made. */
9068 fold_builtin_stpcpy (location_t loc
, tree fndecl
, tree dest
, tree src
)
9070 tree fn
, len
, lenp1
, call
, type
;
9072 if (!validate_arg (dest
, POINTER_TYPE
)
9073 || !validate_arg (src
, POINTER_TYPE
))
9076 len
= c_strlen (src
, 1);
9078 || TREE_CODE (len
) != INTEGER_CST
)
9081 if (optimize_function_for_size_p (cfun
)
9082 /* If length is zero it's small enough. */
9083 && !integer_zerop (len
))
9086 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
9090 lenp1
= size_binop_loc (loc
, PLUS_EXPR
,
9091 fold_convert_loc (loc
, size_type_node
, len
),
9092 build_int_cst (size_type_node
, 1));
9093 /* We use dest twice in building our expression. Save it from
9094 multiple expansions. */
9095 dest
= builtin_save_expr (dest
);
9096 call
= build_call_expr_loc (loc
, fn
, 3, dest
, src
, lenp1
);
9098 type
= TREE_TYPE (TREE_TYPE (fndecl
));
9099 dest
= fold_build_pointer_plus_loc (loc
, dest
, len
);
9100 dest
= fold_convert_loc (loc
, type
, dest
);
9101 dest
= omit_one_operand_loc (loc
, type
, dest
, call
);
9105 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
9106 If SLEN is not NULL, it represents the length of the source string.
9107 Return NULL_TREE if no simplification can be made. */
9110 fold_builtin_strncpy (location_t loc
, tree fndecl
, tree dest
,
9111 tree src
, tree len
, tree slen
)
9115 if (!validate_arg (dest
, POINTER_TYPE
)
9116 || !validate_arg (src
, POINTER_TYPE
)
9117 || !validate_arg (len
, INTEGER_TYPE
))
9120 /* If the LEN parameter is zero, return DEST. */
9121 if (integer_zerop (len
))
9122 return omit_one_operand_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), dest
, src
);
9124 /* We can't compare slen with len as constants below if len is not a
9126 if (len
== 0 || TREE_CODE (len
) != INTEGER_CST
)
9130 slen
= c_strlen (src
, 1);
9132 /* Now, we must be passed a constant src ptr parameter. */
9133 if (slen
== 0 || TREE_CODE (slen
) != INTEGER_CST
)
9136 slen
= size_binop_loc (loc
, PLUS_EXPR
, slen
, ssize_int (1));
9138 /* We do not support simplification of this case, though we do
9139 support it when expanding trees into RTL. */
9140 /* FIXME: generate a call to __builtin_memset. */
9141 if (tree_int_cst_lt (slen
, len
))
9144 /* OK transform into builtin memcpy. */
9145 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
9149 len
= fold_convert_loc (loc
, size_type_node
, len
);
9150 return fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)),
9151 build_call_expr_loc (loc
, fn
, 3, dest
, src
, len
));
9154 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
9155 arguments to the call, and TYPE is its return type.
9156 Return NULL_TREE if no simplification can be made. */
9159 fold_builtin_memchr (location_t loc
, tree arg1
, tree arg2
, tree len
, tree type
)
9161 if (!validate_arg (arg1
, POINTER_TYPE
)
9162 || !validate_arg (arg2
, INTEGER_TYPE
)
9163 || !validate_arg (len
, INTEGER_TYPE
))
9169 if (TREE_CODE (arg2
) != INTEGER_CST
9170 || !host_integerp (len
, 1))
9173 p1
= c_getstr (arg1
);
9174 if (p1
&& compare_tree_int (len
, strlen (p1
) + 1) <= 0)
9180 if (target_char_cast (arg2
, &c
))
9183 r
= (const char *) memchr (p1
, c
, tree_low_cst (len
, 1));
9186 return build_int_cst (TREE_TYPE (arg1
), 0);
9188 tem
= fold_build_pointer_plus_hwi_loc (loc
, arg1
, r
- p1
);
9189 return fold_convert_loc (loc
, type
, tem
);
9195 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
9196 Return NULL_TREE if no simplification can be made. */
9199 fold_builtin_memcmp (location_t loc
, tree arg1
, tree arg2
, tree len
)
9201 const char *p1
, *p2
;
9203 if (!validate_arg (arg1
, POINTER_TYPE
)
9204 || !validate_arg (arg2
, POINTER_TYPE
)
9205 || !validate_arg (len
, INTEGER_TYPE
))
9208 /* If the LEN parameter is zero, return zero. */
9209 if (integer_zerop (len
))
9210 return omit_two_operands_loc (loc
, integer_type_node
, integer_zero_node
,
9213 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9214 if (operand_equal_p (arg1
, arg2
, 0))
9215 return omit_one_operand_loc (loc
, integer_type_node
, integer_zero_node
, len
);
9217 p1
= c_getstr (arg1
);
9218 p2
= c_getstr (arg2
);
9220 /* If all arguments are constant, and the value of len is not greater
9221 than the lengths of arg1 and arg2, evaluate at compile-time. */
9222 if (host_integerp (len
, 1) && p1
&& p2
9223 && compare_tree_int (len
, strlen (p1
) + 1) <= 0
9224 && compare_tree_int (len
, strlen (p2
) + 1) <= 0)
9226 const int r
= memcmp (p1
, p2
, tree_low_cst (len
, 1));
9229 return integer_one_node
;
9231 return integer_minus_one_node
;
9233 return integer_zero_node
;
9236 /* If len parameter is one, return an expression corresponding to
9237 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9238 if (host_integerp (len
, 1) && tree_low_cst (len
, 1) == 1)
9240 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
9241 tree cst_uchar_ptr_node
9242 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
9245 = fold_convert_loc (loc
, integer_type_node
,
9246 build1 (INDIRECT_REF
, cst_uchar_node
,
9247 fold_convert_loc (loc
,
9251 = fold_convert_loc (loc
, integer_type_node
,
9252 build1 (INDIRECT_REF
, cst_uchar_node
,
9253 fold_convert_loc (loc
,
9256 return fold_build2_loc (loc
, MINUS_EXPR
, integer_type_node
, ind1
, ind2
);
9262 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9263 Return NULL_TREE if no simplification can be made. */
9266 fold_builtin_strcmp (location_t loc
, tree arg1
, tree arg2
)
9268 const char *p1
, *p2
;
9270 if (!validate_arg (arg1
, POINTER_TYPE
)
9271 || !validate_arg (arg2
, POINTER_TYPE
))
9274 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9275 if (operand_equal_p (arg1
, arg2
, 0))
9276 return integer_zero_node
;
9278 p1
= c_getstr (arg1
);
9279 p2
= c_getstr (arg2
);
9283 const int i
= strcmp (p1
, p2
);
9285 return integer_minus_one_node
;
9287 return integer_one_node
;
9289 return integer_zero_node
;
9292 /* If the second arg is "", return *(const unsigned char*)arg1. */
9293 if (p2
&& *p2
== '\0')
9295 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
9296 tree cst_uchar_ptr_node
9297 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
9299 return fold_convert_loc (loc
, integer_type_node
,
9300 build1 (INDIRECT_REF
, cst_uchar_node
,
9301 fold_convert_loc (loc
,
9306 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9307 if (p1
&& *p1
== '\0')
9309 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
9310 tree cst_uchar_ptr_node
9311 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
9314 = fold_convert_loc (loc
, integer_type_node
,
9315 build1 (INDIRECT_REF
, cst_uchar_node
,
9316 fold_convert_loc (loc
,
9319 return fold_build1_loc (loc
, NEGATE_EXPR
, integer_type_node
, temp
);
9325 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9326 Return NULL_TREE if no simplification can be made. */
9329 fold_builtin_strncmp (location_t loc
, tree arg1
, tree arg2
, tree len
)
9331 const char *p1
, *p2
;
9333 if (!validate_arg (arg1
, POINTER_TYPE
)
9334 || !validate_arg (arg2
, POINTER_TYPE
)
9335 || !validate_arg (len
, INTEGER_TYPE
))
9338 /* If the LEN parameter is zero, return zero. */
9339 if (integer_zerop (len
))
9340 return omit_two_operands_loc (loc
, integer_type_node
, integer_zero_node
,
9343 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9344 if (operand_equal_p (arg1
, arg2
, 0))
9345 return omit_one_operand_loc (loc
, integer_type_node
, integer_zero_node
, len
);
9347 p1
= c_getstr (arg1
);
9348 p2
= c_getstr (arg2
);
9350 if (host_integerp (len
, 1) && p1
&& p2
)
9352 const int i
= strncmp (p1
, p2
, tree_low_cst (len
, 1));
9354 return integer_one_node
;
9356 return integer_minus_one_node
;
9358 return integer_zero_node
;
9361 /* If the second arg is "", and the length is greater than zero,
9362 return *(const unsigned char*)arg1. */
9363 if (p2
&& *p2
== '\0'
9364 && TREE_CODE (len
) == INTEGER_CST
9365 && tree_int_cst_sgn (len
) == 1)
9367 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
9368 tree cst_uchar_ptr_node
9369 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
9371 return fold_convert_loc (loc
, integer_type_node
,
9372 build1 (INDIRECT_REF
, cst_uchar_node
,
9373 fold_convert_loc (loc
,
9378 /* If the first arg is "", and the length is greater than zero,
9379 return -*(const unsigned char*)arg2. */
9380 if (p1
&& *p1
== '\0'
9381 && TREE_CODE (len
) == INTEGER_CST
9382 && tree_int_cst_sgn (len
) == 1)
9384 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
9385 tree cst_uchar_ptr_node
9386 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
9388 tree temp
= fold_convert_loc (loc
, integer_type_node
,
9389 build1 (INDIRECT_REF
, cst_uchar_node
,
9390 fold_convert_loc (loc
,
9393 return fold_build1_loc (loc
, NEGATE_EXPR
, integer_type_node
, temp
);
9396 /* If len parameter is one, return an expression corresponding to
9397 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9398 if (host_integerp (len
, 1) && tree_low_cst (len
, 1) == 1)
9400 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
9401 tree cst_uchar_ptr_node
9402 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
9404 tree ind1
= fold_convert_loc (loc
, integer_type_node
,
9405 build1 (INDIRECT_REF
, cst_uchar_node
,
9406 fold_convert_loc (loc
,
9409 tree ind2
= fold_convert_loc (loc
, integer_type_node
,
9410 build1 (INDIRECT_REF
, cst_uchar_node
,
9411 fold_convert_loc (loc
,
9414 return fold_build2_loc (loc
, MINUS_EXPR
, integer_type_node
, ind1
, ind2
);
9420 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9421 ARG. Return NULL_TREE if no simplification can be made. */
9424 fold_builtin_signbit (location_t loc
, tree arg
, tree type
)
9426 if (!validate_arg (arg
, REAL_TYPE
))
9429 /* If ARG is a compile-time constant, determine the result. */
9430 if (TREE_CODE (arg
) == REAL_CST
9431 && !TREE_OVERFLOW (arg
))
9435 c
= TREE_REAL_CST (arg
);
9436 return (REAL_VALUE_NEGATIVE (c
)
9437 ? build_one_cst (type
)
9438 : build_zero_cst (type
));
9441 /* If ARG is non-negative, the result is always zero. */
9442 if (tree_expr_nonnegative_p (arg
))
9443 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
9445 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9446 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg
))))
9447 return fold_convert (type
,
9448 fold_build2_loc (loc
, LT_EXPR
, boolean_type_node
, arg
,
9449 build_real (TREE_TYPE (arg
), dconst0
)));
9454 /* Fold function call to builtin copysign, copysignf or copysignl with
9455 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9459 fold_builtin_copysign (location_t loc
, tree fndecl
,
9460 tree arg1
, tree arg2
, tree type
)
9464 if (!validate_arg (arg1
, REAL_TYPE
)
9465 || !validate_arg (arg2
, REAL_TYPE
))
9468 /* copysign(X,X) is X. */
9469 if (operand_equal_p (arg1
, arg2
, 0))
9470 return fold_convert_loc (loc
, type
, arg1
);
9472 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9473 if (TREE_CODE (arg1
) == REAL_CST
9474 && TREE_CODE (arg2
) == REAL_CST
9475 && !TREE_OVERFLOW (arg1
)
9476 && !TREE_OVERFLOW (arg2
))
9478 REAL_VALUE_TYPE c1
, c2
;
9480 c1
= TREE_REAL_CST (arg1
);
9481 c2
= TREE_REAL_CST (arg2
);
9482 /* c1.sign := c2.sign. */
9483 real_copysign (&c1
, &c2
);
9484 return build_real (type
, c1
);
9487 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9488 Remember to evaluate Y for side-effects. */
9489 if (tree_expr_nonnegative_p (arg2
))
9490 return omit_one_operand_loc (loc
, type
,
9491 fold_build1_loc (loc
, ABS_EXPR
, type
, arg1
),
9494 /* Strip sign changing operations for the first argument. */
9495 tem
= fold_strip_sign_ops (arg1
);
9497 return build_call_expr_loc (loc
, fndecl
, 2, tem
, arg2
);
9502 /* Fold a call to builtin isascii with argument ARG. */
9505 fold_builtin_isascii (location_t loc
, tree arg
)
9507 if (!validate_arg (arg
, INTEGER_TYPE
))
9511 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9512 arg
= fold_build2 (BIT_AND_EXPR
, integer_type_node
, arg
,
9513 build_int_cst (integer_type_node
,
9514 ~ (unsigned HOST_WIDE_INT
) 0x7f));
9515 return fold_build2_loc (loc
, EQ_EXPR
, integer_type_node
,
9516 arg
, integer_zero_node
);
9520 /* Fold a call to builtin toascii with argument ARG. */
9523 fold_builtin_toascii (location_t loc
, tree arg
)
9525 if (!validate_arg (arg
, INTEGER_TYPE
))
9528 /* Transform toascii(c) -> (c & 0x7f). */
9529 return fold_build2_loc (loc
, BIT_AND_EXPR
, integer_type_node
, arg
,
9530 build_int_cst (integer_type_node
, 0x7f));
9533 /* Fold a call to builtin isdigit with argument ARG. */
9536 fold_builtin_isdigit (location_t loc
, tree arg
)
9538 if (!validate_arg (arg
, INTEGER_TYPE
))
9542 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9543 /* According to the C standard, isdigit is unaffected by locale.
9544 However, it definitely is affected by the target character set. */
9545 unsigned HOST_WIDE_INT target_digit0
9546 = lang_hooks
.to_target_charset ('0');
9548 if (target_digit0
== 0)
9551 arg
= fold_convert_loc (loc
, unsigned_type_node
, arg
);
9552 arg
= fold_build2 (MINUS_EXPR
, unsigned_type_node
, arg
,
9553 build_int_cst (unsigned_type_node
, target_digit0
));
9554 return fold_build2_loc (loc
, LE_EXPR
, integer_type_node
, arg
,
9555 build_int_cst (unsigned_type_node
, 9));
9559 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9562 fold_builtin_fabs (location_t loc
, tree arg
, tree type
)
9564 if (!validate_arg (arg
, REAL_TYPE
))
9567 arg
= fold_convert_loc (loc
, type
, arg
);
9568 if (TREE_CODE (arg
) == REAL_CST
)
9569 return fold_abs_const (arg
, type
);
9570 return fold_build1_loc (loc
, ABS_EXPR
, type
, arg
);
9573 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9576 fold_builtin_abs (location_t loc
, tree arg
, tree type
)
9578 if (!validate_arg (arg
, INTEGER_TYPE
))
9581 arg
= fold_convert_loc (loc
, type
, arg
);
9582 if (TREE_CODE (arg
) == INTEGER_CST
)
9583 return fold_abs_const (arg
, type
);
9584 return fold_build1_loc (loc
, ABS_EXPR
, type
, arg
);
9587 /* Fold a fma operation with arguments ARG[012]. */
9590 fold_fma (location_t loc ATTRIBUTE_UNUSED
,
9591 tree type
, tree arg0
, tree arg1
, tree arg2
)
9593 if (TREE_CODE (arg0
) == REAL_CST
9594 && TREE_CODE (arg1
) == REAL_CST
9595 && TREE_CODE (arg2
) == REAL_CST
)
9596 return do_mpfr_arg3 (arg0
, arg1
, arg2
, type
, mpfr_fma
);
9601 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
9604 fold_builtin_fma (location_t loc
, tree arg0
, tree arg1
, tree arg2
, tree type
)
9606 if (validate_arg (arg0
, REAL_TYPE
)
9607 && validate_arg(arg1
, REAL_TYPE
)
9608 && validate_arg(arg2
, REAL_TYPE
))
9610 tree tem
= fold_fma (loc
, type
, arg0
, arg1
, arg2
);
9614 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9615 if (optab_handler (fma_optab
, TYPE_MODE (type
)) != CODE_FOR_nothing
)
9616 return fold_build3_loc (loc
, FMA_EXPR
, type
, arg0
, arg1
, arg2
);
9621 /* Fold a call to builtin fmin or fmax. */
9624 fold_builtin_fmin_fmax (location_t loc
, tree arg0
, tree arg1
,
9625 tree type
, bool max
)
9627 if (validate_arg (arg0
, REAL_TYPE
) && validate_arg (arg1
, REAL_TYPE
))
9629 /* Calculate the result when the argument is a constant. */
9630 tree res
= do_mpfr_arg2 (arg0
, arg1
, type
, (max
? mpfr_max
: mpfr_min
));
9635 /* If either argument is NaN, return the other one. Avoid the
9636 transformation if we get (and honor) a signalling NaN. Using
9637 omit_one_operand() ensures we create a non-lvalue. */
9638 if (TREE_CODE (arg0
) == REAL_CST
9639 && real_isnan (&TREE_REAL_CST (arg0
))
9640 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
9641 || ! TREE_REAL_CST (arg0
).signalling
))
9642 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
9643 if (TREE_CODE (arg1
) == REAL_CST
9644 && real_isnan (&TREE_REAL_CST (arg1
))
9645 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1
)))
9646 || ! TREE_REAL_CST (arg1
).signalling
))
9647 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
9649 /* Transform fmin/fmax(x,x) -> x. */
9650 if (operand_equal_p (arg0
, arg1
, OEP_PURE_SAME
))
9651 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
9653 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9654 functions to return the numeric arg if the other one is NaN.
9655 These tree codes don't honor that, so only transform if
9656 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9657 handled, so we don't have to worry about it either. */
9658 if (flag_finite_math_only
)
9659 return fold_build2_loc (loc
, (max
? MAX_EXPR
: MIN_EXPR
), type
,
9660 fold_convert_loc (loc
, type
, arg0
),
9661 fold_convert_loc (loc
, type
, arg1
));
9666 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9669 fold_builtin_carg (location_t loc
, tree arg
, tree type
)
9671 if (validate_arg (arg
, COMPLEX_TYPE
)
9672 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) == REAL_TYPE
)
9674 tree atan2_fn
= mathfn_built_in (type
, BUILT_IN_ATAN2
);
9678 tree new_arg
= builtin_save_expr (arg
);
9679 tree r_arg
= fold_build1_loc (loc
, REALPART_EXPR
, type
, new_arg
);
9680 tree i_arg
= fold_build1_loc (loc
, IMAGPART_EXPR
, type
, new_arg
);
9681 return build_call_expr_loc (loc
, atan2_fn
, 2, i_arg
, r_arg
);
9688 /* Fold a call to builtin logb/ilogb. */
9691 fold_builtin_logb (location_t loc
, tree arg
, tree rettype
)
9693 if (! validate_arg (arg
, REAL_TYPE
))
9698 if (TREE_CODE (arg
) == REAL_CST
&& ! TREE_OVERFLOW (arg
))
9700 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg
);
9706 /* If arg is Inf or NaN and we're logb, return it. */
9707 if (TREE_CODE (rettype
) == REAL_TYPE
)
9708 return fold_convert_loc (loc
, rettype
, arg
);
9709 /* Fall through... */
9711 /* Zero may set errno and/or raise an exception for logb, also
9712 for ilogb we don't know FP_ILOGB0. */
9715 /* For normal numbers, proceed iff radix == 2. In GCC,
9716 normalized significands are in the range [0.5, 1.0). We
9717 want the exponent as if they were [1.0, 2.0) so get the
9718 exponent and subtract 1. */
9719 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg
)))->b
== 2)
9720 return fold_convert_loc (loc
, rettype
,
9721 build_int_cst (integer_type_node
,
9722 REAL_EXP (value
)-1));
9730 /* Fold a call to builtin significand, if radix == 2. */
9733 fold_builtin_significand (location_t loc
, tree arg
, tree rettype
)
9735 if (! validate_arg (arg
, REAL_TYPE
))
9740 if (TREE_CODE (arg
) == REAL_CST
&& ! TREE_OVERFLOW (arg
))
9742 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg
);
9749 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9750 return fold_convert_loc (loc
, rettype
, arg
);
9752 /* For normal numbers, proceed iff radix == 2. */
9753 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg
)))->b
== 2)
9755 REAL_VALUE_TYPE result
= *value
;
9756 /* In GCC, normalized significands are in the range [0.5,
9757 1.0). We want them to be [1.0, 2.0) so set the
9759 SET_REAL_EXP (&result
, 1);
9760 return build_real (rettype
, result
);
9769 /* Fold a call to builtin frexp, we can assume the base is 2. */
9772 fold_builtin_frexp (location_t loc
, tree arg0
, tree arg1
, tree rettype
)
9774 if (! validate_arg (arg0
, REAL_TYPE
) || ! validate_arg (arg1
, POINTER_TYPE
))
9779 if (!(TREE_CODE (arg0
) == REAL_CST
&& ! TREE_OVERFLOW (arg0
)))
9782 arg1
= build_fold_indirect_ref_loc (loc
, arg1
);
9784 /* Proceed if a valid pointer type was passed in. */
9785 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1
)) == integer_type_node
)
9787 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg0
);
9793 /* For +-0, return (*exp = 0, +-0). */
9794 exp
= integer_zero_node
;
9799 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9800 return omit_one_operand_loc (loc
, rettype
, arg0
, arg1
);
9803 /* Since the frexp function always expects base 2, and in
9804 GCC normalized significands are already in the range
9805 [0.5, 1.0), we have exactly what frexp wants. */
9806 REAL_VALUE_TYPE frac_rvt
= *value
;
9807 SET_REAL_EXP (&frac_rvt
, 0);
9808 frac
= build_real (rettype
, frac_rvt
);
9809 exp
= build_int_cst (integer_type_node
, REAL_EXP (value
));
9816 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9817 arg1
= fold_build2_loc (loc
, MODIFY_EXPR
, rettype
, arg1
, exp
);
9818 TREE_SIDE_EFFECTS (arg1
) = 1;
9819 return fold_build2_loc (loc
, COMPOUND_EXPR
, rettype
, arg1
, frac
);
9825 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9826 then we can assume the base is two. If it's false, then we have to
9827 check the mode of the TYPE parameter in certain cases. */
9830 fold_builtin_load_exponent (location_t loc
, tree arg0
, tree arg1
,
9831 tree type
, bool ldexp
)
9833 if (validate_arg (arg0
, REAL_TYPE
) && validate_arg (arg1
, INTEGER_TYPE
))
9838 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9839 if (real_zerop (arg0
) || integer_zerop (arg1
)
9840 || (TREE_CODE (arg0
) == REAL_CST
9841 && !real_isfinite (&TREE_REAL_CST (arg0
))))
9842 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
9844 /* If both arguments are constant, then try to evaluate it. */
9845 if ((ldexp
|| REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2)
9846 && TREE_CODE (arg0
) == REAL_CST
&& !TREE_OVERFLOW (arg0
)
9847 && host_integerp (arg1
, 0))
9849 /* Bound the maximum adjustment to twice the range of the
9850 mode's valid exponents. Use abs to ensure the range is
9851 positive as a sanity check. */
9852 const long max_exp_adj
= 2 *
9853 labs (REAL_MODE_FORMAT (TYPE_MODE (type
))->emax
9854 - REAL_MODE_FORMAT (TYPE_MODE (type
))->emin
);
9856 /* Get the user-requested adjustment. */
9857 const HOST_WIDE_INT req_exp_adj
= tree_low_cst (arg1
, 0);
9859 /* The requested adjustment must be inside this range. This
9860 is a preliminary cap to avoid things like overflow, we
9861 may still fail to compute the result for other reasons. */
9862 if (-max_exp_adj
< req_exp_adj
&& req_exp_adj
< max_exp_adj
)
9864 REAL_VALUE_TYPE initial_result
;
9866 real_ldexp (&initial_result
, &TREE_REAL_CST (arg0
), req_exp_adj
);
9868 /* Ensure we didn't overflow. */
9869 if (! real_isinf (&initial_result
))
9871 const REAL_VALUE_TYPE trunc_result
9872 = real_value_truncate (TYPE_MODE (type
), initial_result
);
9874 /* Only proceed if the target mode can hold the
9876 if (REAL_VALUES_EQUAL (initial_result
, trunc_result
))
9877 return build_real (type
, trunc_result
);
9886 /* Fold a call to builtin modf. */
9889 fold_builtin_modf (location_t loc
, tree arg0
, tree arg1
, tree rettype
)
9891 if (! validate_arg (arg0
, REAL_TYPE
) || ! validate_arg (arg1
, POINTER_TYPE
))
9896 if (!(TREE_CODE (arg0
) == REAL_CST
&& ! TREE_OVERFLOW (arg0
)))
9899 arg1
= build_fold_indirect_ref_loc (loc
, arg1
);
9901 /* Proceed if a valid pointer type was passed in. */
9902 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1
)) == TYPE_MAIN_VARIANT (rettype
))
9904 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg0
);
9905 REAL_VALUE_TYPE trunc
, frac
;
9911 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9912 trunc
= frac
= *value
;
9915 /* For +-Inf, return (*arg1 = arg0, +-0). */
9917 frac
.sign
= value
->sign
;
9921 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9922 real_trunc (&trunc
, VOIDmode
, value
);
9923 real_arithmetic (&frac
, MINUS_EXPR
, value
, &trunc
);
9924 /* If the original number was negative and already
9925 integral, then the fractional part is -0.0. */
9926 if (value
->sign
&& frac
.cl
== rvc_zero
)
9927 frac
.sign
= value
->sign
;
9931 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9932 arg1
= fold_build2_loc (loc
, MODIFY_EXPR
, rettype
, arg1
,
9933 build_real (rettype
, trunc
));
9934 TREE_SIDE_EFFECTS (arg1
) = 1;
9935 return fold_build2_loc (loc
, COMPOUND_EXPR
, rettype
, arg1
,
9936 build_real (rettype
, frac
));
9942 /* Given a location LOC, an interclass builtin function decl FNDECL
9943 and its single argument ARG, return an folded expression computing
9944 the same, or NULL_TREE if we either couldn't or didn't want to fold
9945 (the latter happen if there's an RTL instruction available). */
9948 fold_builtin_interclass_mathfn (location_t loc
, tree fndecl
, tree arg
)
9950 enum machine_mode mode
;
9952 if (!validate_arg (arg
, REAL_TYPE
))
9955 if (interclass_mathfn_icode (arg
, fndecl
) != CODE_FOR_nothing
)
9958 mode
= TYPE_MODE (TREE_TYPE (arg
));
9960 /* If there is no optab, try generic code. */
9961 switch (DECL_FUNCTION_CODE (fndecl
))
9965 CASE_FLT_FN (BUILT_IN_ISINF
):
9967 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9968 tree
const isgr_fn
= builtin_decl_explicit (BUILT_IN_ISGREATER
);
9969 tree
const type
= TREE_TYPE (arg
);
9973 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
9974 real_from_string (&r
, buf
);
9975 result
= build_call_expr (isgr_fn
, 2,
9976 fold_build1_loc (loc
, ABS_EXPR
, type
, arg
),
9977 build_real (type
, r
));
9980 CASE_FLT_FN (BUILT_IN_FINITE
):
9981 case BUILT_IN_ISFINITE
:
9983 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9984 tree
const isle_fn
= builtin_decl_explicit (BUILT_IN_ISLESSEQUAL
);
9985 tree
const type
= TREE_TYPE (arg
);
9989 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
9990 real_from_string (&r
, buf
);
9991 result
= build_call_expr (isle_fn
, 2,
9992 fold_build1_loc (loc
, ABS_EXPR
, type
, arg
),
9993 build_real (type
, r
));
9994 /*result = fold_build2_loc (loc, UNGT_EXPR,
9995 TREE_TYPE (TREE_TYPE (fndecl)),
9996 fold_build1_loc (loc, ABS_EXPR, type, arg),
9997 build_real (type, r));
9998 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9999 TREE_TYPE (TREE_TYPE (fndecl)),
10003 case BUILT_IN_ISNORMAL
:
10005 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
10006 islessequal(fabs(x),DBL_MAX). */
10007 tree
const isle_fn
= builtin_decl_explicit (BUILT_IN_ISLESSEQUAL
);
10008 tree
const isge_fn
= builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL
);
10009 tree
const type
= TREE_TYPE (arg
);
10010 REAL_VALUE_TYPE rmax
, rmin
;
10013 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
10014 real_from_string (&rmax
, buf
);
10015 sprintf (buf
, "0x1p%d", REAL_MODE_FORMAT (mode
)->emin
- 1);
10016 real_from_string (&rmin
, buf
);
10017 arg
= builtin_save_expr (fold_build1_loc (loc
, ABS_EXPR
, type
, arg
));
10018 result
= build_call_expr (isle_fn
, 2, arg
,
10019 build_real (type
, rmax
));
10020 result
= fold_build2 (BIT_AND_EXPR
, integer_type_node
, result
,
10021 build_call_expr (isge_fn
, 2, arg
,
10022 build_real (type
, rmin
)));
10032 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
10033 ARG is the argument for the call. */
10036 fold_builtin_classify (location_t loc
, tree fndecl
, tree arg
, int builtin_index
)
10038 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
10041 if (!validate_arg (arg
, REAL_TYPE
))
10044 switch (builtin_index
)
10046 case BUILT_IN_ISINF
:
10047 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg
))))
10048 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
10050 if (TREE_CODE (arg
) == REAL_CST
)
10052 r
= TREE_REAL_CST (arg
);
10053 if (real_isinf (&r
))
10054 return real_compare (GT_EXPR
, &r
, &dconst0
)
10055 ? integer_one_node
: integer_minus_one_node
;
10057 return integer_zero_node
;
10062 case BUILT_IN_ISINF_SIGN
:
10064 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
10065 /* In a boolean context, GCC will fold the inner COND_EXPR to
10066 1. So e.g. "if (isinf_sign(x))" would be folded to just
10067 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
10068 tree signbit_fn
= mathfn_built_in_1 (TREE_TYPE (arg
), BUILT_IN_SIGNBIT
, 0);
10069 tree isinf_fn
= builtin_decl_explicit (BUILT_IN_ISINF
);
10070 tree tmp
= NULL_TREE
;
10072 arg
= builtin_save_expr (arg
);
10074 if (signbit_fn
&& isinf_fn
)
10076 tree signbit_call
= build_call_expr_loc (loc
, signbit_fn
, 1, arg
);
10077 tree isinf_call
= build_call_expr_loc (loc
, isinf_fn
, 1, arg
);
10079 signbit_call
= fold_build2_loc (loc
, NE_EXPR
, integer_type_node
,
10080 signbit_call
, integer_zero_node
);
10081 isinf_call
= fold_build2_loc (loc
, NE_EXPR
, integer_type_node
,
10082 isinf_call
, integer_zero_node
);
10084 tmp
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, signbit_call
,
10085 integer_minus_one_node
, integer_one_node
);
10086 tmp
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
,
10088 integer_zero_node
);
10094 case BUILT_IN_ISFINITE
:
10095 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg
)))
10096 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg
))))
10097 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg
);
10099 if (TREE_CODE (arg
) == REAL_CST
)
10101 r
= TREE_REAL_CST (arg
);
10102 return real_isfinite (&r
) ? integer_one_node
: integer_zero_node
;
10107 case BUILT_IN_ISNAN
:
10108 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg
))))
10109 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
10111 if (TREE_CODE (arg
) == REAL_CST
)
10113 r
= TREE_REAL_CST (arg
);
10114 return real_isnan (&r
) ? integer_one_node
: integer_zero_node
;
10117 arg
= builtin_save_expr (arg
);
10118 return fold_build2_loc (loc
, UNORDERED_EXPR
, type
, arg
, arg
);
10121 gcc_unreachable ();
10125 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
10126 This builtin will generate code to return the appropriate floating
10127 point classification depending on the value of the floating point
10128 number passed in. The possible return values must be supplied as
10129 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
10130 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
10131 one floating point argument which is "type generic". */
10134 fold_builtin_fpclassify (location_t loc
, tree exp
)
10136 tree fp_nan
, fp_infinite
, fp_normal
, fp_subnormal
, fp_zero
,
10137 arg
, type
, res
, tmp
;
10138 enum machine_mode mode
;
10142 /* Verify the required arguments in the original call. */
10143 if (!validate_arglist (exp
, INTEGER_TYPE
, INTEGER_TYPE
,
10144 INTEGER_TYPE
, INTEGER_TYPE
,
10145 INTEGER_TYPE
, REAL_TYPE
, VOID_TYPE
))
10148 fp_nan
= CALL_EXPR_ARG (exp
, 0);
10149 fp_infinite
= CALL_EXPR_ARG (exp
, 1);
10150 fp_normal
= CALL_EXPR_ARG (exp
, 2);
10151 fp_subnormal
= CALL_EXPR_ARG (exp
, 3);
10152 fp_zero
= CALL_EXPR_ARG (exp
, 4);
10153 arg
= CALL_EXPR_ARG (exp
, 5);
10154 type
= TREE_TYPE (arg
);
10155 mode
= TYPE_MODE (type
);
10156 arg
= builtin_save_expr (fold_build1_loc (loc
, ABS_EXPR
, type
, arg
));
10158 /* fpclassify(x) ->
10159 isnan(x) ? FP_NAN :
10160 (fabs(x) == Inf ? FP_INFINITE :
10161 (fabs(x) >= DBL_MIN ? FP_NORMAL :
10162 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
10164 tmp
= fold_build2_loc (loc
, EQ_EXPR
, integer_type_node
, arg
,
10165 build_real (type
, dconst0
));
10166 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
,
10167 tmp
, fp_zero
, fp_subnormal
);
10169 sprintf (buf
, "0x1p%d", REAL_MODE_FORMAT (mode
)->emin
- 1);
10170 real_from_string (&r
, buf
);
10171 tmp
= fold_build2_loc (loc
, GE_EXPR
, integer_type_node
,
10172 arg
, build_real (type
, r
));
10173 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, tmp
, fp_normal
, res
);
10175 if (HONOR_INFINITIES (mode
))
10178 tmp
= fold_build2_loc (loc
, EQ_EXPR
, integer_type_node
, arg
,
10179 build_real (type
, r
));
10180 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, tmp
,
10184 if (HONOR_NANS (mode
))
10186 tmp
= fold_build2_loc (loc
, ORDERED_EXPR
, integer_type_node
, arg
, arg
);
10187 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, tmp
, res
, fp_nan
);
10193 /* Fold a call to an unordered comparison function such as
10194 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
10195 being called and ARG0 and ARG1 are the arguments for the call.
10196 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
10197 the opposite of the desired result. UNORDERED_CODE is used
10198 for modes that can hold NaNs and ORDERED_CODE is used for
10202 fold_builtin_unordered_cmp (location_t loc
, tree fndecl
, tree arg0
, tree arg1
,
10203 enum tree_code unordered_code
,
10204 enum tree_code ordered_code
)
10206 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
10207 enum tree_code code
;
10209 enum tree_code code0
, code1
;
10210 tree cmp_type
= NULL_TREE
;
10212 type0
= TREE_TYPE (arg0
);
10213 type1
= TREE_TYPE (arg1
);
10215 code0
= TREE_CODE (type0
);
10216 code1
= TREE_CODE (type1
);
10218 if (code0
== REAL_TYPE
&& code1
== REAL_TYPE
)
10219 /* Choose the wider of two real types. */
10220 cmp_type
= TYPE_PRECISION (type0
) >= TYPE_PRECISION (type1
)
10222 else if (code0
== REAL_TYPE
&& code1
== INTEGER_TYPE
)
10224 else if (code0
== INTEGER_TYPE
&& code1
== REAL_TYPE
)
10227 arg0
= fold_convert_loc (loc
, cmp_type
, arg0
);
10228 arg1
= fold_convert_loc (loc
, cmp_type
, arg1
);
10230 if (unordered_code
== UNORDERED_EXPR
)
10232 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))))
10233 return omit_two_operands_loc (loc
, type
, integer_zero_node
, arg0
, arg1
);
10234 return fold_build2_loc (loc
, UNORDERED_EXPR
, type
, arg0
, arg1
);
10237 code
= HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))) ? unordered_code
10239 return fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
,
10240 fold_build2_loc (loc
, code
, type
, arg0
, arg1
));
10243 /* Fold a call to built-in function FNDECL with 0 arguments.
10244 IGNORE is true if the result of the function call is ignored. This
10245 function returns NULL_TREE if no simplification was possible. */
10248 fold_builtin_0 (location_t loc
, tree fndecl
, bool ignore ATTRIBUTE_UNUSED
)
10250 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
10251 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10254 CASE_FLT_FN (BUILT_IN_INF
):
10255 case BUILT_IN_INFD32
:
10256 case BUILT_IN_INFD64
:
10257 case BUILT_IN_INFD128
:
10258 return fold_builtin_inf (loc
, type
, true);
10260 CASE_FLT_FN (BUILT_IN_HUGE_VAL
):
10261 return fold_builtin_inf (loc
, type
, false);
10263 case BUILT_IN_CLASSIFY_TYPE
:
10264 return fold_builtin_classify_type (NULL_TREE
);
10272 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10273 IGNORE is true if the result of the function call is ignored. This
10274 function returns NULL_TREE if no simplification was possible. */
10277 fold_builtin_1 (location_t loc
, tree fndecl
, tree arg0
, bool ignore
)
10279 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
10280 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10283 case BUILT_IN_CONSTANT_P
:
10285 tree val
= fold_builtin_constant_p (arg0
);
10287 /* Gimplification will pull the CALL_EXPR for the builtin out of
10288 an if condition. When not optimizing, we'll not CSE it back.
10289 To avoid link error types of regressions, return false now. */
10290 if (!val
&& !optimize
)
10291 val
= integer_zero_node
;
10296 case BUILT_IN_CLASSIFY_TYPE
:
10297 return fold_builtin_classify_type (arg0
);
10299 case BUILT_IN_STRLEN
:
10300 return fold_builtin_strlen (loc
, type
, arg0
);
10302 CASE_FLT_FN (BUILT_IN_FABS
):
10303 return fold_builtin_fabs (loc
, arg0
, type
);
10306 case BUILT_IN_LABS
:
10307 case BUILT_IN_LLABS
:
10308 case BUILT_IN_IMAXABS
:
10309 return fold_builtin_abs (loc
, arg0
, type
);
10311 CASE_FLT_FN (BUILT_IN_CONJ
):
10312 if (validate_arg (arg0
, COMPLEX_TYPE
)
10313 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10314 return fold_build1_loc (loc
, CONJ_EXPR
, type
, arg0
);
10317 CASE_FLT_FN (BUILT_IN_CREAL
):
10318 if (validate_arg (arg0
, COMPLEX_TYPE
)
10319 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10320 return non_lvalue_loc (loc
, fold_build1_loc (loc
, REALPART_EXPR
, type
, arg0
));;
10323 CASE_FLT_FN (BUILT_IN_CIMAG
):
10324 if (validate_arg (arg0
, COMPLEX_TYPE
)
10325 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10326 return non_lvalue_loc (loc
, fold_build1_loc (loc
, IMAGPART_EXPR
, type
, arg0
));
10329 CASE_FLT_FN (BUILT_IN_CCOS
):
10330 return fold_builtin_ccos(loc
, arg0
, type
, fndecl
, /*hyper=*/ false);
10332 CASE_FLT_FN (BUILT_IN_CCOSH
):
10333 return fold_builtin_ccos(loc
, arg0
, type
, fndecl
, /*hyper=*/ true);
10335 CASE_FLT_FN (BUILT_IN_CPROJ
):
10336 return fold_builtin_cproj(loc
, arg0
, type
);
10338 CASE_FLT_FN (BUILT_IN_CSIN
):
10339 if (validate_arg (arg0
, COMPLEX_TYPE
)
10340 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10341 return do_mpc_arg1 (arg0
, type
, mpc_sin
);
10344 CASE_FLT_FN (BUILT_IN_CSINH
):
10345 if (validate_arg (arg0
, COMPLEX_TYPE
)
10346 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10347 return do_mpc_arg1 (arg0
, type
, mpc_sinh
);
10350 CASE_FLT_FN (BUILT_IN_CTAN
):
10351 if (validate_arg (arg0
, COMPLEX_TYPE
)
10352 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10353 return do_mpc_arg1 (arg0
, type
, mpc_tan
);
10356 CASE_FLT_FN (BUILT_IN_CTANH
):
10357 if (validate_arg (arg0
, COMPLEX_TYPE
)
10358 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10359 return do_mpc_arg1 (arg0
, type
, mpc_tanh
);
10362 CASE_FLT_FN (BUILT_IN_CLOG
):
10363 if (validate_arg (arg0
, COMPLEX_TYPE
)
10364 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10365 return do_mpc_arg1 (arg0
, type
, mpc_log
);
10368 CASE_FLT_FN (BUILT_IN_CSQRT
):
10369 if (validate_arg (arg0
, COMPLEX_TYPE
)
10370 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10371 return do_mpc_arg1 (arg0
, type
, mpc_sqrt
);
10374 CASE_FLT_FN (BUILT_IN_CASIN
):
10375 if (validate_arg (arg0
, COMPLEX_TYPE
)
10376 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10377 return do_mpc_arg1 (arg0
, type
, mpc_asin
);
10380 CASE_FLT_FN (BUILT_IN_CACOS
):
10381 if (validate_arg (arg0
, COMPLEX_TYPE
)
10382 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10383 return do_mpc_arg1 (arg0
, type
, mpc_acos
);
10386 CASE_FLT_FN (BUILT_IN_CATAN
):
10387 if (validate_arg (arg0
, COMPLEX_TYPE
)
10388 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10389 return do_mpc_arg1 (arg0
, type
, mpc_atan
);
10392 CASE_FLT_FN (BUILT_IN_CASINH
):
10393 if (validate_arg (arg0
, COMPLEX_TYPE
)
10394 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10395 return do_mpc_arg1 (arg0
, type
, mpc_asinh
);
10398 CASE_FLT_FN (BUILT_IN_CACOSH
):
10399 if (validate_arg (arg0
, COMPLEX_TYPE
)
10400 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10401 return do_mpc_arg1 (arg0
, type
, mpc_acosh
);
10404 CASE_FLT_FN (BUILT_IN_CATANH
):
10405 if (validate_arg (arg0
, COMPLEX_TYPE
)
10406 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10407 return do_mpc_arg1 (arg0
, type
, mpc_atanh
);
10410 CASE_FLT_FN (BUILT_IN_CABS
):
10411 return fold_builtin_cabs (loc
, arg0
, type
, fndecl
);
10413 CASE_FLT_FN (BUILT_IN_CARG
):
10414 return fold_builtin_carg (loc
, arg0
, type
);
10416 CASE_FLT_FN (BUILT_IN_SQRT
):
10417 return fold_builtin_sqrt (loc
, arg0
, type
);
10419 CASE_FLT_FN (BUILT_IN_CBRT
):
10420 return fold_builtin_cbrt (loc
, arg0
, type
);
10422 CASE_FLT_FN (BUILT_IN_ASIN
):
10423 if (validate_arg (arg0
, REAL_TYPE
))
10424 return do_mpfr_arg1 (arg0
, type
, mpfr_asin
,
10425 &dconstm1
, &dconst1
, true);
10428 CASE_FLT_FN (BUILT_IN_ACOS
):
10429 if (validate_arg (arg0
, REAL_TYPE
))
10430 return do_mpfr_arg1 (arg0
, type
, mpfr_acos
,
10431 &dconstm1
, &dconst1
, true);
10434 CASE_FLT_FN (BUILT_IN_ATAN
):
10435 if (validate_arg (arg0
, REAL_TYPE
))
10436 return do_mpfr_arg1 (arg0
, type
, mpfr_atan
, NULL
, NULL
, 0);
10439 CASE_FLT_FN (BUILT_IN_ASINH
):
10440 if (validate_arg (arg0
, REAL_TYPE
))
10441 return do_mpfr_arg1 (arg0
, type
, mpfr_asinh
, NULL
, NULL
, 0);
10444 CASE_FLT_FN (BUILT_IN_ACOSH
):
10445 if (validate_arg (arg0
, REAL_TYPE
))
10446 return do_mpfr_arg1 (arg0
, type
, mpfr_acosh
,
10447 &dconst1
, NULL
, true);
10450 CASE_FLT_FN (BUILT_IN_ATANH
):
10451 if (validate_arg (arg0
, REAL_TYPE
))
10452 return do_mpfr_arg1 (arg0
, type
, mpfr_atanh
,
10453 &dconstm1
, &dconst1
, false);
10456 CASE_FLT_FN (BUILT_IN_SIN
):
10457 if (validate_arg (arg0
, REAL_TYPE
))
10458 return do_mpfr_arg1 (arg0
, type
, mpfr_sin
, NULL
, NULL
, 0);
10461 CASE_FLT_FN (BUILT_IN_COS
):
10462 return fold_builtin_cos (loc
, arg0
, type
, fndecl
);
10464 CASE_FLT_FN (BUILT_IN_TAN
):
10465 return fold_builtin_tan (arg0
, type
);
10467 CASE_FLT_FN (BUILT_IN_CEXP
):
10468 return fold_builtin_cexp (loc
, arg0
, type
);
10470 CASE_FLT_FN (BUILT_IN_CEXPI
):
10471 if (validate_arg (arg0
, REAL_TYPE
))
10472 return do_mpfr_sincos (arg0
, NULL_TREE
, NULL_TREE
);
10475 CASE_FLT_FN (BUILT_IN_SINH
):
10476 if (validate_arg (arg0
, REAL_TYPE
))
10477 return do_mpfr_arg1 (arg0
, type
, mpfr_sinh
, NULL
, NULL
, 0);
10480 CASE_FLT_FN (BUILT_IN_COSH
):
10481 return fold_builtin_cosh (loc
, arg0
, type
, fndecl
);
10483 CASE_FLT_FN (BUILT_IN_TANH
):
10484 if (validate_arg (arg0
, REAL_TYPE
))
10485 return do_mpfr_arg1 (arg0
, type
, mpfr_tanh
, NULL
, NULL
, 0);
10488 CASE_FLT_FN (BUILT_IN_ERF
):
10489 if (validate_arg (arg0
, REAL_TYPE
))
10490 return do_mpfr_arg1 (arg0
, type
, mpfr_erf
, NULL
, NULL
, 0);
10493 CASE_FLT_FN (BUILT_IN_ERFC
):
10494 if (validate_arg (arg0
, REAL_TYPE
))
10495 return do_mpfr_arg1 (arg0
, type
, mpfr_erfc
, NULL
, NULL
, 0);
10498 CASE_FLT_FN (BUILT_IN_TGAMMA
):
10499 if (validate_arg (arg0
, REAL_TYPE
))
10500 return do_mpfr_arg1 (arg0
, type
, mpfr_gamma
, NULL
, NULL
, 0);
10503 CASE_FLT_FN (BUILT_IN_EXP
):
10504 return fold_builtin_exponent (loc
, fndecl
, arg0
, mpfr_exp
);
10506 CASE_FLT_FN (BUILT_IN_EXP2
):
10507 return fold_builtin_exponent (loc
, fndecl
, arg0
, mpfr_exp2
);
10509 CASE_FLT_FN (BUILT_IN_EXP10
):
10510 CASE_FLT_FN (BUILT_IN_POW10
):
10511 return fold_builtin_exponent (loc
, fndecl
, arg0
, mpfr_exp10
);
10513 CASE_FLT_FN (BUILT_IN_EXPM1
):
10514 if (validate_arg (arg0
, REAL_TYPE
))
10515 return do_mpfr_arg1 (arg0
, type
, mpfr_expm1
, NULL
, NULL
, 0);
10518 CASE_FLT_FN (BUILT_IN_LOG
):
10519 return fold_builtin_logarithm (loc
, fndecl
, arg0
, mpfr_log
);
10521 CASE_FLT_FN (BUILT_IN_LOG2
):
10522 return fold_builtin_logarithm (loc
, fndecl
, arg0
, mpfr_log2
);
10524 CASE_FLT_FN (BUILT_IN_LOG10
):
10525 return fold_builtin_logarithm (loc
, fndecl
, arg0
, mpfr_log10
);
10527 CASE_FLT_FN (BUILT_IN_LOG1P
):
10528 if (validate_arg (arg0
, REAL_TYPE
))
10529 return do_mpfr_arg1 (arg0
, type
, mpfr_log1p
,
10530 &dconstm1
, NULL
, false);
10533 CASE_FLT_FN (BUILT_IN_J0
):
10534 if (validate_arg (arg0
, REAL_TYPE
))
10535 return do_mpfr_arg1 (arg0
, type
, mpfr_j0
,
10539 CASE_FLT_FN (BUILT_IN_J1
):
10540 if (validate_arg (arg0
, REAL_TYPE
))
10541 return do_mpfr_arg1 (arg0
, type
, mpfr_j1
,
10545 CASE_FLT_FN (BUILT_IN_Y0
):
10546 if (validate_arg (arg0
, REAL_TYPE
))
10547 return do_mpfr_arg1 (arg0
, type
, mpfr_y0
,
10548 &dconst0
, NULL
, false);
10551 CASE_FLT_FN (BUILT_IN_Y1
):
10552 if (validate_arg (arg0
, REAL_TYPE
))
10553 return do_mpfr_arg1 (arg0
, type
, mpfr_y1
,
10554 &dconst0
, NULL
, false);
10557 CASE_FLT_FN (BUILT_IN_NAN
):
10558 case BUILT_IN_NAND32
:
10559 case BUILT_IN_NAND64
:
10560 case BUILT_IN_NAND128
:
10561 return fold_builtin_nan (arg0
, type
, true);
10563 CASE_FLT_FN (BUILT_IN_NANS
):
10564 return fold_builtin_nan (arg0
, type
, false);
10566 CASE_FLT_FN (BUILT_IN_FLOOR
):
10567 return fold_builtin_floor (loc
, fndecl
, arg0
);
10569 CASE_FLT_FN (BUILT_IN_CEIL
):
10570 return fold_builtin_ceil (loc
, fndecl
, arg0
);
10572 CASE_FLT_FN (BUILT_IN_TRUNC
):
10573 return fold_builtin_trunc (loc
, fndecl
, arg0
);
10575 CASE_FLT_FN (BUILT_IN_ROUND
):
10576 return fold_builtin_round (loc
, fndecl
, arg0
);
10578 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
10579 CASE_FLT_FN (BUILT_IN_RINT
):
10580 return fold_trunc_transparent_mathfn (loc
, fndecl
, arg0
);
10582 CASE_FLT_FN (BUILT_IN_ICEIL
):
10583 CASE_FLT_FN (BUILT_IN_LCEIL
):
10584 CASE_FLT_FN (BUILT_IN_LLCEIL
):
10585 CASE_FLT_FN (BUILT_IN_LFLOOR
):
10586 CASE_FLT_FN (BUILT_IN_IFLOOR
):
10587 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
10588 CASE_FLT_FN (BUILT_IN_IROUND
):
10589 CASE_FLT_FN (BUILT_IN_LROUND
):
10590 CASE_FLT_FN (BUILT_IN_LLROUND
):
10591 return fold_builtin_int_roundingfn (loc
, fndecl
, arg0
);
10593 CASE_FLT_FN (BUILT_IN_IRINT
):
10594 CASE_FLT_FN (BUILT_IN_LRINT
):
10595 CASE_FLT_FN (BUILT_IN_LLRINT
):
10596 return fold_fixed_mathfn (loc
, fndecl
, arg0
);
10598 case BUILT_IN_BSWAP16
:
10599 case BUILT_IN_BSWAP32
:
10600 case BUILT_IN_BSWAP64
:
10601 return fold_builtin_bswap (fndecl
, arg0
);
10603 CASE_INT_FN (BUILT_IN_FFS
):
10604 CASE_INT_FN (BUILT_IN_CLZ
):
10605 CASE_INT_FN (BUILT_IN_CTZ
):
10606 CASE_INT_FN (BUILT_IN_CLRSB
):
10607 CASE_INT_FN (BUILT_IN_POPCOUNT
):
10608 CASE_INT_FN (BUILT_IN_PARITY
):
10609 return fold_builtin_bitop (fndecl
, arg0
);
10611 CASE_FLT_FN (BUILT_IN_SIGNBIT
):
10612 return fold_builtin_signbit (loc
, arg0
, type
);
10614 CASE_FLT_FN (BUILT_IN_SIGNIFICAND
):
10615 return fold_builtin_significand (loc
, arg0
, type
);
10617 CASE_FLT_FN (BUILT_IN_ILOGB
):
10618 CASE_FLT_FN (BUILT_IN_LOGB
):
10619 return fold_builtin_logb (loc
, arg0
, type
);
10621 case BUILT_IN_ISASCII
:
10622 return fold_builtin_isascii (loc
, arg0
);
10624 case BUILT_IN_TOASCII
:
10625 return fold_builtin_toascii (loc
, arg0
);
10627 case BUILT_IN_ISDIGIT
:
10628 return fold_builtin_isdigit (loc
, arg0
);
10630 CASE_FLT_FN (BUILT_IN_FINITE
):
10631 case BUILT_IN_FINITED32
:
10632 case BUILT_IN_FINITED64
:
10633 case BUILT_IN_FINITED128
:
10634 case BUILT_IN_ISFINITE
:
10636 tree ret
= fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISFINITE
);
10639 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
10642 CASE_FLT_FN (BUILT_IN_ISINF
):
10643 case BUILT_IN_ISINFD32
:
10644 case BUILT_IN_ISINFD64
:
10645 case BUILT_IN_ISINFD128
:
10647 tree ret
= fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISINF
);
10650 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
10653 case BUILT_IN_ISNORMAL
:
10654 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
10656 case BUILT_IN_ISINF_SIGN
:
10657 return fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISINF_SIGN
);
10659 CASE_FLT_FN (BUILT_IN_ISNAN
):
10660 case BUILT_IN_ISNAND32
:
10661 case BUILT_IN_ISNAND64
:
10662 case BUILT_IN_ISNAND128
:
10663 return fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISNAN
);
10665 case BUILT_IN_PRINTF
:
10666 case BUILT_IN_PRINTF_UNLOCKED
:
10667 case BUILT_IN_VPRINTF
:
10668 return fold_builtin_printf (loc
, fndecl
, arg0
, NULL_TREE
, ignore
, fcode
);
10670 case BUILT_IN_FREE
:
10671 if (integer_zerop (arg0
))
10672 return build_empty_stmt (loc
);
10683 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10684 IGNORE is true if the result of the function call is ignored. This
10685 function returns NULL_TREE if no simplification was possible. */
10688 fold_builtin_2 (location_t loc
, tree fndecl
, tree arg0
, tree arg1
, bool ignore
)
10690 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
10691 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10695 CASE_FLT_FN (BUILT_IN_JN
):
10696 if (validate_arg (arg0
, INTEGER_TYPE
)
10697 && validate_arg (arg1
, REAL_TYPE
))
10698 return do_mpfr_bessel_n (arg0
, arg1
, type
, mpfr_jn
, NULL
, 0);
10701 CASE_FLT_FN (BUILT_IN_YN
):
10702 if (validate_arg (arg0
, INTEGER_TYPE
)
10703 && validate_arg (arg1
, REAL_TYPE
))
10704 return do_mpfr_bessel_n (arg0
, arg1
, type
, mpfr_yn
,
10708 CASE_FLT_FN (BUILT_IN_DREM
):
10709 CASE_FLT_FN (BUILT_IN_REMAINDER
):
10710 if (validate_arg (arg0
, REAL_TYPE
)
10711 && validate_arg(arg1
, REAL_TYPE
))
10712 return do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_remainder
);
10715 CASE_FLT_FN_REENT (BUILT_IN_GAMMA
): /* GAMMA_R */
10716 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA
): /* LGAMMA_R */
10717 if (validate_arg (arg0
, REAL_TYPE
)
10718 && validate_arg(arg1
, POINTER_TYPE
))
10719 return do_mpfr_lgamma_r (arg0
, arg1
, type
);
10722 CASE_FLT_FN (BUILT_IN_ATAN2
):
10723 if (validate_arg (arg0
, REAL_TYPE
)
10724 && validate_arg(arg1
, REAL_TYPE
))
10725 return do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_atan2
);
10728 CASE_FLT_FN (BUILT_IN_FDIM
):
10729 if (validate_arg (arg0
, REAL_TYPE
)
10730 && validate_arg(arg1
, REAL_TYPE
))
10731 return do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_dim
);
10734 CASE_FLT_FN (BUILT_IN_HYPOT
):
10735 return fold_builtin_hypot (loc
, fndecl
, arg0
, arg1
, type
);
10737 CASE_FLT_FN (BUILT_IN_CPOW
):
10738 if (validate_arg (arg0
, COMPLEX_TYPE
)
10739 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
10740 && validate_arg (arg1
, COMPLEX_TYPE
)
10741 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1
))) == REAL_TYPE
)
10742 return do_mpc_arg2 (arg0
, arg1
, type
, /*do_nonfinite=*/ 0, mpc_pow
);
10745 CASE_FLT_FN (BUILT_IN_LDEXP
):
10746 return fold_builtin_load_exponent (loc
, arg0
, arg1
, type
, /*ldexp=*/true);
10747 CASE_FLT_FN (BUILT_IN_SCALBN
):
10748 CASE_FLT_FN (BUILT_IN_SCALBLN
):
10749 return fold_builtin_load_exponent (loc
, arg0
, arg1
,
10750 type
, /*ldexp=*/false);
10752 CASE_FLT_FN (BUILT_IN_FREXP
):
10753 return fold_builtin_frexp (loc
, arg0
, arg1
, type
);
10755 CASE_FLT_FN (BUILT_IN_MODF
):
10756 return fold_builtin_modf (loc
, arg0
, arg1
, type
);
10758 case BUILT_IN_BZERO
:
10759 return fold_builtin_bzero (loc
, arg0
, arg1
, ignore
);
10761 case BUILT_IN_FPUTS
:
10762 return fold_builtin_fputs (loc
, arg0
, arg1
, ignore
, false, NULL_TREE
);
10764 case BUILT_IN_FPUTS_UNLOCKED
:
10765 return fold_builtin_fputs (loc
, arg0
, arg1
, ignore
, true, NULL_TREE
);
10767 case BUILT_IN_STRSTR
:
10768 return fold_builtin_strstr (loc
, arg0
, arg1
, type
);
10770 case BUILT_IN_STRCAT
:
10771 return fold_builtin_strcat (loc
, arg0
, arg1
);
10773 case BUILT_IN_STRSPN
:
10774 return fold_builtin_strspn (loc
, arg0
, arg1
);
10776 case BUILT_IN_STRCSPN
:
10777 return fold_builtin_strcspn (loc
, arg0
, arg1
);
10779 case BUILT_IN_STRCHR
:
10780 case BUILT_IN_INDEX
:
10781 return fold_builtin_strchr (loc
, arg0
, arg1
, type
);
10783 case BUILT_IN_STRRCHR
:
10784 case BUILT_IN_RINDEX
:
10785 return fold_builtin_strrchr (loc
, arg0
, arg1
, type
);
10787 case BUILT_IN_STRCPY
:
10788 return fold_builtin_strcpy (loc
, fndecl
, arg0
, arg1
, NULL_TREE
);
10790 case BUILT_IN_STPCPY
:
10793 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
10797 return build_call_expr_loc (loc
, fn
, 2, arg0
, arg1
);
10800 return fold_builtin_stpcpy (loc
, fndecl
, arg0
, arg1
);
10803 case BUILT_IN_STRCMP
:
10804 return fold_builtin_strcmp (loc
, arg0
, arg1
);
10806 case BUILT_IN_STRPBRK
:
10807 return fold_builtin_strpbrk (loc
, arg0
, arg1
, type
);
10809 case BUILT_IN_EXPECT
:
10810 return fold_builtin_expect (loc
, arg0
, arg1
);
10812 CASE_FLT_FN (BUILT_IN_POW
):
10813 return fold_builtin_pow (loc
, fndecl
, arg0
, arg1
, type
);
10815 CASE_FLT_FN (BUILT_IN_POWI
):
10816 return fold_builtin_powi (loc
, fndecl
, arg0
, arg1
, type
);
10818 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
10819 return fold_builtin_copysign (loc
, fndecl
, arg0
, arg1
, type
);
10821 CASE_FLT_FN (BUILT_IN_FMIN
):
10822 return fold_builtin_fmin_fmax (loc
, arg0
, arg1
, type
, /*max=*/false);
10824 CASE_FLT_FN (BUILT_IN_FMAX
):
10825 return fold_builtin_fmin_fmax (loc
, arg0
, arg1
, type
, /*max=*/true);
10827 case BUILT_IN_ISGREATER
:
10828 return fold_builtin_unordered_cmp (loc
, fndecl
,
10829 arg0
, arg1
, UNLE_EXPR
, LE_EXPR
);
10830 case BUILT_IN_ISGREATEREQUAL
:
10831 return fold_builtin_unordered_cmp (loc
, fndecl
,
10832 arg0
, arg1
, UNLT_EXPR
, LT_EXPR
);
10833 case BUILT_IN_ISLESS
:
10834 return fold_builtin_unordered_cmp (loc
, fndecl
,
10835 arg0
, arg1
, UNGE_EXPR
, GE_EXPR
);
10836 case BUILT_IN_ISLESSEQUAL
:
10837 return fold_builtin_unordered_cmp (loc
, fndecl
,
10838 arg0
, arg1
, UNGT_EXPR
, GT_EXPR
);
10839 case BUILT_IN_ISLESSGREATER
:
10840 return fold_builtin_unordered_cmp (loc
, fndecl
,
10841 arg0
, arg1
, UNEQ_EXPR
, EQ_EXPR
);
10842 case BUILT_IN_ISUNORDERED
:
10843 return fold_builtin_unordered_cmp (loc
, fndecl
,
10844 arg0
, arg1
, UNORDERED_EXPR
,
10847 /* We do the folding for va_start in the expander. */
10848 case BUILT_IN_VA_START
:
10851 case BUILT_IN_SPRINTF
:
10852 return fold_builtin_sprintf (loc
, arg0
, arg1
, NULL_TREE
, ignore
);
10854 case BUILT_IN_OBJECT_SIZE
:
10855 return fold_builtin_object_size (arg0
, arg1
);
10857 case BUILT_IN_PRINTF
:
10858 case BUILT_IN_PRINTF_UNLOCKED
:
10859 case BUILT_IN_VPRINTF
:
10860 return fold_builtin_printf (loc
, fndecl
, arg0
, arg1
, ignore
, fcode
);
10862 case BUILT_IN_PRINTF_CHK
:
10863 case BUILT_IN_VPRINTF_CHK
:
10864 if (!validate_arg (arg0
, INTEGER_TYPE
)
10865 || TREE_SIDE_EFFECTS (arg0
))
10868 return fold_builtin_printf (loc
, fndecl
,
10869 arg1
, NULL_TREE
, ignore
, fcode
);
10872 case BUILT_IN_FPRINTF
:
10873 case BUILT_IN_FPRINTF_UNLOCKED
:
10874 case BUILT_IN_VFPRINTF
:
10875 return fold_builtin_fprintf (loc
, fndecl
, arg0
, arg1
, NULL_TREE
,
10878 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE
:
10879 return fold_builtin_atomic_always_lock_free (arg0
, arg1
);
10881 case BUILT_IN_ATOMIC_IS_LOCK_FREE
:
10882 return fold_builtin_atomic_is_lock_free (arg0
, arg1
);
10890 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10891 and ARG2. IGNORE is true if the result of the function call is ignored.
10892 This function returns NULL_TREE if no simplification was possible. */
10895 fold_builtin_3 (location_t loc
, tree fndecl
,
10896 tree arg0
, tree arg1
, tree arg2
, bool ignore
)
10898 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
10899 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10903 CASE_FLT_FN (BUILT_IN_SINCOS
):
10904 return fold_builtin_sincos (loc
, arg0
, arg1
, arg2
);
10906 CASE_FLT_FN (BUILT_IN_FMA
):
10907 return fold_builtin_fma (loc
, arg0
, arg1
, arg2
, type
);
10910 CASE_FLT_FN (BUILT_IN_REMQUO
):
10911 if (validate_arg (arg0
, REAL_TYPE
)
10912 && validate_arg(arg1
, REAL_TYPE
)
10913 && validate_arg(arg2
, POINTER_TYPE
))
10914 return do_mpfr_remquo (arg0
, arg1
, arg2
);
10917 case BUILT_IN_MEMSET
:
10918 return fold_builtin_memset (loc
, arg0
, arg1
, arg2
, type
, ignore
);
10920 case BUILT_IN_BCOPY
:
10921 return fold_builtin_memory_op (loc
, arg1
, arg0
, arg2
,
10922 void_type_node
, true, /*endp=*/3);
10924 case BUILT_IN_MEMCPY
:
10925 return fold_builtin_memory_op (loc
, arg0
, arg1
, arg2
,
10926 type
, ignore
, /*endp=*/0);
10928 case BUILT_IN_MEMPCPY
:
10929 return fold_builtin_memory_op (loc
, arg0
, arg1
, arg2
,
10930 type
, ignore
, /*endp=*/1);
10932 case BUILT_IN_MEMMOVE
:
10933 return fold_builtin_memory_op (loc
, arg0
, arg1
, arg2
,
10934 type
, ignore
, /*endp=*/3);
10936 case BUILT_IN_STRNCAT
:
10937 return fold_builtin_strncat (loc
, arg0
, arg1
, arg2
);
10939 case BUILT_IN_STRNCPY
:
10940 return fold_builtin_strncpy (loc
, fndecl
, arg0
, arg1
, arg2
, NULL_TREE
);
10942 case BUILT_IN_STRNCMP
:
10943 return fold_builtin_strncmp (loc
, arg0
, arg1
, arg2
);
10945 case BUILT_IN_MEMCHR
:
10946 return fold_builtin_memchr (loc
, arg0
, arg1
, arg2
, type
);
10948 case BUILT_IN_BCMP
:
10949 case BUILT_IN_MEMCMP
:
10950 return fold_builtin_memcmp (loc
, arg0
, arg1
, arg2
);;
10952 case BUILT_IN_SPRINTF
:
10953 return fold_builtin_sprintf (loc
, arg0
, arg1
, arg2
, ignore
);
10955 case BUILT_IN_SNPRINTF
:
10956 return fold_builtin_snprintf (loc
, arg0
, arg1
, arg2
, NULL_TREE
, ignore
);
10958 case BUILT_IN_STRCPY_CHK
:
10959 case BUILT_IN_STPCPY_CHK
:
10960 return fold_builtin_stxcpy_chk (loc
, fndecl
, arg0
, arg1
, arg2
, NULL_TREE
,
10963 case BUILT_IN_STRCAT_CHK
:
10964 return fold_builtin_strcat_chk (loc
, fndecl
, arg0
, arg1
, arg2
);
10966 case BUILT_IN_PRINTF_CHK
:
10967 case BUILT_IN_VPRINTF_CHK
:
10968 if (!validate_arg (arg0
, INTEGER_TYPE
)
10969 || TREE_SIDE_EFFECTS (arg0
))
10972 return fold_builtin_printf (loc
, fndecl
, arg1
, arg2
, ignore
, fcode
);
10975 case BUILT_IN_FPRINTF
:
10976 case BUILT_IN_FPRINTF_UNLOCKED
:
10977 case BUILT_IN_VFPRINTF
:
10978 return fold_builtin_fprintf (loc
, fndecl
, arg0
, arg1
, arg2
,
10981 case BUILT_IN_FPRINTF_CHK
:
10982 case BUILT_IN_VFPRINTF_CHK
:
10983 if (!validate_arg (arg1
, INTEGER_TYPE
)
10984 || TREE_SIDE_EFFECTS (arg1
))
10987 return fold_builtin_fprintf (loc
, fndecl
, arg0
, arg2
, NULL_TREE
,
10996 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10997 ARG2, and ARG3. IGNORE is true if the result of the function call is
10998 ignored. This function returns NULL_TREE if no simplification was
11002 fold_builtin_4 (location_t loc
, tree fndecl
,
11003 tree arg0
, tree arg1
, tree arg2
, tree arg3
, bool ignore
)
11005 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
11009 case BUILT_IN_MEMCPY_CHK
:
11010 case BUILT_IN_MEMPCPY_CHK
:
11011 case BUILT_IN_MEMMOVE_CHK
:
11012 case BUILT_IN_MEMSET_CHK
:
11013 return fold_builtin_memory_chk (loc
, fndecl
, arg0
, arg1
, arg2
, arg3
,
11015 DECL_FUNCTION_CODE (fndecl
));
11017 case BUILT_IN_STRNCPY_CHK
:
11018 case BUILT_IN_STPNCPY_CHK
:
11019 return fold_builtin_stxncpy_chk (loc
, arg0
, arg1
, arg2
, arg3
, NULL_TREE
,
11022 case BUILT_IN_STRNCAT_CHK
:
11023 return fold_builtin_strncat_chk (loc
, fndecl
, arg0
, arg1
, arg2
, arg3
);
11025 case BUILT_IN_SNPRINTF
:
11026 return fold_builtin_snprintf (loc
, arg0
, arg1
, arg2
, arg3
, ignore
);
11028 case BUILT_IN_FPRINTF_CHK
:
11029 case BUILT_IN_VFPRINTF_CHK
:
11030 if (!validate_arg (arg1
, INTEGER_TYPE
)
11031 || TREE_SIDE_EFFECTS (arg1
))
11034 return fold_builtin_fprintf (loc
, fndecl
, arg0
, arg2
, arg3
,
11044 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
11045 arguments, where NARGS <= 4. IGNORE is true if the result of the
11046 function call is ignored. This function returns NULL_TREE if no
11047 simplification was possible. Note that this only folds builtins with
11048 fixed argument patterns. Foldings that do varargs-to-varargs
11049 transformations, or that match calls with more than 4 arguments,
11050 need to be handled with fold_builtin_varargs instead. */
11052 #define MAX_ARGS_TO_FOLD_BUILTIN 4
11055 fold_builtin_n (location_t loc
, tree fndecl
, tree
*args
, int nargs
, bool ignore
)
11057 tree ret
= NULL_TREE
;
11062 ret
= fold_builtin_0 (loc
, fndecl
, ignore
);
11065 ret
= fold_builtin_1 (loc
, fndecl
, args
[0], ignore
);
11068 ret
= fold_builtin_2 (loc
, fndecl
, args
[0], args
[1], ignore
);
11071 ret
= fold_builtin_3 (loc
, fndecl
, args
[0], args
[1], args
[2], ignore
);
11074 ret
= fold_builtin_4 (loc
, fndecl
, args
[0], args
[1], args
[2], args
[3],
11082 ret
= build1 (NOP_EXPR
, TREE_TYPE (ret
), ret
);
11083 SET_EXPR_LOCATION (ret
, loc
);
11084 TREE_NO_WARNING (ret
) = 1;
11090 /* Builtins with folding operations that operate on "..." arguments
11091 need special handling; we need to store the arguments in a convenient
11092 data structure before attempting any folding. Fortunately there are
11093 only a few builtins that fall into this category. FNDECL is the
11094 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
11095 result of the function call is ignored. */
11098 fold_builtin_varargs (location_t loc
, tree fndecl
, tree exp
,
11099 bool ignore ATTRIBUTE_UNUSED
)
11101 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
11102 tree ret
= NULL_TREE
;
11106 case BUILT_IN_SPRINTF_CHK
:
11107 case BUILT_IN_VSPRINTF_CHK
:
11108 ret
= fold_builtin_sprintf_chk (loc
, exp
, fcode
);
11111 case BUILT_IN_SNPRINTF_CHK
:
11112 case BUILT_IN_VSNPRINTF_CHK
:
11113 ret
= fold_builtin_snprintf_chk (loc
, exp
, NULL_TREE
, fcode
);
11116 case BUILT_IN_FPCLASSIFY
:
11117 ret
= fold_builtin_fpclassify (loc
, exp
);
11125 ret
= build1 (NOP_EXPR
, TREE_TYPE (ret
), ret
);
11126 SET_EXPR_LOCATION (ret
, loc
);
11127 TREE_NO_WARNING (ret
) = 1;
11133 /* Return true if FNDECL shouldn't be folded right now.
11134 If a built-in function has an inline attribute always_inline
11135 wrapper, defer folding it after always_inline functions have
11136 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
11137 might not be performed. */
11140 avoid_folding_inline_builtin (tree fndecl
)
11142 return (DECL_DECLARED_INLINE_P (fndecl
)
11143 && DECL_DISREGARD_INLINE_LIMITS (fndecl
)
11145 && !cfun
->always_inline_functions_inlined
11146 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl
)));
11149 /* A wrapper function for builtin folding that prevents warnings for
11150 "statement without effect" and the like, caused by removing the
11151 call node earlier than the warning is generated. */
11154 fold_call_expr (location_t loc
, tree exp
, bool ignore
)
11156 tree ret
= NULL_TREE
;
11157 tree fndecl
= get_callee_fndecl (exp
);
11159 && TREE_CODE (fndecl
) == FUNCTION_DECL
11160 && DECL_BUILT_IN (fndecl
)
11161 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
11162 yet. Defer folding until we see all the arguments
11163 (after inlining). */
11164 && !CALL_EXPR_VA_ARG_PACK (exp
))
11166 int nargs
= call_expr_nargs (exp
);
11168 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
11169 instead last argument is __builtin_va_arg_pack (). Defer folding
11170 even in that case, until arguments are finalized. */
11171 if (nargs
&& TREE_CODE (CALL_EXPR_ARG (exp
, nargs
- 1)) == CALL_EXPR
)
11173 tree fndecl2
= get_callee_fndecl (CALL_EXPR_ARG (exp
, nargs
- 1));
11175 && TREE_CODE (fndecl2
) == FUNCTION_DECL
11176 && DECL_BUILT_IN_CLASS (fndecl2
) == BUILT_IN_NORMAL
11177 && DECL_FUNCTION_CODE (fndecl2
) == BUILT_IN_VA_ARG_PACK
)
11181 if (avoid_folding_inline_builtin (fndecl
))
11184 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
11185 return targetm
.fold_builtin (fndecl
, call_expr_nargs (exp
),
11186 CALL_EXPR_ARGP (exp
), ignore
);
11189 if (nargs
<= MAX_ARGS_TO_FOLD_BUILTIN
)
11191 tree
*args
= CALL_EXPR_ARGP (exp
);
11192 ret
= fold_builtin_n (loc
, fndecl
, args
, nargs
, ignore
);
11195 ret
= fold_builtin_varargs (loc
, fndecl
, exp
, ignore
);
11203 /* Conveniently construct a function call expression. FNDECL names the
11204 function to be called and N arguments are passed in the array
11208 build_call_expr_loc_array (location_t loc
, tree fndecl
, int n
, tree
*argarray
)
11210 tree fntype
= TREE_TYPE (fndecl
);
11211 tree fn
= build1 (ADDR_EXPR
, build_pointer_type (fntype
), fndecl
);
11213 return fold_builtin_call_array (loc
, TREE_TYPE (fntype
), fn
, n
, argarray
);
11216 /* Conveniently construct a function call expression. FNDECL names the
11217 function to be called and the arguments are passed in the vector
11221 build_call_expr_loc_vec (location_t loc
, tree fndecl
, VEC(tree
,gc
) *vec
)
11223 return build_call_expr_loc_array (loc
, fndecl
, VEC_length (tree
, vec
),
11224 VEC_address (tree
, vec
));
11228 /* Conveniently construct a function call expression. FNDECL names the
11229 function to be called, N is the number of arguments, and the "..."
11230 parameters are the argument expressions. */
11233 build_call_expr_loc (location_t loc
, tree fndecl
, int n
, ...)
11236 tree
*argarray
= XALLOCAVEC (tree
, n
);
11240 for (i
= 0; i
< n
; i
++)
11241 argarray
[i
] = va_arg (ap
, tree
);
11243 return build_call_expr_loc_array (loc
, fndecl
, n
, argarray
);
11246 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
11247 varargs macros aren't supported by all bootstrap compilers. */
11250 build_call_expr (tree fndecl
, int n
, ...)
11253 tree
*argarray
= XALLOCAVEC (tree
, n
);
11257 for (i
= 0; i
< n
; i
++)
11258 argarray
[i
] = va_arg (ap
, tree
);
11260 return build_call_expr_loc_array (UNKNOWN_LOCATION
, fndecl
, n
, argarray
);
11263 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
11264 N arguments are passed in the array ARGARRAY. */
11267 fold_builtin_call_array (location_t loc
, tree type
,
11272 tree ret
= NULL_TREE
;
11275 if (TREE_CODE (fn
) == ADDR_EXPR
)
11277 tree fndecl
= TREE_OPERAND (fn
, 0);
11278 if (TREE_CODE (fndecl
) == FUNCTION_DECL
11279 && DECL_BUILT_IN (fndecl
))
11281 /* If last argument is __builtin_va_arg_pack (), arguments to this
11282 function are not finalized yet. Defer folding until they are. */
11283 if (n
&& TREE_CODE (argarray
[n
- 1]) == CALL_EXPR
)
11285 tree fndecl2
= get_callee_fndecl (argarray
[n
- 1]);
11287 && TREE_CODE (fndecl2
) == FUNCTION_DECL
11288 && DECL_BUILT_IN_CLASS (fndecl2
) == BUILT_IN_NORMAL
11289 && DECL_FUNCTION_CODE (fndecl2
) == BUILT_IN_VA_ARG_PACK
)
11290 return build_call_array_loc (loc
, type
, fn
, n
, argarray
);
11292 if (avoid_folding_inline_builtin (fndecl
))
11293 return build_call_array_loc (loc
, type
, fn
, n
, argarray
);
11294 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
11296 ret
= targetm
.fold_builtin (fndecl
, n
, argarray
, false);
11300 return build_call_array_loc (loc
, type
, fn
, n
, argarray
);
11302 else if (n
<= MAX_ARGS_TO_FOLD_BUILTIN
)
11304 /* First try the transformations that don't require consing up
11306 ret
= fold_builtin_n (loc
, fndecl
, argarray
, n
, false);
11311 /* If we got this far, we need to build an exp. */
11312 exp
= build_call_array_loc (loc
, type
, fn
, n
, argarray
);
11313 ret
= fold_builtin_varargs (loc
, fndecl
, exp
, false);
11314 return ret
? ret
: exp
;
11318 return build_call_array_loc (loc
, type
, fn
, n
, argarray
);
11321 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11322 list ARGS along with N new arguments in NEWARGS. SKIP is the number
11323 of arguments in ARGS to be omitted. OLDNARGS is the number of
11324 elements in ARGS. */
11327 rewrite_call_expr_valist (location_t loc
, int oldnargs
, tree
*args
,
11328 int skip
, tree fndecl
, int n
, va_list newargs
)
11330 int nargs
= oldnargs
- skip
+ n
;
11337 buffer
= XALLOCAVEC (tree
, nargs
);
11338 for (i
= 0; i
< n
; i
++)
11339 buffer
[i
] = va_arg (newargs
, tree
);
11340 for (j
= skip
; j
< oldnargs
; j
++, i
++)
11341 buffer
[i
] = args
[j
];
11344 buffer
= args
+ skip
;
11346 return build_call_expr_loc_array (loc
, fndecl
, nargs
, buffer
);
11349 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11350 list ARGS along with N new arguments specified as the "..."
11351 parameters. SKIP is the number of arguments in ARGS to be omitted.
11352 OLDNARGS is the number of elements in ARGS. */
11355 rewrite_call_expr_array (location_t loc
, int oldnargs
, tree
*args
,
11356 int skip
, tree fndecl
, int n
, ...)
11362 t
= rewrite_call_expr_valist (loc
, oldnargs
, args
, skip
, fndecl
, n
, ap
);
11368 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
11369 along with N new arguments specified as the "..." parameters. SKIP
11370 is the number of arguments in EXP to be omitted. This function is used
11371 to do varargs-to-varargs transformations. */
11374 rewrite_call_expr (location_t loc
, tree exp
, int skip
, tree fndecl
, int n
, ...)
11380 t
= rewrite_call_expr_valist (loc
, call_expr_nargs (exp
),
11381 CALL_EXPR_ARGP (exp
), skip
, fndecl
, n
, ap
);
11387 /* Validate a single argument ARG against a tree code CODE representing
11391 validate_arg (const_tree arg
, enum tree_code code
)
11395 else if (code
== POINTER_TYPE
)
11396 return POINTER_TYPE_P (TREE_TYPE (arg
));
11397 else if (code
== INTEGER_TYPE
)
11398 return INTEGRAL_TYPE_P (TREE_TYPE (arg
));
11399 return code
== TREE_CODE (TREE_TYPE (arg
));
11402 /* This function validates the types of a function call argument list
11403 against a specified list of tree_codes. If the last specifier is a 0,
11404 that represents an ellipses, otherwise the last specifier must be a
11407 This is the GIMPLE version of validate_arglist. Eventually we want to
11408 completely convert builtins.c to work from GIMPLEs and the tree based
11409 validate_arglist will then be removed. */
11412 validate_gimple_arglist (const_gimple call
, ...)
11414 enum tree_code code
;
11420 va_start (ap
, call
);
11425 code
= (enum tree_code
) va_arg (ap
, int);
11429 /* This signifies an ellipses, any further arguments are all ok. */
11433 /* This signifies an endlink, if no arguments remain, return
11434 true, otherwise return false. */
11435 res
= (i
== gimple_call_num_args (call
));
11438 /* If no parameters remain or the parameter's code does not
11439 match the specified code, return false. Otherwise continue
11440 checking any remaining arguments. */
11441 arg
= gimple_call_arg (call
, i
++);
11442 if (!validate_arg (arg
, code
))
11449 /* We need gotos here since we can only have one VA_CLOSE in a
11457 /* This function validates the types of a function call argument list
11458 against a specified list of tree_codes. If the last specifier is a 0,
11459 that represents an ellipses, otherwise the last specifier must be a
11463 validate_arglist (const_tree callexpr
, ...)
11465 enum tree_code code
;
11468 const_call_expr_arg_iterator iter
;
11471 va_start (ap
, callexpr
);
11472 init_const_call_expr_arg_iterator (callexpr
, &iter
);
11476 code
= (enum tree_code
) va_arg (ap
, int);
11480 /* This signifies an ellipses, any further arguments are all ok. */
11484 /* This signifies an endlink, if no arguments remain, return
11485 true, otherwise return false. */
11486 res
= !more_const_call_expr_args_p (&iter
);
11489 /* If no parameters remain or the parameter's code does not
11490 match the specified code, return false. Otherwise continue
11491 checking any remaining arguments. */
11492 arg
= next_const_call_expr_arg (&iter
);
11493 if (!validate_arg (arg
, code
))
11500 /* We need gotos here since we can only have one VA_CLOSE in a
11508 /* Default target-specific builtin expander that does nothing. */
11511 default_expand_builtin (tree exp ATTRIBUTE_UNUSED
,
11512 rtx target ATTRIBUTE_UNUSED
,
11513 rtx subtarget ATTRIBUTE_UNUSED
,
11514 enum machine_mode mode ATTRIBUTE_UNUSED
,
11515 int ignore ATTRIBUTE_UNUSED
)
11520 /* Returns true is EXP represents data that would potentially reside
11521 in a readonly section. */
11524 readonly_data_expr (tree exp
)
11528 if (TREE_CODE (exp
) != ADDR_EXPR
)
11531 exp
= get_base_address (TREE_OPERAND (exp
, 0));
11535 /* Make sure we call decl_readonly_section only for trees it
11536 can handle (since it returns true for everything it doesn't
11538 if (TREE_CODE (exp
) == STRING_CST
11539 || TREE_CODE (exp
) == CONSTRUCTOR
11540 || (TREE_CODE (exp
) == VAR_DECL
&& TREE_STATIC (exp
)))
11541 return decl_readonly_section (exp
, 0);
11546 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11547 to the call, and TYPE is its return type.
11549 Return NULL_TREE if no simplification was possible, otherwise return the
11550 simplified form of the call as a tree.
11552 The simplified form may be a constant or other expression which
11553 computes the same value, but in a more efficient manner (including
11554 calls to other builtin functions).
11556 The call may contain arguments which need to be evaluated, but
11557 which are not useful to determine the result of the call. In
11558 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11559 COMPOUND_EXPR will be an argument which must be evaluated.
11560 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11561 COMPOUND_EXPR in the chain will contain the tree for the simplified
11562 form of the builtin function call. */
11565 fold_builtin_strstr (location_t loc
, tree s1
, tree s2
, tree type
)
11567 if (!validate_arg (s1
, POINTER_TYPE
)
11568 || !validate_arg (s2
, POINTER_TYPE
))
11573 const char *p1
, *p2
;
11575 p2
= c_getstr (s2
);
11579 p1
= c_getstr (s1
);
11582 const char *r
= strstr (p1
, p2
);
11586 return build_int_cst (TREE_TYPE (s1
), 0);
11588 /* Return an offset into the constant string argument. */
11589 tem
= fold_build_pointer_plus_hwi_loc (loc
, s1
, r
- p1
);
11590 return fold_convert_loc (loc
, type
, tem
);
11593 /* The argument is const char *, and the result is char *, so we need
11594 a type conversion here to avoid a warning. */
11596 return fold_convert_loc (loc
, type
, s1
);
11601 fn
= builtin_decl_implicit (BUILT_IN_STRCHR
);
11605 /* New argument list transforming strstr(s1, s2) to
11606 strchr(s1, s2[0]). */
11607 return build_call_expr_loc (loc
, fn
, 2, s1
,
11608 build_int_cst (integer_type_node
, p2
[0]));
11612 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11613 the call, and TYPE is its return type.
11615 Return NULL_TREE if no simplification was possible, otherwise return the
11616 simplified form of the call as a tree.
11618 The simplified form may be a constant or other expression which
11619 computes the same value, but in a more efficient manner (including
11620 calls to other builtin functions).
11622 The call may contain arguments which need to be evaluated, but
11623 which are not useful to determine the result of the call. In
11624 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11625 COMPOUND_EXPR will be an argument which must be evaluated.
11626 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11627 COMPOUND_EXPR in the chain will contain the tree for the simplified
11628 form of the builtin function call. */
11631 fold_builtin_strchr (location_t loc
, tree s1
, tree s2
, tree type
)
11633 if (!validate_arg (s1
, POINTER_TYPE
)
11634 || !validate_arg (s2
, INTEGER_TYPE
))
11640 if (TREE_CODE (s2
) != INTEGER_CST
)
11643 p1
= c_getstr (s1
);
11650 if (target_char_cast (s2
, &c
))
11653 r
= strchr (p1
, c
);
11656 return build_int_cst (TREE_TYPE (s1
), 0);
11658 /* Return an offset into the constant string argument. */
11659 tem
= fold_build_pointer_plus_hwi_loc (loc
, s1
, r
- p1
);
11660 return fold_convert_loc (loc
, type
, tem
);
11666 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11667 the call, and TYPE is its return type.
11669 Return NULL_TREE if no simplification was possible, otherwise return the
11670 simplified form of the call as a tree.
11672 The simplified form may be a constant or other expression which
11673 computes the same value, but in a more efficient manner (including
11674 calls to other builtin functions).
11676 The call may contain arguments which need to be evaluated, but
11677 which are not useful to determine the result of the call. In
11678 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11679 COMPOUND_EXPR will be an argument which must be evaluated.
11680 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11681 COMPOUND_EXPR in the chain will contain the tree for the simplified
11682 form of the builtin function call. */
11685 fold_builtin_strrchr (location_t loc
, tree s1
, tree s2
, tree type
)
11687 if (!validate_arg (s1
, POINTER_TYPE
)
11688 || !validate_arg (s2
, INTEGER_TYPE
))
11695 if (TREE_CODE (s2
) != INTEGER_CST
)
11698 p1
= c_getstr (s1
);
11705 if (target_char_cast (s2
, &c
))
11708 r
= strrchr (p1
, c
);
11711 return build_int_cst (TREE_TYPE (s1
), 0);
11713 /* Return an offset into the constant string argument. */
11714 tem
= fold_build_pointer_plus_hwi_loc (loc
, s1
, r
- p1
);
11715 return fold_convert_loc (loc
, type
, tem
);
11718 if (! integer_zerop (s2
))
11721 fn
= builtin_decl_implicit (BUILT_IN_STRCHR
);
11725 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11726 return build_call_expr_loc (loc
, fn
, 2, s1
, s2
);
11730 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11731 to the call, and TYPE is its return type.
11733 Return NULL_TREE if no simplification was possible, otherwise return the
11734 simplified form of the call as a tree.
11736 The simplified form may be a constant or other expression which
11737 computes the same value, but in a more efficient manner (including
11738 calls to other builtin functions).
11740 The call may contain arguments which need to be evaluated, but
11741 which are not useful to determine the result of the call. In
11742 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11743 COMPOUND_EXPR will be an argument which must be evaluated.
11744 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11745 COMPOUND_EXPR in the chain will contain the tree for the simplified
11746 form of the builtin function call. */
11749 fold_builtin_strpbrk (location_t loc
, tree s1
, tree s2
, tree type
)
11751 if (!validate_arg (s1
, POINTER_TYPE
)
11752 || !validate_arg (s2
, POINTER_TYPE
))
11757 const char *p1
, *p2
;
11759 p2
= c_getstr (s2
);
11763 p1
= c_getstr (s1
);
11766 const char *r
= strpbrk (p1
, p2
);
11770 return build_int_cst (TREE_TYPE (s1
), 0);
11772 /* Return an offset into the constant string argument. */
11773 tem
= fold_build_pointer_plus_hwi_loc (loc
, s1
, r
- p1
);
11774 return fold_convert_loc (loc
, type
, tem
);
11778 /* strpbrk(x, "") == NULL.
11779 Evaluate and ignore s1 in case it had side-effects. */
11780 return omit_one_operand_loc (loc
, TREE_TYPE (s1
), integer_zero_node
, s1
);
11783 return NULL_TREE
; /* Really call strpbrk. */
11785 fn
= builtin_decl_implicit (BUILT_IN_STRCHR
);
11789 /* New argument list transforming strpbrk(s1, s2) to
11790 strchr(s1, s2[0]). */
11791 return build_call_expr_loc (loc
, fn
, 2, s1
,
11792 build_int_cst (integer_type_node
, p2
[0]));
11796 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11799 Return NULL_TREE if no simplification was possible, otherwise return the
11800 simplified form of the call as a tree.
11802 The simplified form may be a constant or other expression which
11803 computes the same value, but in a more efficient manner (including
11804 calls to other builtin functions).
11806 The call may contain arguments which need to be evaluated, but
11807 which are not useful to determine the result of the call. In
11808 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11809 COMPOUND_EXPR will be an argument which must be evaluated.
11810 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11811 COMPOUND_EXPR in the chain will contain the tree for the simplified
11812 form of the builtin function call. */
11815 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED
, tree dst
, tree src
)
11817 if (!validate_arg (dst
, POINTER_TYPE
)
11818 || !validate_arg (src
, POINTER_TYPE
))
11822 const char *p
= c_getstr (src
);
11824 /* If the string length is zero, return the dst parameter. */
11825 if (p
&& *p
== '\0')
11828 if (optimize_insn_for_speed_p ())
11830 /* See if we can store by pieces into (dst + strlen(dst)). */
11832 tree strlen_fn
= builtin_decl_implicit (BUILT_IN_STRLEN
);
11833 tree strcpy_fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
11835 if (!strlen_fn
|| !strcpy_fn
)
11838 /* If we don't have a movstr we don't want to emit an strcpy
11839 call. We have to do that if the length of the source string
11840 isn't computable (in that case we can use memcpy probably
11841 later expanding to a sequence of mov instructions). If we
11842 have movstr instructions we can emit strcpy calls. */
11845 tree len
= c_strlen (src
, 1);
11846 if (! len
|| TREE_SIDE_EFFECTS (len
))
11850 /* Stabilize the argument list. */
11851 dst
= builtin_save_expr (dst
);
11853 /* Create strlen (dst). */
11854 newdst
= build_call_expr_loc (loc
, strlen_fn
, 1, dst
);
11855 /* Create (dst p+ strlen (dst)). */
11857 newdst
= fold_build_pointer_plus_loc (loc
, dst
, newdst
);
11858 newdst
= builtin_save_expr (newdst
);
11860 call
= build_call_expr_loc (loc
, strcpy_fn
, 2, newdst
, src
);
11861 return build2 (COMPOUND_EXPR
, TREE_TYPE (dst
), call
, dst
);
11867 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11868 arguments to the call.
11870 Return NULL_TREE if no simplification was possible, otherwise return the
11871 simplified form of the call as a tree.
11873 The simplified form may be a constant or other expression which
11874 computes the same value, but in a more efficient manner (including
11875 calls to other builtin functions).
11877 The call may contain arguments which need to be evaluated, but
11878 which are not useful to determine the result of the call. In
11879 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11880 COMPOUND_EXPR will be an argument which must be evaluated.
11881 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11882 COMPOUND_EXPR in the chain will contain the tree for the simplified
11883 form of the builtin function call. */
11886 fold_builtin_strncat (location_t loc
, tree dst
, tree src
, tree len
)
11888 if (!validate_arg (dst
, POINTER_TYPE
)
11889 || !validate_arg (src
, POINTER_TYPE
)
11890 || !validate_arg (len
, INTEGER_TYPE
))
11894 const char *p
= c_getstr (src
);
11896 /* If the requested length is zero, or the src parameter string
11897 length is zero, return the dst parameter. */
11898 if (integer_zerop (len
) || (p
&& *p
== '\0'))
11899 return omit_two_operands_loc (loc
, TREE_TYPE (dst
), dst
, src
, len
);
11901 /* If the requested len is greater than or equal to the string
11902 length, call strcat. */
11903 if (TREE_CODE (len
) == INTEGER_CST
&& p
11904 && compare_tree_int (len
, strlen (p
)) >= 0)
11906 tree fn
= builtin_decl_implicit (BUILT_IN_STRCAT
);
11908 /* If the replacement _DECL isn't initialized, don't do the
11913 return build_call_expr_loc (loc
, fn
, 2, dst
, src
);
11919 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11922 Return NULL_TREE if no simplification was possible, otherwise return the
11923 simplified form of the call as a tree.
11925 The simplified form may be a constant or other expression which
11926 computes the same value, but in a more efficient manner (including
11927 calls to other builtin functions).
11929 The call may contain arguments which need to be evaluated, but
11930 which are not useful to determine the result of the call. In
11931 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11932 COMPOUND_EXPR will be an argument which must be evaluated.
11933 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11934 COMPOUND_EXPR in the chain will contain the tree for the simplified
11935 form of the builtin function call. */
11938 fold_builtin_strspn (location_t loc
, tree s1
, tree s2
)
11940 if (!validate_arg (s1
, POINTER_TYPE
)
11941 || !validate_arg (s2
, POINTER_TYPE
))
11945 const char *p1
= c_getstr (s1
), *p2
= c_getstr (s2
);
11947 /* If both arguments are constants, evaluate at compile-time. */
11950 const size_t r
= strspn (p1
, p2
);
11951 return build_int_cst (size_type_node
, r
);
11954 /* If either argument is "", return NULL_TREE. */
11955 if ((p1
&& *p1
== '\0') || (p2
&& *p2
== '\0'))
11956 /* Evaluate and ignore both arguments in case either one has
11958 return omit_two_operands_loc (loc
, size_type_node
, size_zero_node
,
11964 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11967 Return NULL_TREE if no simplification was possible, otherwise return the
11968 simplified form of the call as a tree.
11970 The simplified form may be a constant or other expression which
11971 computes the same value, but in a more efficient manner (including
11972 calls to other builtin functions).
11974 The call may contain arguments which need to be evaluated, but
11975 which are not useful to determine the result of the call. In
11976 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11977 COMPOUND_EXPR will be an argument which must be evaluated.
11978 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11979 COMPOUND_EXPR in the chain will contain the tree for the simplified
11980 form of the builtin function call. */
11983 fold_builtin_strcspn (location_t loc
, tree s1
, tree s2
)
11985 if (!validate_arg (s1
, POINTER_TYPE
)
11986 || !validate_arg (s2
, POINTER_TYPE
))
11990 const char *p1
= c_getstr (s1
), *p2
= c_getstr (s2
);
11992 /* If both arguments are constants, evaluate at compile-time. */
11995 const size_t r
= strcspn (p1
, p2
);
11996 return build_int_cst (size_type_node
, r
);
11999 /* If the first argument is "", return NULL_TREE. */
12000 if (p1
&& *p1
== '\0')
12002 /* Evaluate and ignore argument s2 in case it has
12004 return omit_one_operand_loc (loc
, size_type_node
,
12005 size_zero_node
, s2
);
12008 /* If the second argument is "", return __builtin_strlen(s1). */
12009 if (p2
&& *p2
== '\0')
12011 tree fn
= builtin_decl_implicit (BUILT_IN_STRLEN
);
12013 /* If the replacement _DECL isn't initialized, don't do the
12018 return build_call_expr_loc (loc
, fn
, 1, s1
);
12024 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
12025 to the call. IGNORE is true if the value returned
12026 by the builtin will be ignored. UNLOCKED is true is true if this
12027 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
12028 the known length of the string. Return NULL_TREE if no simplification
12032 fold_builtin_fputs (location_t loc
, tree arg0
, tree arg1
,
12033 bool ignore
, bool unlocked
, tree len
)
12035 /* If we're using an unlocked function, assume the other unlocked
12036 functions exist explicitly. */
12037 tree
const fn_fputc
= (unlocked
12038 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED
)
12039 : builtin_decl_implicit (BUILT_IN_FPUTC
));
12040 tree
const fn_fwrite
= (unlocked
12041 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED
)
12042 : builtin_decl_implicit (BUILT_IN_FWRITE
));
12044 /* If the return value is used, don't do the transformation. */
12048 /* Verify the arguments in the original call. */
12049 if (!validate_arg (arg0
, POINTER_TYPE
)
12050 || !validate_arg (arg1
, POINTER_TYPE
))
12054 len
= c_strlen (arg0
, 0);
12056 /* Get the length of the string passed to fputs. If the length
12057 can't be determined, punt. */
12059 || TREE_CODE (len
) != INTEGER_CST
)
12062 switch (compare_tree_int (len
, 1))
12064 case -1: /* length is 0, delete the call entirely . */
12065 return omit_one_operand_loc (loc
, integer_type_node
,
12066 integer_zero_node
, arg1
);;
12068 case 0: /* length is 1, call fputc. */
12070 const char *p
= c_getstr (arg0
);
12075 return build_call_expr_loc (loc
, fn_fputc
, 2,
12077 (integer_type_node
, p
[0]), arg1
);
12083 case 1: /* length is greater than 1, call fwrite. */
12085 /* If optimizing for size keep fputs. */
12086 if (optimize_function_for_size_p (cfun
))
12088 /* New argument list transforming fputs(string, stream) to
12089 fwrite(string, 1, len, stream). */
12091 return build_call_expr_loc (loc
, fn_fwrite
, 4, arg0
,
12092 size_one_node
, len
, arg1
);
12097 gcc_unreachable ();
12102 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
12103 produced. False otherwise. This is done so that we don't output the error
12104 or warning twice or three times. */
12107 fold_builtin_next_arg (tree exp
, bool va_start_p
)
12109 tree fntype
= TREE_TYPE (current_function_decl
);
12110 int nargs
= call_expr_nargs (exp
);
12112 /* There is good chance the current input_location points inside the
12113 definition of the va_start macro (perhaps on the token for
12114 builtin) in a system header, so warnings will not be emitted.
12115 Use the location in real source code. */
12116 source_location current_location
=
12117 linemap_unwind_to_first_non_reserved_loc (line_table
, input_location
,
12120 if (!stdarg_p (fntype
))
12122 error ("%<va_start%> used in function with fixed args");
12128 if (va_start_p
&& (nargs
!= 2))
12130 error ("wrong number of arguments to function %<va_start%>");
12133 arg
= CALL_EXPR_ARG (exp
, 1);
12135 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
12136 when we checked the arguments and if needed issued a warning. */
12141 /* Evidently an out of date version of <stdarg.h>; can't validate
12142 va_start's second argument, but can still work as intended. */
12143 warning_at (current_location
,
12145 "%<__builtin_next_arg%> called without an argument");
12148 else if (nargs
> 1)
12150 error ("wrong number of arguments to function %<__builtin_next_arg%>");
12153 arg
= CALL_EXPR_ARG (exp
, 0);
12156 if (TREE_CODE (arg
) == SSA_NAME
)
12157 arg
= SSA_NAME_VAR (arg
);
12159 /* We destructively modify the call to be __builtin_va_start (ap, 0)
12160 or __builtin_next_arg (0) the first time we see it, after checking
12161 the arguments and if needed issuing a warning. */
12162 if (!integer_zerop (arg
))
12164 tree last_parm
= tree_last (DECL_ARGUMENTS (current_function_decl
));
12166 /* Strip off all nops for the sake of the comparison. This
12167 is not quite the same as STRIP_NOPS. It does more.
12168 We must also strip off INDIRECT_EXPR for C++ reference
12170 while (CONVERT_EXPR_P (arg
)
12171 || TREE_CODE (arg
) == INDIRECT_REF
)
12172 arg
= TREE_OPERAND (arg
, 0);
12173 if (arg
!= last_parm
)
12175 /* FIXME: Sometimes with the tree optimizers we can get the
12176 not the last argument even though the user used the last
12177 argument. We just warn and set the arg to be the last
12178 argument so that we will get wrong-code because of
12180 warning_at (current_location
,
12182 "second parameter of %<va_start%> not last named argument");
12185 /* Undefined by C99 7.15.1.4p4 (va_start):
12186 "If the parameter parmN is declared with the register storage
12187 class, with a function or array type, or with a type that is
12188 not compatible with the type that results after application of
12189 the default argument promotions, the behavior is undefined."
12191 else if (DECL_REGISTER (arg
))
12193 warning_at (current_location
,
12195 "undefined behaviour when second parameter of "
12196 "%<va_start%> is declared with %<register%> storage");
12199 /* We want to verify the second parameter just once before the tree
12200 optimizers are run and then avoid keeping it in the tree,
12201 as otherwise we could warn even for correct code like:
12202 void foo (int i, ...)
12203 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
12205 CALL_EXPR_ARG (exp
, 1) = integer_zero_node
;
12207 CALL_EXPR_ARG (exp
, 0) = integer_zero_node
;
12213 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
12214 ORIG may be null if this is a 2-argument call. We don't attempt to
12215 simplify calls with more than 3 arguments.
12217 Return NULL_TREE if no simplification was possible, otherwise return the
12218 simplified form of the call as a tree. If IGNORED is true, it means that
12219 the caller does not use the returned value of the function. */
12222 fold_builtin_sprintf (location_t loc
, tree dest
, tree fmt
,
12223 tree orig
, int ignored
)
12226 const char *fmt_str
= NULL
;
12228 /* Verify the required arguments in the original call. We deal with two
12229 types of sprintf() calls: 'sprintf (str, fmt)' and
12230 'sprintf (dest, "%s", orig)'. */
12231 if (!validate_arg (dest
, POINTER_TYPE
)
12232 || !validate_arg (fmt
, POINTER_TYPE
))
12234 if (orig
&& !validate_arg (orig
, POINTER_TYPE
))
12237 /* Check whether the format is a literal string constant. */
12238 fmt_str
= c_getstr (fmt
);
12239 if (fmt_str
== NULL
)
12243 retval
= NULL_TREE
;
12245 if (!init_target_chars ())
12248 /* If the format doesn't contain % args or %%, use strcpy. */
12249 if (strchr (fmt_str
, target_percent
) == NULL
)
12251 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
12256 /* Don't optimize sprintf (buf, "abc", ptr++). */
12260 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
12261 'format' is known to contain no % formats. */
12262 call
= build_call_expr_loc (loc
, fn
, 2, dest
, fmt
);
12264 retval
= build_int_cst (integer_type_node
, strlen (fmt_str
));
12267 /* If the format is "%s", use strcpy if the result isn't used. */
12268 else if (fmt_str
&& strcmp (fmt_str
, target_percent_s
) == 0)
12271 fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
12276 /* Don't crash on sprintf (str1, "%s"). */
12280 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
12283 retval
= c_strlen (orig
, 1);
12284 if (!retval
|| TREE_CODE (retval
) != INTEGER_CST
)
12287 call
= build_call_expr_loc (loc
, fn
, 2, dest
, orig
);
12290 if (call
&& retval
)
12292 retval
= fold_convert_loc
12293 (loc
, TREE_TYPE (TREE_TYPE (builtin_decl_implicit (BUILT_IN_SPRINTF
))),
12295 return build2 (COMPOUND_EXPR
, TREE_TYPE (retval
), call
, retval
);
12301 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
12302 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
12303 attempt to simplify calls with more than 4 arguments.
12305 Return NULL_TREE if no simplification was possible, otherwise return the
12306 simplified form of the call as a tree. If IGNORED is true, it means that
12307 the caller does not use the returned value of the function. */
12310 fold_builtin_snprintf (location_t loc
, tree dest
, tree destsize
, tree fmt
,
12311 tree orig
, int ignored
)
12314 const char *fmt_str
= NULL
;
12315 unsigned HOST_WIDE_INT destlen
;
12317 /* Verify the required arguments in the original call. We deal with two
12318 types of snprintf() calls: 'snprintf (str, cst, fmt)' and
12319 'snprintf (dest, cst, "%s", orig)'. */
12320 if (!validate_arg (dest
, POINTER_TYPE
)
12321 || !validate_arg (destsize
, INTEGER_TYPE
)
12322 || !validate_arg (fmt
, POINTER_TYPE
))
12324 if (orig
&& !validate_arg (orig
, POINTER_TYPE
))
12327 if (!host_integerp (destsize
, 1))
12330 /* Check whether the format is a literal string constant. */
12331 fmt_str
= c_getstr (fmt
);
12332 if (fmt_str
== NULL
)
12336 retval
= NULL_TREE
;
12338 if (!init_target_chars ())
12341 destlen
= tree_low_cst (destsize
, 1);
12343 /* If the format doesn't contain % args or %%, use strcpy. */
12344 if (strchr (fmt_str
, target_percent
) == NULL
)
12346 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
12347 size_t len
= strlen (fmt_str
);
12349 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
12353 /* We could expand this as
12354 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
12356 memcpy (str, fmt_with_nul_at_cstm1, cst);
12357 but in the former case that might increase code size
12358 and in the latter case grow .rodata section too much.
12359 So punt for now. */
12360 if (len
>= destlen
)
12366 /* Convert snprintf (str, cst, fmt) into strcpy (str, fmt) when
12367 'format' is known to contain no % formats and
12368 strlen (fmt) < cst. */
12369 call
= build_call_expr_loc (loc
, fn
, 2, dest
, fmt
);
12372 retval
= build_int_cst (integer_type_node
, strlen (fmt_str
));
12375 /* If the format is "%s", use strcpy if the result isn't used. */
12376 else if (fmt_str
&& strcmp (fmt_str
, target_percent_s
) == 0)
12378 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
12379 unsigned HOST_WIDE_INT origlen
;
12381 /* Don't crash on snprintf (str1, cst, "%s"). */
12385 retval
= c_strlen (orig
, 1);
12386 if (!retval
|| !host_integerp (retval
, 1))
12389 origlen
= tree_low_cst (retval
, 1);
12390 /* We could expand this as
12391 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
12393 memcpy (str1, str2_with_nul_at_cstm1, cst);
12394 but in the former case that might increase code size
12395 and in the latter case grow .rodata section too much.
12396 So punt for now. */
12397 if (origlen
>= destlen
)
12400 /* Convert snprintf (str1, cst, "%s", str2) into
12401 strcpy (str1, str2) if strlen (str2) < cst. */
12405 call
= build_call_expr_loc (loc
, fn
, 2, dest
, orig
);
12408 retval
= NULL_TREE
;
12411 if (call
&& retval
)
12413 tree fn
= builtin_decl_explicit (BUILT_IN_SNPRINTF
);
12414 retval
= fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fn
)), retval
);
12415 return build2 (COMPOUND_EXPR
, TREE_TYPE (retval
), call
, retval
);
12421 /* Expand a call EXP to __builtin_object_size. */
12424 expand_builtin_object_size (tree exp
)
12427 int object_size_type
;
12428 tree fndecl
= get_callee_fndecl (exp
);
12430 if (!validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
12432 error ("%Kfirst argument of %D must be a pointer, second integer constant",
12434 expand_builtin_trap ();
12438 ost
= CALL_EXPR_ARG (exp
, 1);
12441 if (TREE_CODE (ost
) != INTEGER_CST
12442 || tree_int_cst_sgn (ost
) < 0
12443 || compare_tree_int (ost
, 3) > 0)
12445 error ("%Klast argument of %D is not integer constant between 0 and 3",
12447 expand_builtin_trap ();
12451 object_size_type
= tree_low_cst (ost
, 0);
12453 return object_size_type
< 2 ? constm1_rtx
: const0_rtx
;
12456 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12457 FCODE is the BUILT_IN_* to use.
12458 Return NULL_RTX if we failed; the caller should emit a normal call,
12459 otherwise try to get the result in TARGET, if convenient (and in
12460 mode MODE if that's convenient). */
12463 expand_builtin_memory_chk (tree exp
, rtx target
, enum machine_mode mode
,
12464 enum built_in_function fcode
)
12466 tree dest
, src
, len
, size
;
12468 if (!validate_arglist (exp
,
12470 fcode
== BUILT_IN_MEMSET_CHK
12471 ? INTEGER_TYPE
: POINTER_TYPE
,
12472 INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
12475 dest
= CALL_EXPR_ARG (exp
, 0);
12476 src
= CALL_EXPR_ARG (exp
, 1);
12477 len
= CALL_EXPR_ARG (exp
, 2);
12478 size
= CALL_EXPR_ARG (exp
, 3);
12480 if (! host_integerp (size
, 1))
12483 if (host_integerp (len
, 1) || integer_all_onesp (size
))
12487 if (! integer_all_onesp (size
) && tree_int_cst_lt (size
, len
))
12489 warning_at (tree_nonartificial_location (exp
),
12490 0, "%Kcall to %D will always overflow destination buffer",
12491 exp
, get_callee_fndecl (exp
));
12496 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12497 mem{cpy,pcpy,move,set} is available. */
12500 case BUILT_IN_MEMCPY_CHK
:
12501 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY
);
12503 case BUILT_IN_MEMPCPY_CHK
:
12504 fn
= builtin_decl_explicit (BUILT_IN_MEMPCPY
);
12506 case BUILT_IN_MEMMOVE_CHK
:
12507 fn
= builtin_decl_explicit (BUILT_IN_MEMMOVE
);
12509 case BUILT_IN_MEMSET_CHK
:
12510 fn
= builtin_decl_explicit (BUILT_IN_MEMSET
);
12519 fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fn
, 3, dest
, src
, len
);
12520 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
12521 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
12522 return expand_expr (fn
, target
, mode
, EXPAND_NORMAL
);
12524 else if (fcode
== BUILT_IN_MEMSET_CHK
)
12528 unsigned int dest_align
= get_pointer_alignment (dest
);
12530 /* If DEST is not a pointer type, call the normal function. */
12531 if (dest_align
== 0)
12534 /* If SRC and DEST are the same (and not volatile), do nothing. */
12535 if (operand_equal_p (src
, dest
, 0))
12539 if (fcode
!= BUILT_IN_MEMPCPY_CHK
)
12541 /* Evaluate and ignore LEN in case it has side-effects. */
12542 expand_expr (len
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
12543 return expand_expr (dest
, target
, mode
, EXPAND_NORMAL
);
12546 expr
= fold_build_pointer_plus (dest
, len
);
12547 return expand_expr (expr
, target
, mode
, EXPAND_NORMAL
);
12550 /* __memmove_chk special case. */
12551 if (fcode
== BUILT_IN_MEMMOVE_CHK
)
12553 unsigned int src_align
= get_pointer_alignment (src
);
12555 if (src_align
== 0)
12558 /* If src is categorized for a readonly section we can use
12559 normal __memcpy_chk. */
12560 if (readonly_data_expr (src
))
12562 tree fn
= builtin_decl_explicit (BUILT_IN_MEMCPY_CHK
);
12565 fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fn
, 4,
12566 dest
, src
, len
, size
);
12567 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
12568 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
12569 return expand_expr (fn
, target
, mode
, EXPAND_NORMAL
);
12576 /* Emit warning if a buffer overflow is detected at compile time. */
12579 maybe_emit_chk_warning (tree exp
, enum built_in_function fcode
)
12583 location_t loc
= tree_nonartificial_location (exp
);
12587 case BUILT_IN_STRCPY_CHK
:
12588 case BUILT_IN_STPCPY_CHK
:
12589 /* For __strcat_chk the warning will be emitted only if overflowing
12590 by at least strlen (dest) + 1 bytes. */
12591 case BUILT_IN_STRCAT_CHK
:
12592 len
= CALL_EXPR_ARG (exp
, 1);
12593 size
= CALL_EXPR_ARG (exp
, 2);
12596 case BUILT_IN_STRNCAT_CHK
:
12597 case BUILT_IN_STRNCPY_CHK
:
12598 case BUILT_IN_STPNCPY_CHK
:
12599 len
= CALL_EXPR_ARG (exp
, 2);
12600 size
= CALL_EXPR_ARG (exp
, 3);
12602 case BUILT_IN_SNPRINTF_CHK
:
12603 case BUILT_IN_VSNPRINTF_CHK
:
12604 len
= CALL_EXPR_ARG (exp
, 1);
12605 size
= CALL_EXPR_ARG (exp
, 3);
12608 gcc_unreachable ();
12614 if (! host_integerp (size
, 1) || integer_all_onesp (size
))
12619 len
= c_strlen (len
, 1);
12620 if (! len
|| ! host_integerp (len
, 1) || tree_int_cst_lt (len
, size
))
12623 else if (fcode
== BUILT_IN_STRNCAT_CHK
)
12625 tree src
= CALL_EXPR_ARG (exp
, 1);
12626 if (! src
|| ! host_integerp (len
, 1) || tree_int_cst_lt (len
, size
))
12628 src
= c_strlen (src
, 1);
12629 if (! src
|| ! host_integerp (src
, 1))
12631 warning_at (loc
, 0, "%Kcall to %D might overflow destination buffer",
12632 exp
, get_callee_fndecl (exp
));
12635 else if (tree_int_cst_lt (src
, size
))
12638 else if (! host_integerp (len
, 1) || ! tree_int_cst_lt (size
, len
))
12641 warning_at (loc
, 0, "%Kcall to %D will always overflow destination buffer",
12642 exp
, get_callee_fndecl (exp
));
12645 /* Emit warning if a buffer overflow is detected at compile time
12646 in __sprintf_chk/__vsprintf_chk calls. */
12649 maybe_emit_sprintf_chk_warning (tree exp
, enum built_in_function fcode
)
12651 tree size
, len
, fmt
;
12652 const char *fmt_str
;
12653 int nargs
= call_expr_nargs (exp
);
12655 /* Verify the required arguments in the original call. */
12659 size
= CALL_EXPR_ARG (exp
, 2);
12660 fmt
= CALL_EXPR_ARG (exp
, 3);
12662 if (! host_integerp (size
, 1) || integer_all_onesp (size
))
12665 /* Check whether the format is a literal string constant. */
12666 fmt_str
= c_getstr (fmt
);
12667 if (fmt_str
== NULL
)
12670 if (!init_target_chars ())
12673 /* If the format doesn't contain % args or %%, we know its size. */
12674 if (strchr (fmt_str
, target_percent
) == 0)
12675 len
= build_int_cstu (size_type_node
, strlen (fmt_str
));
12676 /* If the format is "%s" and first ... argument is a string literal,
12678 else if (fcode
== BUILT_IN_SPRINTF_CHK
12679 && strcmp (fmt_str
, target_percent_s
) == 0)
12685 arg
= CALL_EXPR_ARG (exp
, 4);
12686 if (! POINTER_TYPE_P (TREE_TYPE (arg
)))
12689 len
= c_strlen (arg
, 1);
12690 if (!len
|| ! host_integerp (len
, 1))
12696 if (! tree_int_cst_lt (len
, size
))
12697 warning_at (tree_nonartificial_location (exp
),
12698 0, "%Kcall to %D will always overflow destination buffer",
12699 exp
, get_callee_fndecl (exp
));
12702 /* Emit warning if a free is called with address of a variable. */
12705 maybe_emit_free_warning (tree exp
)
12707 tree arg
= CALL_EXPR_ARG (exp
, 0);
12710 if (TREE_CODE (arg
) != ADDR_EXPR
)
12713 arg
= get_base_address (TREE_OPERAND (arg
, 0));
12714 if (arg
== NULL
|| INDIRECT_REF_P (arg
) || TREE_CODE (arg
) == MEM_REF
)
12717 if (SSA_VAR_P (arg
))
12718 warning_at (tree_nonartificial_location (exp
), OPT_Wfree_nonheap_object
,
12719 "%Kattempt to free a non-heap object %qD", exp
, arg
);
12721 warning_at (tree_nonartificial_location (exp
), OPT_Wfree_nonheap_object
,
12722 "%Kattempt to free a non-heap object", exp
);
12725 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12729 fold_builtin_object_size (tree ptr
, tree ost
)
12731 unsigned HOST_WIDE_INT bytes
;
12732 int object_size_type
;
12734 if (!validate_arg (ptr
, POINTER_TYPE
)
12735 || !validate_arg (ost
, INTEGER_TYPE
))
12740 if (TREE_CODE (ost
) != INTEGER_CST
12741 || tree_int_cst_sgn (ost
) < 0
12742 || compare_tree_int (ost
, 3) > 0)
12745 object_size_type
= tree_low_cst (ost
, 0);
12747 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12748 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12749 and (size_t) 0 for types 2 and 3. */
12750 if (TREE_SIDE_EFFECTS (ptr
))
12751 return build_int_cst_type (size_type_node
, object_size_type
< 2 ? -1 : 0);
12753 if (TREE_CODE (ptr
) == ADDR_EXPR
)
12755 bytes
= compute_builtin_object_size (ptr
, object_size_type
);
12756 if (double_int_fits_to_tree_p (size_type_node
,
12757 double_int::from_uhwi (bytes
)))
12758 return build_int_cstu (size_type_node
, bytes
);
12760 else if (TREE_CODE (ptr
) == SSA_NAME
)
12762 /* If object size is not known yet, delay folding until
12763 later. Maybe subsequent passes will help determining
12765 bytes
= compute_builtin_object_size (ptr
, object_size_type
);
12766 if (bytes
!= (unsigned HOST_WIDE_INT
) (object_size_type
< 2 ? -1 : 0)
12767 && double_int_fits_to_tree_p (size_type_node
,
12768 double_int::from_uhwi (bytes
)))
12769 return build_int_cstu (size_type_node
, bytes
);
12775 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12776 DEST, SRC, LEN, and SIZE are the arguments to the call.
12777 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
12778 code of the builtin. If MAXLEN is not NULL, it is maximum length
12779 passed as third argument. */
12782 fold_builtin_memory_chk (location_t loc
, tree fndecl
,
12783 tree dest
, tree src
, tree len
, tree size
,
12784 tree maxlen
, bool ignore
,
12785 enum built_in_function fcode
)
12789 if (!validate_arg (dest
, POINTER_TYPE
)
12790 || !validate_arg (src
,
12791 (fcode
== BUILT_IN_MEMSET_CHK
12792 ? INTEGER_TYPE
: POINTER_TYPE
))
12793 || !validate_arg (len
, INTEGER_TYPE
)
12794 || !validate_arg (size
, INTEGER_TYPE
))
12797 /* If SRC and DEST are the same (and not volatile), return DEST
12798 (resp. DEST+LEN for __mempcpy_chk). */
12799 if (fcode
!= BUILT_IN_MEMSET_CHK
&& operand_equal_p (src
, dest
, 0))
12801 if (fcode
!= BUILT_IN_MEMPCPY_CHK
)
12802 return omit_one_operand_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)),
12806 tree temp
= fold_build_pointer_plus_loc (loc
, dest
, len
);
12807 return fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), temp
);
12811 if (! host_integerp (size
, 1))
12814 if (! integer_all_onesp (size
))
12816 if (! host_integerp (len
, 1))
12818 /* If LEN is not constant, try MAXLEN too.
12819 For MAXLEN only allow optimizing into non-_ocs function
12820 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12821 if (maxlen
== NULL_TREE
|| ! host_integerp (maxlen
, 1))
12823 if (fcode
== BUILT_IN_MEMPCPY_CHK
&& ignore
)
12825 /* (void) __mempcpy_chk () can be optimized into
12826 (void) __memcpy_chk (). */
12827 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY_CHK
);
12831 return build_call_expr_loc (loc
, fn
, 4, dest
, src
, len
, size
);
12839 if (tree_int_cst_lt (size
, maxlen
))
12844 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12845 mem{cpy,pcpy,move,set} is available. */
12848 case BUILT_IN_MEMCPY_CHK
:
12849 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY
);
12851 case BUILT_IN_MEMPCPY_CHK
:
12852 fn
= builtin_decl_explicit (BUILT_IN_MEMPCPY
);
12854 case BUILT_IN_MEMMOVE_CHK
:
12855 fn
= builtin_decl_explicit (BUILT_IN_MEMMOVE
);
12857 case BUILT_IN_MEMSET_CHK
:
12858 fn
= builtin_decl_explicit (BUILT_IN_MEMSET
);
12867 return build_call_expr_loc (loc
, fn
, 3, dest
, src
, len
);
12870 /* Fold a call to the __st[rp]cpy_chk builtin.
12871 DEST, SRC, and SIZE are the arguments to the call.
12872 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12873 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12874 strings passed as second argument. */
12877 fold_builtin_stxcpy_chk (location_t loc
, tree fndecl
, tree dest
,
12878 tree src
, tree size
,
12879 tree maxlen
, bool ignore
,
12880 enum built_in_function fcode
)
12884 if (!validate_arg (dest
, POINTER_TYPE
)
12885 || !validate_arg (src
, POINTER_TYPE
)
12886 || !validate_arg (size
, INTEGER_TYPE
))
12889 /* If SRC and DEST are the same (and not volatile), return DEST. */
12890 if (fcode
== BUILT_IN_STRCPY_CHK
&& operand_equal_p (src
, dest
, 0))
12891 return fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), dest
);
12893 if (! host_integerp (size
, 1))
12896 if (! integer_all_onesp (size
))
12898 len
= c_strlen (src
, 1);
12899 if (! len
|| ! host_integerp (len
, 1))
12901 /* If LEN is not constant, try MAXLEN too.
12902 For MAXLEN only allow optimizing into non-_ocs function
12903 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12904 if (maxlen
== NULL_TREE
|| ! host_integerp (maxlen
, 1))
12906 if (fcode
== BUILT_IN_STPCPY_CHK
)
12911 /* If return value of __stpcpy_chk is ignored,
12912 optimize into __strcpy_chk. */
12913 fn
= builtin_decl_explicit (BUILT_IN_STRCPY_CHK
);
12917 return build_call_expr_loc (loc
, fn
, 3, dest
, src
, size
);
12920 if (! len
|| TREE_SIDE_EFFECTS (len
))
12923 /* If c_strlen returned something, but not a constant,
12924 transform __strcpy_chk into __memcpy_chk. */
12925 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY_CHK
);
12929 len
= fold_convert_loc (loc
, size_type_node
, len
);
12930 len
= size_binop_loc (loc
, PLUS_EXPR
, len
,
12931 build_int_cst (size_type_node
, 1));
12932 return fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)),
12933 build_call_expr_loc (loc
, fn
, 4,
12934 dest
, src
, len
, size
));
12940 if (! tree_int_cst_lt (maxlen
, size
))
12944 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12945 fn
= builtin_decl_explicit (fcode
== BUILT_IN_STPCPY_CHK
12946 ? BUILT_IN_STPCPY
: BUILT_IN_STRCPY
);
12950 return build_call_expr_loc (loc
, fn
, 2, dest
, src
);
12953 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
12954 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12955 length passed as third argument. IGNORE is true if return value can be
12956 ignored. FCODE is the BUILT_IN_* code of the builtin. */
12959 fold_builtin_stxncpy_chk (location_t loc
, tree dest
, tree src
,
12960 tree len
, tree size
, tree maxlen
, bool ignore
,
12961 enum built_in_function fcode
)
12965 if (!validate_arg (dest
, POINTER_TYPE
)
12966 || !validate_arg (src
, POINTER_TYPE
)
12967 || !validate_arg (len
, INTEGER_TYPE
)
12968 || !validate_arg (size
, INTEGER_TYPE
))
12971 if (fcode
== BUILT_IN_STPNCPY_CHK
&& ignore
)
12973 /* If return value of __stpncpy_chk is ignored,
12974 optimize into __strncpy_chk. */
12975 fn
= builtin_decl_explicit (BUILT_IN_STRNCPY_CHK
);
12977 return build_call_expr_loc (loc
, fn
, 4, dest
, src
, len
, size
);
12980 if (! host_integerp (size
, 1))
12983 if (! integer_all_onesp (size
))
12985 if (! host_integerp (len
, 1))
12987 /* If LEN is not constant, try MAXLEN too.
12988 For MAXLEN only allow optimizing into non-_ocs function
12989 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12990 if (maxlen
== NULL_TREE
|| ! host_integerp (maxlen
, 1))
12996 if (tree_int_cst_lt (size
, maxlen
))
13000 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
13001 fn
= builtin_decl_explicit (fcode
== BUILT_IN_STPNCPY_CHK
13002 ? BUILT_IN_STPNCPY
: BUILT_IN_STRNCPY
);
13006 return build_call_expr_loc (loc
, fn
, 3, dest
, src
, len
);
13009 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
13010 are the arguments to the call. */
13013 fold_builtin_strcat_chk (location_t loc
, tree fndecl
, tree dest
,
13014 tree src
, tree size
)
13019 if (!validate_arg (dest
, POINTER_TYPE
)
13020 || !validate_arg (src
, POINTER_TYPE
)
13021 || !validate_arg (size
, INTEGER_TYPE
))
13024 p
= c_getstr (src
);
13025 /* If the SRC parameter is "", return DEST. */
13026 if (p
&& *p
== '\0')
13027 return omit_one_operand_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), dest
, src
);
13029 if (! host_integerp (size
, 1) || ! integer_all_onesp (size
))
13032 /* If __builtin_strcat_chk is used, assume strcat is available. */
13033 fn
= builtin_decl_explicit (BUILT_IN_STRCAT
);
13037 return build_call_expr_loc (loc
, fn
, 2, dest
, src
);
13040 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
13044 fold_builtin_strncat_chk (location_t loc
, tree fndecl
,
13045 tree dest
, tree src
, tree len
, tree size
)
13050 if (!validate_arg (dest
, POINTER_TYPE
)
13051 || !validate_arg (src
, POINTER_TYPE
)
13052 || !validate_arg (size
, INTEGER_TYPE
)
13053 || !validate_arg (size
, INTEGER_TYPE
))
13056 p
= c_getstr (src
);
13057 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
13058 if (p
&& *p
== '\0')
13059 return omit_one_operand_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), dest
, len
);
13060 else if (integer_zerop (len
))
13061 return omit_one_operand_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), dest
, src
);
13063 if (! host_integerp (size
, 1))
13066 if (! integer_all_onesp (size
))
13068 tree src_len
= c_strlen (src
, 1);
13070 && host_integerp (src_len
, 1)
13071 && host_integerp (len
, 1)
13072 && ! tree_int_cst_lt (len
, src_len
))
13074 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
13075 fn
= builtin_decl_explicit (BUILT_IN_STRCAT_CHK
);
13079 return build_call_expr_loc (loc
, fn
, 3, dest
, src
, size
);
13084 /* If __builtin_strncat_chk is used, assume strncat is available. */
13085 fn
= builtin_decl_explicit (BUILT_IN_STRNCAT
);
13089 return build_call_expr_loc (loc
, fn
, 3, dest
, src
, len
);
13092 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
13093 Return NULL_TREE if a normal call should be emitted rather than
13094 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
13095 or BUILT_IN_VSPRINTF_CHK. */
13098 fold_builtin_sprintf_chk_1 (location_t loc
, int nargs
, tree
*args
,
13099 enum built_in_function fcode
)
13101 tree dest
, size
, len
, fn
, fmt
, flag
;
13102 const char *fmt_str
;
13104 /* Verify the required arguments in the original call. */
13108 if (!validate_arg (dest
, POINTER_TYPE
))
13111 if (!validate_arg (flag
, INTEGER_TYPE
))
13114 if (!validate_arg (size
, INTEGER_TYPE
))
13117 if (!validate_arg (fmt
, POINTER_TYPE
))
13120 if (! host_integerp (size
, 1))
13125 if (!init_target_chars ())
13128 /* Check whether the format is a literal string constant. */
13129 fmt_str
= c_getstr (fmt
);
13130 if (fmt_str
!= NULL
)
13132 /* If the format doesn't contain % args or %%, we know the size. */
13133 if (strchr (fmt_str
, target_percent
) == 0)
13135 if (fcode
!= BUILT_IN_SPRINTF_CHK
|| nargs
== 4)
13136 len
= build_int_cstu (size_type_node
, strlen (fmt_str
));
13138 /* If the format is "%s" and first ... argument is a string literal,
13139 we know the size too. */
13140 else if (fcode
== BUILT_IN_SPRINTF_CHK
13141 && strcmp (fmt_str
, target_percent_s
) == 0)
13148 if (validate_arg (arg
, POINTER_TYPE
))
13150 len
= c_strlen (arg
, 1);
13151 if (! len
|| ! host_integerp (len
, 1))
13158 if (! integer_all_onesp (size
))
13160 if (! len
|| ! tree_int_cst_lt (len
, size
))
13164 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13165 or if format doesn't contain % chars or is "%s". */
13166 if (! integer_zerop (flag
))
13168 if (fmt_str
== NULL
)
13170 if (strchr (fmt_str
, target_percent
) != NULL
13171 && strcmp (fmt_str
, target_percent_s
))
13175 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13176 fn
= builtin_decl_explicit (fcode
== BUILT_IN_VSPRINTF_CHK
13177 ? BUILT_IN_VSPRINTF
: BUILT_IN_SPRINTF
);
13181 return rewrite_call_expr_array (loc
, nargs
, args
, 4, fn
, 2, dest
, fmt
);
13184 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
13185 a normal call should be emitted rather than expanding the function
13186 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13189 fold_builtin_sprintf_chk (location_t loc
, tree exp
,
13190 enum built_in_function fcode
)
13192 return fold_builtin_sprintf_chk_1 (loc
, call_expr_nargs (exp
),
13193 CALL_EXPR_ARGP (exp
), fcode
);
13196 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
13197 NULL_TREE if a normal call should be emitted rather than expanding
13198 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13199 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13200 passed as second argument. */
13203 fold_builtin_snprintf_chk_1 (location_t loc
, int nargs
, tree
*args
,
13204 tree maxlen
, enum built_in_function fcode
)
13206 tree dest
, size
, len
, fn
, fmt
, flag
;
13207 const char *fmt_str
;
13209 /* Verify the required arguments in the original call. */
13213 if (!validate_arg (dest
, POINTER_TYPE
))
13216 if (!validate_arg (len
, INTEGER_TYPE
))
13219 if (!validate_arg (flag
, INTEGER_TYPE
))
13222 if (!validate_arg (size
, INTEGER_TYPE
))
13225 if (!validate_arg (fmt
, POINTER_TYPE
))
13228 if (! host_integerp (size
, 1))
13231 if (! integer_all_onesp (size
))
13233 if (! host_integerp (len
, 1))
13235 /* If LEN is not constant, try MAXLEN too.
13236 For MAXLEN only allow optimizing into non-_ocs function
13237 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13238 if (maxlen
== NULL_TREE
|| ! host_integerp (maxlen
, 1))
13244 if (tree_int_cst_lt (size
, maxlen
))
13248 if (!init_target_chars ())
13251 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13252 or if format doesn't contain % chars or is "%s". */
13253 if (! integer_zerop (flag
))
13255 fmt_str
= c_getstr (fmt
);
13256 if (fmt_str
== NULL
)
13258 if (strchr (fmt_str
, target_percent
) != NULL
13259 && strcmp (fmt_str
, target_percent_s
))
13263 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13265 fn
= builtin_decl_explicit (fcode
== BUILT_IN_VSNPRINTF_CHK
13266 ? BUILT_IN_VSNPRINTF
: BUILT_IN_SNPRINTF
);
13270 return rewrite_call_expr_array (loc
, nargs
, args
, 5, fn
, 3, dest
, len
, fmt
);
13273 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
13274 a normal call should be emitted rather than expanding the function
13275 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13276 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13277 passed as second argument. */
13280 fold_builtin_snprintf_chk (location_t loc
, tree exp
, tree maxlen
,
13281 enum built_in_function fcode
)
13283 return fold_builtin_snprintf_chk_1 (loc
, call_expr_nargs (exp
),
13284 CALL_EXPR_ARGP (exp
), maxlen
, fcode
);
13287 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
13288 FMT and ARG are the arguments to the call; we don't fold cases with
13289 more than 2 arguments, and ARG may be null if this is a 1-argument case.
13291 Return NULL_TREE if no simplification was possible, otherwise return the
13292 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13293 code of the function to be simplified. */
13296 fold_builtin_printf (location_t loc
, tree fndecl
, tree fmt
,
13297 tree arg
, bool ignore
,
13298 enum built_in_function fcode
)
13300 tree fn_putchar
, fn_puts
, newarg
, call
= NULL_TREE
;
13301 const char *fmt_str
= NULL
;
13303 /* If the return value is used, don't do the transformation. */
13307 /* Verify the required arguments in the original call. */
13308 if (!validate_arg (fmt
, POINTER_TYPE
))
13311 /* Check whether the format is a literal string constant. */
13312 fmt_str
= c_getstr (fmt
);
13313 if (fmt_str
== NULL
)
13316 if (fcode
== BUILT_IN_PRINTF_UNLOCKED
)
13318 /* If we're using an unlocked function, assume the other
13319 unlocked functions exist explicitly. */
13320 fn_putchar
= builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED
);
13321 fn_puts
= builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED
);
13325 fn_putchar
= builtin_decl_implicit (BUILT_IN_PUTCHAR
);
13326 fn_puts
= builtin_decl_implicit (BUILT_IN_PUTS
);
13329 if (!init_target_chars ())
13332 if (strcmp (fmt_str
, target_percent_s
) == 0
13333 || strchr (fmt_str
, target_percent
) == NULL
)
13337 if (strcmp (fmt_str
, target_percent_s
) == 0)
13339 if (fcode
== BUILT_IN_VPRINTF
|| fcode
== BUILT_IN_VPRINTF_CHK
)
13342 if (!arg
|| !validate_arg (arg
, POINTER_TYPE
))
13345 str
= c_getstr (arg
);
13351 /* The format specifier doesn't contain any '%' characters. */
13352 if (fcode
!= BUILT_IN_VPRINTF
&& fcode
!= BUILT_IN_VPRINTF_CHK
13358 /* If the string was "", printf does nothing. */
13359 if (str
[0] == '\0')
13360 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl
)), 0);
13362 /* If the string has length of 1, call putchar. */
13363 if (str
[1] == '\0')
13365 /* Given printf("c"), (where c is any one character,)
13366 convert "c"[0] to an int and pass that to the replacement
13368 newarg
= build_int_cst (integer_type_node
, str
[0]);
13370 call
= build_call_expr_loc (loc
, fn_putchar
, 1, newarg
);
13374 /* If the string was "string\n", call puts("string"). */
13375 size_t len
= strlen (str
);
13376 if ((unsigned char)str
[len
- 1] == target_newline
13377 && (size_t) (int) len
== len
13381 tree offset_node
, string_cst
;
13383 /* Create a NUL-terminated string that's one char shorter
13384 than the original, stripping off the trailing '\n'. */
13385 newarg
= build_string_literal (len
, str
);
13386 string_cst
= string_constant (newarg
, &offset_node
);
13387 gcc_checking_assert (string_cst
13388 && (TREE_STRING_LENGTH (string_cst
)
13390 && integer_zerop (offset_node
)
13392 TREE_STRING_POINTER (string_cst
)[len
- 1]
13393 == target_newline
);
13394 /* build_string_literal creates a new STRING_CST,
13395 modify it in place to avoid double copying. */
13396 newstr
= CONST_CAST (char *, TREE_STRING_POINTER (string_cst
));
13397 newstr
[len
- 1] = '\0';
13399 call
= build_call_expr_loc (loc
, fn_puts
, 1, newarg
);
13402 /* We'd like to arrange to call fputs(string,stdout) here,
13403 but we need stdout and don't have a way to get it yet. */
13408 /* The other optimizations can be done only on the non-va_list variants. */
13409 else if (fcode
== BUILT_IN_VPRINTF
|| fcode
== BUILT_IN_VPRINTF_CHK
)
13412 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
13413 else if (strcmp (fmt_str
, target_percent_s_newline
) == 0)
13415 if (!arg
|| !validate_arg (arg
, POINTER_TYPE
))
13418 call
= build_call_expr_loc (loc
, fn_puts
, 1, arg
);
13421 /* If the format specifier was "%c", call __builtin_putchar(arg). */
13422 else if (strcmp (fmt_str
, target_percent_c
) == 0)
13424 if (!arg
|| !validate_arg (arg
, INTEGER_TYPE
))
13427 call
= build_call_expr_loc (loc
, fn_putchar
, 1, arg
);
13433 return fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), call
);
13436 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
13437 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
13438 more than 3 arguments, and ARG may be null in the 2-argument case.
13440 Return NULL_TREE if no simplification was possible, otherwise return the
13441 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13442 code of the function to be simplified. */
13445 fold_builtin_fprintf (location_t loc
, tree fndecl
, tree fp
,
13446 tree fmt
, tree arg
, bool ignore
,
13447 enum built_in_function fcode
)
13449 tree fn_fputc
, fn_fputs
, call
= NULL_TREE
;
13450 const char *fmt_str
= NULL
;
13452 /* If the return value is used, don't do the transformation. */
13456 /* Verify the required arguments in the original call. */
13457 if (!validate_arg (fp
, POINTER_TYPE
))
13459 if (!validate_arg (fmt
, POINTER_TYPE
))
13462 /* Check whether the format is a literal string constant. */
13463 fmt_str
= c_getstr (fmt
);
13464 if (fmt_str
== NULL
)
13467 if (fcode
== BUILT_IN_FPRINTF_UNLOCKED
)
13469 /* If we're using an unlocked function, assume the other
13470 unlocked functions exist explicitly. */
13471 fn_fputc
= builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED
);
13472 fn_fputs
= builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED
);
13476 fn_fputc
= builtin_decl_implicit (BUILT_IN_FPUTC
);
13477 fn_fputs
= builtin_decl_implicit (BUILT_IN_FPUTS
);
13480 if (!init_target_chars ())
13483 /* If the format doesn't contain % args or %%, use strcpy. */
13484 if (strchr (fmt_str
, target_percent
) == NULL
)
13486 if (fcode
!= BUILT_IN_VFPRINTF
&& fcode
!= BUILT_IN_VFPRINTF_CHK
13490 /* If the format specifier was "", fprintf does nothing. */
13491 if (fmt_str
[0] == '\0')
13493 /* If FP has side-effects, just wait until gimplification is
13495 if (TREE_SIDE_EFFECTS (fp
))
13498 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl
)), 0);
13501 /* When "string" doesn't contain %, replace all cases of
13502 fprintf (fp, string) with fputs (string, fp). The fputs
13503 builtin will take care of special cases like length == 1. */
13505 call
= build_call_expr_loc (loc
, fn_fputs
, 2, fmt
, fp
);
13508 /* The other optimizations can be done only on the non-va_list variants. */
13509 else if (fcode
== BUILT_IN_VFPRINTF
|| fcode
== BUILT_IN_VFPRINTF_CHK
)
13512 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
13513 else if (strcmp (fmt_str
, target_percent_s
) == 0)
13515 if (!arg
|| !validate_arg (arg
, POINTER_TYPE
))
13518 call
= build_call_expr_loc (loc
, fn_fputs
, 2, arg
, fp
);
13521 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
13522 else if (strcmp (fmt_str
, target_percent_c
) == 0)
13524 if (!arg
|| !validate_arg (arg
, INTEGER_TYPE
))
13527 call
= build_call_expr_loc (loc
, fn_fputc
, 2, arg
, fp
);
13532 return fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), call
);
13535 /* Initialize format string characters in the target charset. */
13538 init_target_chars (void)
13543 target_newline
= lang_hooks
.to_target_charset ('\n');
13544 target_percent
= lang_hooks
.to_target_charset ('%');
13545 target_c
= lang_hooks
.to_target_charset ('c');
13546 target_s
= lang_hooks
.to_target_charset ('s');
13547 if (target_newline
== 0 || target_percent
== 0 || target_c
== 0
13551 target_percent_c
[0] = target_percent
;
13552 target_percent_c
[1] = target_c
;
13553 target_percent_c
[2] = '\0';
13555 target_percent_s
[0] = target_percent
;
13556 target_percent_s
[1] = target_s
;
13557 target_percent_s
[2] = '\0';
13559 target_percent_s_newline
[0] = target_percent
;
13560 target_percent_s_newline
[1] = target_s
;
13561 target_percent_s_newline
[2] = target_newline
;
13562 target_percent_s_newline
[3] = '\0';
13569 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
13570 and no overflow/underflow occurred. INEXACT is true if M was not
13571 exactly calculated. TYPE is the tree type for the result. This
13572 function assumes that you cleared the MPFR flags and then
13573 calculated M to see if anything subsequently set a flag prior to
13574 entering this function. Return NULL_TREE if any checks fail. */
13577 do_mpfr_ckconv (mpfr_srcptr m
, tree type
, int inexact
)
13579 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13580 overflow/underflow occurred. If -frounding-math, proceed iff the
13581 result of calling FUNC was exact. */
13582 if (mpfr_number_p (m
) && !mpfr_overflow_p () && !mpfr_underflow_p ()
13583 && (!flag_rounding_math
|| !inexact
))
13585 REAL_VALUE_TYPE rr
;
13587 real_from_mpfr (&rr
, m
, type
, GMP_RNDN
);
13588 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
13589 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13590 but the mpft_t is not, then we underflowed in the
13592 if (real_isfinite (&rr
)
13593 && (rr
.cl
== rvc_zero
) == (mpfr_zero_p (m
) != 0))
13595 REAL_VALUE_TYPE rmode
;
13597 real_convert (&rmode
, TYPE_MODE (type
), &rr
);
13598 /* Proceed iff the specified mode can hold the value. */
13599 if (real_identical (&rmode
, &rr
))
13600 return build_real (type
, rmode
);
13606 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
13607 number and no overflow/underflow occurred. INEXACT is true if M
13608 was not exactly calculated. TYPE is the tree type for the result.
13609 This function assumes that you cleared the MPFR flags and then
13610 calculated M to see if anything subsequently set a flag prior to
13611 entering this function. Return NULL_TREE if any checks fail, if
13612 FORCE_CONVERT is true, then bypass the checks. */
13615 do_mpc_ckconv (mpc_srcptr m
, tree type
, int inexact
, int force_convert
)
13617 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13618 overflow/underflow occurred. If -frounding-math, proceed iff the
13619 result of calling FUNC was exact. */
13621 || (mpfr_number_p (mpc_realref (m
)) && mpfr_number_p (mpc_imagref (m
))
13622 && !mpfr_overflow_p () && !mpfr_underflow_p ()
13623 && (!flag_rounding_math
|| !inexact
)))
13625 REAL_VALUE_TYPE re
, im
;
13627 real_from_mpfr (&re
, mpc_realref (m
), TREE_TYPE (type
), GMP_RNDN
);
13628 real_from_mpfr (&im
, mpc_imagref (m
), TREE_TYPE (type
), GMP_RNDN
);
13629 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
13630 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13631 but the mpft_t is not, then we underflowed in the
13634 || (real_isfinite (&re
) && real_isfinite (&im
)
13635 && (re
.cl
== rvc_zero
) == (mpfr_zero_p (mpc_realref (m
)) != 0)
13636 && (im
.cl
== rvc_zero
) == (mpfr_zero_p (mpc_imagref (m
)) != 0)))
13638 REAL_VALUE_TYPE re_mode
, im_mode
;
13640 real_convert (&re_mode
, TYPE_MODE (TREE_TYPE (type
)), &re
);
13641 real_convert (&im_mode
, TYPE_MODE (TREE_TYPE (type
)), &im
);
13642 /* Proceed iff the specified mode can hold the value. */
13644 || (real_identical (&re_mode
, &re
)
13645 && real_identical (&im_mode
, &im
)))
13646 return build_complex (type
, build_real (TREE_TYPE (type
), re_mode
),
13647 build_real (TREE_TYPE (type
), im_mode
));
13653 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
13654 FUNC on it and return the resulting value as a tree with type TYPE.
13655 If MIN and/or MAX are not NULL, then the supplied ARG must be
13656 within those bounds. If INCLUSIVE is true, then MIN/MAX are
13657 acceptable values, otherwise they are not. The mpfr precision is
13658 set to the precision of TYPE. We assume that function FUNC returns
13659 zero if the result could be calculated exactly within the requested
13663 do_mpfr_arg1 (tree arg
, tree type
, int (*func
)(mpfr_ptr
, mpfr_srcptr
, mp_rnd_t
),
13664 const REAL_VALUE_TYPE
*min
, const REAL_VALUE_TYPE
*max
,
13667 tree result
= NULL_TREE
;
13671 /* To proceed, MPFR must exactly represent the target floating point
13672 format, which only happens when the target base equals two. */
13673 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
13674 && TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
13676 const REAL_VALUE_TYPE
*const ra
= &TREE_REAL_CST (arg
);
13678 if (real_isfinite (ra
)
13679 && (!min
|| real_compare (inclusive
? GE_EXPR
: GT_EXPR
, ra
, min
))
13680 && (!max
|| real_compare (inclusive
? LE_EXPR
: LT_EXPR
, ra
, max
)))
13682 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
13683 const int prec
= fmt
->p
;
13684 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
13688 mpfr_init2 (m
, prec
);
13689 mpfr_from_real (m
, ra
, GMP_RNDN
);
13690 mpfr_clear_flags ();
13691 inexact
= func (m
, m
, rnd
);
13692 result
= do_mpfr_ckconv (m
, type
, inexact
);
13700 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
13701 FUNC on it and return the resulting value as a tree with type TYPE.
13702 The mpfr precision is set to the precision of TYPE. We assume that
13703 function FUNC returns zero if the result could be calculated
13704 exactly within the requested precision. */
13707 do_mpfr_arg2 (tree arg1
, tree arg2
, tree type
,
13708 int (*func
)(mpfr_ptr
, mpfr_srcptr
, mpfr_srcptr
, mp_rnd_t
))
13710 tree result
= NULL_TREE
;
13715 /* To proceed, MPFR must exactly represent the target floating point
13716 format, which only happens when the target base equals two. */
13717 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
13718 && TREE_CODE (arg1
) == REAL_CST
&& !TREE_OVERFLOW (arg1
)
13719 && TREE_CODE (arg2
) == REAL_CST
&& !TREE_OVERFLOW (arg2
))
13721 const REAL_VALUE_TYPE
*const ra1
= &TREE_REAL_CST (arg1
);
13722 const REAL_VALUE_TYPE
*const ra2
= &TREE_REAL_CST (arg2
);
13724 if (real_isfinite (ra1
) && real_isfinite (ra2
))
13726 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
13727 const int prec
= fmt
->p
;
13728 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
13732 mpfr_inits2 (prec
, m1
, m2
, NULL
);
13733 mpfr_from_real (m1
, ra1
, GMP_RNDN
);
13734 mpfr_from_real (m2
, ra2
, GMP_RNDN
);
13735 mpfr_clear_flags ();
13736 inexact
= func (m1
, m1
, m2
, rnd
);
13737 result
= do_mpfr_ckconv (m1
, type
, inexact
);
13738 mpfr_clears (m1
, m2
, NULL
);
13745 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13746 FUNC on it and return the resulting value as a tree with type TYPE.
13747 The mpfr precision is set to the precision of TYPE. We assume that
13748 function FUNC returns zero if the result could be calculated
13749 exactly within the requested precision. */
13752 do_mpfr_arg3 (tree arg1
, tree arg2
, tree arg3
, tree type
,
13753 int (*func
)(mpfr_ptr
, mpfr_srcptr
, mpfr_srcptr
, mpfr_srcptr
, mp_rnd_t
))
13755 tree result
= NULL_TREE
;
13761 /* To proceed, MPFR must exactly represent the target floating point
13762 format, which only happens when the target base equals two. */
13763 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
13764 && TREE_CODE (arg1
) == REAL_CST
&& !TREE_OVERFLOW (arg1
)
13765 && TREE_CODE (arg2
) == REAL_CST
&& !TREE_OVERFLOW (arg2
)
13766 && TREE_CODE (arg3
) == REAL_CST
&& !TREE_OVERFLOW (arg3
))
13768 const REAL_VALUE_TYPE
*const ra1
= &TREE_REAL_CST (arg1
);
13769 const REAL_VALUE_TYPE
*const ra2
= &TREE_REAL_CST (arg2
);
13770 const REAL_VALUE_TYPE
*const ra3
= &TREE_REAL_CST (arg3
);
13772 if (real_isfinite (ra1
) && real_isfinite (ra2
) && real_isfinite (ra3
))
13774 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
13775 const int prec
= fmt
->p
;
13776 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
13780 mpfr_inits2 (prec
, m1
, m2
, m3
, NULL
);
13781 mpfr_from_real (m1
, ra1
, GMP_RNDN
);
13782 mpfr_from_real (m2
, ra2
, GMP_RNDN
);
13783 mpfr_from_real (m3
, ra3
, GMP_RNDN
);
13784 mpfr_clear_flags ();
13785 inexact
= func (m1
, m1
, m2
, m3
, rnd
);
13786 result
= do_mpfr_ckconv (m1
, type
, inexact
);
13787 mpfr_clears (m1
, m2
, m3
, NULL
);
13794 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13795 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13796 If ARG_SINP and ARG_COSP are NULL then the result is returned
13797 as a complex value.
13798 The type is taken from the type of ARG and is used for setting the
13799 precision of the calculation and results. */
13802 do_mpfr_sincos (tree arg
, tree arg_sinp
, tree arg_cosp
)
13804 tree
const type
= TREE_TYPE (arg
);
13805 tree result
= NULL_TREE
;
13809 /* To proceed, MPFR must exactly represent the target floating point
13810 format, which only happens when the target base equals two. */
13811 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
13812 && TREE_CODE (arg
) == REAL_CST
13813 && !TREE_OVERFLOW (arg
))
13815 const REAL_VALUE_TYPE
*const ra
= &TREE_REAL_CST (arg
);
13817 if (real_isfinite (ra
))
13819 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
13820 const int prec
= fmt
->p
;
13821 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
13822 tree result_s
, result_c
;
13826 mpfr_inits2 (prec
, m
, ms
, mc
, NULL
);
13827 mpfr_from_real (m
, ra
, GMP_RNDN
);
13828 mpfr_clear_flags ();
13829 inexact
= mpfr_sin_cos (ms
, mc
, m
, rnd
);
13830 result_s
= do_mpfr_ckconv (ms
, type
, inexact
);
13831 result_c
= do_mpfr_ckconv (mc
, type
, inexact
);
13832 mpfr_clears (m
, ms
, mc
, NULL
);
13833 if (result_s
&& result_c
)
13835 /* If we are to return in a complex value do so. */
13836 if (!arg_sinp
&& !arg_cosp
)
13837 return build_complex (build_complex_type (type
),
13838 result_c
, result_s
);
13840 /* Dereference the sin/cos pointer arguments. */
13841 arg_sinp
= build_fold_indirect_ref (arg_sinp
);
13842 arg_cosp
= build_fold_indirect_ref (arg_cosp
);
13843 /* Proceed if valid pointer type were passed in. */
13844 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp
)) == TYPE_MAIN_VARIANT (type
)
13845 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp
)) == TYPE_MAIN_VARIANT (type
))
13847 /* Set the values. */
13848 result_s
= fold_build2 (MODIFY_EXPR
, type
, arg_sinp
,
13850 TREE_SIDE_EFFECTS (result_s
) = 1;
13851 result_c
= fold_build2 (MODIFY_EXPR
, type
, arg_cosp
,
13853 TREE_SIDE_EFFECTS (result_c
) = 1;
13854 /* Combine the assignments into a compound expr. */
13855 result
= non_lvalue (fold_build2 (COMPOUND_EXPR
, type
,
13856 result_s
, result_c
));
13864 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13865 two-argument mpfr order N Bessel function FUNC on them and return
13866 the resulting value as a tree with type TYPE. The mpfr precision
13867 is set to the precision of TYPE. We assume that function FUNC
13868 returns zero if the result could be calculated exactly within the
13869 requested precision. */
13871 do_mpfr_bessel_n (tree arg1
, tree arg2
, tree type
,
13872 int (*func
)(mpfr_ptr
, long, mpfr_srcptr
, mp_rnd_t
),
13873 const REAL_VALUE_TYPE
*min
, bool inclusive
)
13875 tree result
= NULL_TREE
;
13880 /* To proceed, MPFR must exactly represent the target floating point
13881 format, which only happens when the target base equals two. */
13882 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
13883 && host_integerp (arg1
, 0)
13884 && TREE_CODE (arg2
) == REAL_CST
&& !TREE_OVERFLOW (arg2
))
13886 const HOST_WIDE_INT n
= tree_low_cst(arg1
, 0);
13887 const REAL_VALUE_TYPE
*const ra
= &TREE_REAL_CST (arg2
);
13890 && real_isfinite (ra
)
13891 && (!min
|| real_compare (inclusive
? GE_EXPR
: GT_EXPR
, ra
, min
)))
13893 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
13894 const int prec
= fmt
->p
;
13895 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
13899 mpfr_init2 (m
, prec
);
13900 mpfr_from_real (m
, ra
, GMP_RNDN
);
13901 mpfr_clear_flags ();
13902 inexact
= func (m
, n
, m
, rnd
);
13903 result
= do_mpfr_ckconv (m
, type
, inexact
);
13911 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13912 the pointer *(ARG_QUO) and return the result. The type is taken
13913 from the type of ARG0 and is used for setting the precision of the
13914 calculation and results. */
13917 do_mpfr_remquo (tree arg0
, tree arg1
, tree arg_quo
)
13919 tree
const type
= TREE_TYPE (arg0
);
13920 tree result
= NULL_TREE
;
13925 /* To proceed, MPFR must exactly represent the target floating point
13926 format, which only happens when the target base equals two. */
13927 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
13928 && TREE_CODE (arg0
) == REAL_CST
&& !TREE_OVERFLOW (arg0
)
13929 && TREE_CODE (arg1
) == REAL_CST
&& !TREE_OVERFLOW (arg1
))
13931 const REAL_VALUE_TYPE
*const ra0
= TREE_REAL_CST_PTR (arg0
);
13932 const REAL_VALUE_TYPE
*const ra1
= TREE_REAL_CST_PTR (arg1
);
13934 if (real_isfinite (ra0
) && real_isfinite (ra1
))
13936 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
13937 const int prec
= fmt
->p
;
13938 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
13943 mpfr_inits2 (prec
, m0
, m1
, NULL
);
13944 mpfr_from_real (m0
, ra0
, GMP_RNDN
);
13945 mpfr_from_real (m1
, ra1
, GMP_RNDN
);
13946 mpfr_clear_flags ();
13947 mpfr_remquo (m0
, &integer_quo
, m0
, m1
, rnd
);
13948 /* Remquo is independent of the rounding mode, so pass
13949 inexact=0 to do_mpfr_ckconv(). */
13950 result_rem
= do_mpfr_ckconv (m0
, type
, /*inexact=*/ 0);
13951 mpfr_clears (m0
, m1
, NULL
);
13954 /* MPFR calculates quo in the host's long so it may
13955 return more bits in quo than the target int can hold
13956 if sizeof(host long) > sizeof(target int). This can
13957 happen even for native compilers in LP64 mode. In
13958 these cases, modulo the quo value with the largest
13959 number that the target int can hold while leaving one
13960 bit for the sign. */
13961 if (sizeof (integer_quo
) * CHAR_BIT
> INT_TYPE_SIZE
)
13962 integer_quo
%= (long)(1UL << (INT_TYPE_SIZE
- 1));
13964 /* Dereference the quo pointer argument. */
13965 arg_quo
= build_fold_indirect_ref (arg_quo
);
13966 /* Proceed iff a valid pointer type was passed in. */
13967 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo
)) == integer_type_node
)
13969 /* Set the value. */
13971 = fold_build2 (MODIFY_EXPR
, TREE_TYPE (arg_quo
), arg_quo
,
13972 build_int_cst (TREE_TYPE (arg_quo
),
13974 TREE_SIDE_EFFECTS (result_quo
) = 1;
13975 /* Combine the quo assignment with the rem. */
13976 result
= non_lvalue (fold_build2 (COMPOUND_EXPR
, type
,
13977 result_quo
, result_rem
));
13985 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13986 resulting value as a tree with type TYPE. The mpfr precision is
13987 set to the precision of TYPE. We assume that this mpfr function
13988 returns zero if the result could be calculated exactly within the
13989 requested precision. In addition, the integer pointer represented
13990 by ARG_SG will be dereferenced and set to the appropriate signgam
13994 do_mpfr_lgamma_r (tree arg
, tree arg_sg
, tree type
)
13996 tree result
= NULL_TREE
;
14000 /* To proceed, MPFR must exactly represent the target floating point
14001 format, which only happens when the target base equals two. Also
14002 verify ARG is a constant and that ARG_SG is an int pointer. */
14003 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
14004 && TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
)
14005 && TREE_CODE (TREE_TYPE (arg_sg
)) == POINTER_TYPE
14006 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg
))) == integer_type_node
)
14008 const REAL_VALUE_TYPE
*const ra
= TREE_REAL_CST_PTR (arg
);
14010 /* In addition to NaN and Inf, the argument cannot be zero or a
14011 negative integer. */
14012 if (real_isfinite (ra
)
14013 && ra
->cl
!= rvc_zero
14014 && !(real_isneg(ra
) && real_isinteger(ra
, TYPE_MODE (type
))))
14016 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
14017 const int prec
= fmt
->p
;
14018 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
14023 mpfr_init2 (m
, prec
);
14024 mpfr_from_real (m
, ra
, GMP_RNDN
);
14025 mpfr_clear_flags ();
14026 inexact
= mpfr_lgamma (m
, &sg
, m
, rnd
);
14027 result_lg
= do_mpfr_ckconv (m
, type
, inexact
);
14033 /* Dereference the arg_sg pointer argument. */
14034 arg_sg
= build_fold_indirect_ref (arg_sg
);
14035 /* Assign the signgam value into *arg_sg. */
14036 result_sg
= fold_build2 (MODIFY_EXPR
,
14037 TREE_TYPE (arg_sg
), arg_sg
,
14038 build_int_cst (TREE_TYPE (arg_sg
), sg
));
14039 TREE_SIDE_EFFECTS (result_sg
) = 1;
14040 /* Combine the signgam assignment with the lgamma result. */
14041 result
= non_lvalue (fold_build2 (COMPOUND_EXPR
, type
,
14042 result_sg
, result_lg
));
14050 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
14051 function FUNC on it and return the resulting value as a tree with
14052 type TYPE. The mpfr precision is set to the precision of TYPE. We
14053 assume that function FUNC returns zero if the result could be
14054 calculated exactly within the requested precision. */
14057 do_mpc_arg1 (tree arg
, tree type
, int (*func
)(mpc_ptr
, mpc_srcptr
, mpc_rnd_t
))
14059 tree result
= NULL_TREE
;
14063 /* To proceed, MPFR must exactly represent the target floating point
14064 format, which only happens when the target base equals two. */
14065 if (TREE_CODE (arg
) == COMPLEX_CST
&& !TREE_OVERFLOW (arg
)
14066 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) == REAL_TYPE
14067 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg
))))->b
== 2)
14069 const REAL_VALUE_TYPE
*const re
= TREE_REAL_CST_PTR (TREE_REALPART (arg
));
14070 const REAL_VALUE_TYPE
*const im
= TREE_REAL_CST_PTR (TREE_IMAGPART (arg
));
14072 if (real_isfinite (re
) && real_isfinite (im
))
14074 const struct real_format
*const fmt
=
14075 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type
)));
14076 const int prec
= fmt
->p
;
14077 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
14078 const mpc_rnd_t crnd
= fmt
->round_towards_zero
? MPC_RNDZZ
: MPC_RNDNN
;
14082 mpc_init2 (m
, prec
);
14083 mpfr_from_real (mpc_realref(m
), re
, rnd
);
14084 mpfr_from_real (mpc_imagref(m
), im
, rnd
);
14085 mpfr_clear_flags ();
14086 inexact
= func (m
, m
, crnd
);
14087 result
= do_mpc_ckconv (m
, type
, inexact
, /*force_convert=*/ 0);
14095 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
14096 mpc function FUNC on it and return the resulting value as a tree
14097 with type TYPE. The mpfr precision is set to the precision of
14098 TYPE. We assume that function FUNC returns zero if the result
14099 could be calculated exactly within the requested precision. If
14100 DO_NONFINITE is true, then fold expressions containing Inf or NaN
14101 in the arguments and/or results. */
14104 do_mpc_arg2 (tree arg0
, tree arg1
, tree type
, int do_nonfinite
,
14105 int (*func
)(mpc_ptr
, mpc_srcptr
, mpc_srcptr
, mpc_rnd_t
))
14107 tree result
= NULL_TREE
;
14112 /* To proceed, MPFR must exactly represent the target floating point
14113 format, which only happens when the target base equals two. */
14114 if (TREE_CODE (arg0
) == COMPLEX_CST
&& !TREE_OVERFLOW (arg0
)
14115 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
14116 && TREE_CODE (arg1
) == COMPLEX_CST
&& !TREE_OVERFLOW (arg1
)
14117 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1
))) == REAL_TYPE
14118 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0
))))->b
== 2)
14120 const REAL_VALUE_TYPE
*const re0
= TREE_REAL_CST_PTR (TREE_REALPART (arg0
));
14121 const REAL_VALUE_TYPE
*const im0
= TREE_REAL_CST_PTR (TREE_IMAGPART (arg0
));
14122 const REAL_VALUE_TYPE
*const re1
= TREE_REAL_CST_PTR (TREE_REALPART (arg1
));
14123 const REAL_VALUE_TYPE
*const im1
= TREE_REAL_CST_PTR (TREE_IMAGPART (arg1
));
14126 || (real_isfinite (re0
) && real_isfinite (im0
)
14127 && real_isfinite (re1
) && real_isfinite (im1
)))
14129 const struct real_format
*const fmt
=
14130 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type
)));
14131 const int prec
= fmt
->p
;
14132 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
14133 const mpc_rnd_t crnd
= fmt
->round_towards_zero
? MPC_RNDZZ
: MPC_RNDNN
;
14137 mpc_init2 (m0
, prec
);
14138 mpc_init2 (m1
, prec
);
14139 mpfr_from_real (mpc_realref(m0
), re0
, rnd
);
14140 mpfr_from_real (mpc_imagref(m0
), im0
, rnd
);
14141 mpfr_from_real (mpc_realref(m1
), re1
, rnd
);
14142 mpfr_from_real (mpc_imagref(m1
), im1
, rnd
);
14143 mpfr_clear_flags ();
14144 inexact
= func (m0
, m0
, m1
, crnd
);
14145 result
= do_mpc_ckconv (m0
, type
, inexact
, do_nonfinite
);
14154 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
14155 a normal call should be emitted rather than expanding the function
14156 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
14159 gimple_fold_builtin_sprintf_chk (gimple stmt
, enum built_in_function fcode
)
14161 int nargs
= gimple_call_num_args (stmt
);
14163 return fold_builtin_sprintf_chk_1 (gimple_location (stmt
), nargs
,
14165 ? gimple_call_arg_ptr (stmt
, 0)
14166 : &error_mark_node
), fcode
);
14169 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
14170 a normal call should be emitted rather than expanding the function
14171 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
14172 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
14173 passed as second argument. */
14176 gimple_fold_builtin_snprintf_chk (gimple stmt
, tree maxlen
,
14177 enum built_in_function fcode
)
14179 int nargs
= gimple_call_num_args (stmt
);
14181 return fold_builtin_snprintf_chk_1 (gimple_location (stmt
), nargs
,
14183 ? gimple_call_arg_ptr (stmt
, 0)
14184 : &error_mark_node
), maxlen
, fcode
);
14187 /* Builtins with folding operations that operate on "..." arguments
14188 need special handling; we need to store the arguments in a convenient
14189 data structure before attempting any folding. Fortunately there are
14190 only a few builtins that fall into this category. FNDECL is the
14191 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
14192 result of the function call is ignored. */
14195 gimple_fold_builtin_varargs (tree fndecl
, gimple stmt
,
14196 bool ignore ATTRIBUTE_UNUSED
)
14198 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
14199 tree ret
= NULL_TREE
;
14203 case BUILT_IN_SPRINTF_CHK
:
14204 case BUILT_IN_VSPRINTF_CHK
:
14205 ret
= gimple_fold_builtin_sprintf_chk (stmt
, fcode
);
14208 case BUILT_IN_SNPRINTF_CHK
:
14209 case BUILT_IN_VSNPRINTF_CHK
:
14210 ret
= gimple_fold_builtin_snprintf_chk (stmt
, NULL_TREE
, fcode
);
14217 ret
= build1 (NOP_EXPR
, TREE_TYPE (ret
), ret
);
14218 TREE_NO_WARNING (ret
) = 1;
14224 /* A wrapper function for builtin folding that prevents warnings for
14225 "statement without effect" and the like, caused by removing the
14226 call node earlier than the warning is generated. */
14229 fold_call_stmt (gimple stmt
, bool ignore
)
14231 tree ret
= NULL_TREE
;
14232 tree fndecl
= gimple_call_fndecl (stmt
);
14233 location_t loc
= gimple_location (stmt
);
14235 && TREE_CODE (fndecl
) == FUNCTION_DECL
14236 && DECL_BUILT_IN (fndecl
)
14237 && !gimple_call_va_arg_pack_p (stmt
))
14239 int nargs
= gimple_call_num_args (stmt
);
14240 tree
*args
= (nargs
> 0
14241 ? gimple_call_arg_ptr (stmt
, 0)
14242 : &error_mark_node
);
14244 if (avoid_folding_inline_builtin (fndecl
))
14246 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
14248 return targetm
.fold_builtin (fndecl
, nargs
, args
, ignore
);
14252 if (nargs
<= MAX_ARGS_TO_FOLD_BUILTIN
)
14253 ret
= fold_builtin_n (loc
, fndecl
, args
, nargs
, ignore
);
14255 ret
= gimple_fold_builtin_varargs (fndecl
, stmt
, ignore
);
14258 /* Propagate location information from original call to
14259 expansion of builtin. Otherwise things like
14260 maybe_emit_chk_warning, that operate on the expansion
14261 of a builtin, will use the wrong location information. */
14262 if (gimple_has_location (stmt
))
14264 tree realret
= ret
;
14265 if (TREE_CODE (ret
) == NOP_EXPR
)
14266 realret
= TREE_OPERAND (ret
, 0);
14267 if (CAN_HAVE_LOCATION_P (realret
)
14268 && !EXPR_HAS_LOCATION (realret
))
14269 SET_EXPR_LOCATION (realret
, loc
);
14279 /* Look up the function in builtin_decl that corresponds to DECL
14280 and set ASMSPEC as its user assembler name. DECL must be a
14281 function decl that declares a builtin. */
14284 set_builtin_user_assembler_name (tree decl
, const char *asmspec
)
14287 gcc_assert (TREE_CODE (decl
) == FUNCTION_DECL
14288 && DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_NORMAL
14291 builtin
= builtin_decl_explicit (DECL_FUNCTION_CODE (decl
));
14292 set_user_assembler_name (builtin
, asmspec
);
14293 switch (DECL_FUNCTION_CODE (decl
))
14295 case BUILT_IN_MEMCPY
:
14296 init_block_move_fn (asmspec
);
14297 memcpy_libfunc
= set_user_assembler_libfunc ("memcpy", asmspec
);
14299 case BUILT_IN_MEMSET
:
14300 init_block_clear_fn (asmspec
);
14301 memset_libfunc
= set_user_assembler_libfunc ("memset", asmspec
);
14303 case BUILT_IN_MEMMOVE
:
14304 memmove_libfunc
= set_user_assembler_libfunc ("memmove", asmspec
);
14306 case BUILT_IN_MEMCMP
:
14307 memcmp_libfunc
= set_user_assembler_libfunc ("memcmp", asmspec
);
14309 case BUILT_IN_ABORT
:
14310 abort_libfunc
= set_user_assembler_libfunc ("abort", asmspec
);
14313 if (INT_TYPE_SIZE
< BITS_PER_WORD
)
14315 set_user_assembler_libfunc ("ffs", asmspec
);
14316 set_optab_libfunc (ffs_optab
, mode_for_size (INT_TYPE_SIZE
,
14317 MODE_INT
, 0), "ffs");
14325 /* Return true if DECL is a builtin that expands to a constant or similarly
14328 is_simple_builtin (tree decl
)
14330 if (decl
&& DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_NORMAL
)
14331 switch (DECL_FUNCTION_CODE (decl
))
14333 /* Builtins that expand to constants. */
14334 case BUILT_IN_CONSTANT_P
:
14335 case BUILT_IN_EXPECT
:
14336 case BUILT_IN_OBJECT_SIZE
:
14337 case BUILT_IN_UNREACHABLE
:
14338 /* Simple register moves or loads from stack. */
14339 case BUILT_IN_ASSUME_ALIGNED
:
14340 case BUILT_IN_RETURN_ADDRESS
:
14341 case BUILT_IN_EXTRACT_RETURN_ADDR
:
14342 case BUILT_IN_FROB_RETURN_ADDR
:
14343 case BUILT_IN_RETURN
:
14344 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS
:
14345 case BUILT_IN_FRAME_ADDRESS
:
14346 case BUILT_IN_VA_END
:
14347 case BUILT_IN_STACK_SAVE
:
14348 case BUILT_IN_STACK_RESTORE
:
14349 /* Exception state returns or moves registers around. */
14350 case BUILT_IN_EH_FILTER
:
14351 case BUILT_IN_EH_POINTER
:
14352 case BUILT_IN_EH_COPY_VALUES
:
14362 /* Return true if DECL is a builtin that is not expensive, i.e., they are
14363 most probably expanded inline into reasonably simple code. This is a
14364 superset of is_simple_builtin. */
14366 is_inexpensive_builtin (tree decl
)
14370 else if (DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_MD
)
14372 else if (DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_NORMAL
)
14373 switch (DECL_FUNCTION_CODE (decl
))
14376 case BUILT_IN_ALLOCA
:
14377 case BUILT_IN_ALLOCA_WITH_ALIGN
:
14378 case BUILT_IN_BSWAP16
:
14379 case BUILT_IN_BSWAP32
:
14380 case BUILT_IN_BSWAP64
:
14382 case BUILT_IN_CLZIMAX
:
14383 case BUILT_IN_CLZL
:
14384 case BUILT_IN_CLZLL
:
14386 case BUILT_IN_CTZIMAX
:
14387 case BUILT_IN_CTZL
:
14388 case BUILT_IN_CTZLL
:
14390 case BUILT_IN_FFSIMAX
:
14391 case BUILT_IN_FFSL
:
14392 case BUILT_IN_FFSLL
:
14393 case BUILT_IN_IMAXABS
:
14394 case BUILT_IN_FINITE
:
14395 case BUILT_IN_FINITEF
:
14396 case BUILT_IN_FINITEL
:
14397 case BUILT_IN_FINITED32
:
14398 case BUILT_IN_FINITED64
:
14399 case BUILT_IN_FINITED128
:
14400 case BUILT_IN_FPCLASSIFY
:
14401 case BUILT_IN_ISFINITE
:
14402 case BUILT_IN_ISINF_SIGN
:
14403 case BUILT_IN_ISINF
:
14404 case BUILT_IN_ISINFF
:
14405 case BUILT_IN_ISINFL
:
14406 case BUILT_IN_ISINFD32
:
14407 case BUILT_IN_ISINFD64
:
14408 case BUILT_IN_ISINFD128
:
14409 case BUILT_IN_ISNAN
:
14410 case BUILT_IN_ISNANF
:
14411 case BUILT_IN_ISNANL
:
14412 case BUILT_IN_ISNAND32
:
14413 case BUILT_IN_ISNAND64
:
14414 case BUILT_IN_ISNAND128
:
14415 case BUILT_IN_ISNORMAL
:
14416 case BUILT_IN_ISGREATER
:
14417 case BUILT_IN_ISGREATEREQUAL
:
14418 case BUILT_IN_ISLESS
:
14419 case BUILT_IN_ISLESSEQUAL
:
14420 case BUILT_IN_ISLESSGREATER
:
14421 case BUILT_IN_ISUNORDERED
:
14422 case BUILT_IN_VA_ARG_PACK
:
14423 case BUILT_IN_VA_ARG_PACK_LEN
:
14424 case BUILT_IN_VA_COPY
:
14425 case BUILT_IN_TRAP
:
14426 case BUILT_IN_SAVEREGS
:
14427 case BUILT_IN_POPCOUNTL
:
14428 case BUILT_IN_POPCOUNTLL
:
14429 case BUILT_IN_POPCOUNTIMAX
:
14430 case BUILT_IN_POPCOUNT
:
14431 case BUILT_IN_PARITYL
:
14432 case BUILT_IN_PARITYLL
:
14433 case BUILT_IN_PARITYIMAX
:
14434 case BUILT_IN_PARITY
:
14435 case BUILT_IN_LABS
:
14436 case BUILT_IN_LLABS
:
14437 case BUILT_IN_PREFETCH
:
14441 return is_simple_builtin (decl
);