1 /* Expand builtin functions.
2 Copyright (C) 1988-2013 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
22 #include "coretypes.h"
31 #include "hard-reg-set.h"
34 #include "insn-config.h"
40 #include "typeclass.h"
44 #include "langhooks.h"
45 #include "basic-block.h"
47 #include "value-prof.h"
48 #include "diagnostic-core.h"
53 static tree
do_mpc_arg1 (tree
, tree
, int (*)(mpc_ptr
, mpc_srcptr
, mpc_rnd_t
));
55 struct target_builtins default_target_builtins
;
57 struct target_builtins
*this_target_builtins
= &default_target_builtins
;
60 /* Define the names of the builtin function types and codes. */
61 const char *const built_in_class_names
[BUILT_IN_LAST
]
62 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
64 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
65 const char * built_in_names
[(int) END_BUILTINS
] =
67 #include "builtins.def"
71 /* Setup an array of _DECL trees, make sure each element is
72 initialized to NULL_TREE. */
73 builtin_info_type builtin_info
;
75 /* Non-zero if __builtin_constant_p should be folded right away. */
76 bool force_folding_builtin_constant_p
;
78 static const char *c_getstr (tree
);
79 static rtx
c_readstr (const char *, enum machine_mode
);
80 static int target_char_cast (tree
, char *);
81 static rtx
get_memory_rtx (tree
, tree
);
82 static int apply_args_size (void);
83 static int apply_result_size (void);
84 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
85 static rtx
result_vector (int, rtx
);
87 static void expand_builtin_update_setjmp_buf (rtx
);
88 static void expand_builtin_prefetch (tree
);
89 static rtx
expand_builtin_apply_args (void);
90 static rtx
expand_builtin_apply_args_1 (void);
91 static rtx
expand_builtin_apply (rtx
, rtx
, rtx
);
92 static void expand_builtin_return (rtx
);
93 static enum type_class
type_to_class (tree
);
94 static rtx
expand_builtin_classify_type (tree
);
95 static void expand_errno_check (tree
, rtx
);
96 static rtx
expand_builtin_mathfn (tree
, rtx
, rtx
);
97 static rtx
expand_builtin_mathfn_2 (tree
, rtx
, rtx
);
98 static rtx
expand_builtin_mathfn_3 (tree
, rtx
, rtx
);
99 static rtx
expand_builtin_mathfn_ternary (tree
, rtx
, rtx
);
100 static rtx
expand_builtin_interclass_mathfn (tree
, rtx
);
101 static rtx
expand_builtin_sincos (tree
);
102 static rtx
expand_builtin_cexpi (tree
, rtx
);
103 static rtx
expand_builtin_int_roundingfn (tree
, rtx
);
104 static rtx
expand_builtin_int_roundingfn_2 (tree
, rtx
);
105 static rtx
expand_builtin_next_arg (void);
106 static rtx
expand_builtin_va_start (tree
);
107 static rtx
expand_builtin_va_end (tree
);
108 static rtx
expand_builtin_va_copy (tree
);
109 static rtx
expand_builtin_memcmp (tree
, rtx
, enum machine_mode
);
110 static rtx
expand_builtin_strcmp (tree
, rtx
);
111 static rtx
expand_builtin_strncmp (tree
, rtx
, enum machine_mode
);
112 static rtx
builtin_memcpy_read_str (void *, HOST_WIDE_INT
, enum machine_mode
);
113 static rtx
expand_builtin_memcpy (tree
, rtx
);
114 static rtx
expand_builtin_mempcpy (tree
, rtx
, enum machine_mode
);
115 static rtx
expand_builtin_mempcpy_args (tree
, tree
, tree
, rtx
,
116 enum machine_mode
, int);
117 static rtx
expand_builtin_strcpy (tree
, rtx
);
118 static rtx
expand_builtin_strcpy_args (tree
, tree
, rtx
);
119 static rtx
expand_builtin_stpcpy (tree
, rtx
, enum machine_mode
);
120 static rtx
expand_builtin_strncpy (tree
, rtx
);
121 static rtx
builtin_memset_gen_str (void *, HOST_WIDE_INT
, enum machine_mode
);
122 static rtx
expand_builtin_memset (tree
, rtx
, enum machine_mode
);
123 static rtx
expand_builtin_memset_args (tree
, tree
, tree
, rtx
, enum machine_mode
, tree
);
124 static rtx
expand_builtin_bzero (tree
);
125 static rtx
expand_builtin_strlen (tree
, rtx
, enum machine_mode
);
126 static rtx
expand_builtin_alloca (tree
, bool);
127 static rtx
expand_builtin_unop (enum machine_mode
, tree
, rtx
, rtx
, optab
);
128 static rtx
expand_builtin_frame_address (tree
, tree
);
129 static tree
stabilize_va_list_loc (location_t
, tree
, int);
130 static rtx
expand_builtin_expect (tree
, rtx
);
131 static tree
fold_builtin_constant_p (tree
);
132 static tree
fold_builtin_expect (location_t
, tree
, tree
);
133 static tree
fold_builtin_classify_type (tree
);
134 static tree
fold_builtin_strlen (location_t
, tree
, tree
);
135 static tree
fold_builtin_inf (location_t
, tree
, int);
136 static tree
fold_builtin_nan (tree
, tree
, int);
137 static tree
rewrite_call_expr (location_t
, tree
, int, tree
, int, ...);
138 static bool validate_arg (const_tree
, enum tree_code code
);
139 static bool integer_valued_real_p (tree
);
140 static tree
fold_trunc_transparent_mathfn (location_t
, tree
, tree
);
141 static bool readonly_data_expr (tree
);
142 static rtx
expand_builtin_fabs (tree
, rtx
, rtx
);
143 static rtx
expand_builtin_signbit (tree
, rtx
);
144 static tree
fold_builtin_sqrt (location_t
, tree
, tree
);
145 static tree
fold_builtin_cbrt (location_t
, tree
, tree
);
146 static tree
fold_builtin_pow (location_t
, tree
, tree
, tree
, tree
);
147 static tree
fold_builtin_powi (location_t
, tree
, tree
, tree
, tree
);
148 static tree
fold_builtin_cos (location_t
, tree
, tree
, tree
);
149 static tree
fold_builtin_cosh (location_t
, tree
, tree
, tree
);
150 static tree
fold_builtin_tan (tree
, tree
);
151 static tree
fold_builtin_trunc (location_t
, tree
, tree
);
152 static tree
fold_builtin_floor (location_t
, tree
, tree
);
153 static tree
fold_builtin_ceil (location_t
, tree
, tree
);
154 static tree
fold_builtin_round (location_t
, tree
, tree
);
155 static tree
fold_builtin_int_roundingfn (location_t
, tree
, tree
);
156 static tree
fold_builtin_bitop (tree
, tree
);
157 static tree
fold_builtin_memory_op (location_t
, tree
, tree
, tree
, tree
, bool, int);
158 static tree
fold_builtin_strchr (location_t
, tree
, tree
, tree
);
159 static tree
fold_builtin_memchr (location_t
, tree
, tree
, tree
, tree
);
160 static tree
fold_builtin_memcmp (location_t
, tree
, tree
, tree
);
161 static tree
fold_builtin_strcmp (location_t
, tree
, tree
);
162 static tree
fold_builtin_strncmp (location_t
, tree
, tree
, tree
);
163 static tree
fold_builtin_signbit (location_t
, tree
, tree
);
164 static tree
fold_builtin_copysign (location_t
, tree
, tree
, tree
, tree
);
165 static tree
fold_builtin_isascii (location_t
, tree
);
166 static tree
fold_builtin_toascii (location_t
, tree
);
167 static tree
fold_builtin_isdigit (location_t
, tree
);
168 static tree
fold_builtin_fabs (location_t
, tree
, tree
);
169 static tree
fold_builtin_abs (location_t
, tree
, tree
);
170 static tree
fold_builtin_unordered_cmp (location_t
, tree
, tree
, tree
, enum tree_code
,
172 static tree
fold_builtin_n (location_t
, tree
, tree
*, int, bool);
173 static tree
fold_builtin_0 (location_t
, tree
, bool);
174 static tree
fold_builtin_1 (location_t
, tree
, tree
, bool);
175 static tree
fold_builtin_2 (location_t
, tree
, tree
, tree
, bool);
176 static tree
fold_builtin_3 (location_t
, tree
, tree
, tree
, tree
, bool);
177 static tree
fold_builtin_4 (location_t
, tree
, tree
, tree
, tree
, tree
, bool);
178 static tree
fold_builtin_varargs (location_t
, tree
, tree
, bool);
180 static tree
fold_builtin_strpbrk (location_t
, tree
, tree
, tree
);
181 static tree
fold_builtin_strstr (location_t
, tree
, tree
, tree
);
182 static tree
fold_builtin_strrchr (location_t
, tree
, tree
, tree
);
183 static tree
fold_builtin_strcat (location_t
, tree
, tree
);
184 static tree
fold_builtin_strncat (location_t
, tree
, tree
, tree
);
185 static tree
fold_builtin_strspn (location_t
, tree
, tree
);
186 static tree
fold_builtin_strcspn (location_t
, tree
, tree
);
187 static tree
fold_builtin_sprintf (location_t
, tree
, tree
, tree
, int);
188 static tree
fold_builtin_snprintf (location_t
, tree
, tree
, tree
, tree
, int);
190 static rtx
expand_builtin_object_size (tree
);
191 static rtx
expand_builtin_memory_chk (tree
, rtx
, enum machine_mode
,
192 enum built_in_function
);
193 static void maybe_emit_chk_warning (tree
, enum built_in_function
);
194 static void maybe_emit_sprintf_chk_warning (tree
, enum built_in_function
);
195 static void maybe_emit_free_warning (tree
);
196 static tree
fold_builtin_object_size (tree
, tree
);
197 static tree
fold_builtin_strcat_chk (location_t
, tree
, tree
, tree
, tree
);
198 static tree
fold_builtin_strncat_chk (location_t
, tree
, tree
, tree
, tree
, tree
);
199 static tree
fold_builtin_sprintf_chk (location_t
, tree
, enum built_in_function
);
200 static tree
fold_builtin_printf (location_t
, tree
, tree
, tree
, bool, enum built_in_function
);
201 static tree
fold_builtin_fprintf (location_t
, tree
, tree
, tree
, tree
, bool,
202 enum built_in_function
);
203 static bool init_target_chars (void);
205 static unsigned HOST_WIDE_INT target_newline
;
206 static unsigned HOST_WIDE_INT target_percent
;
207 static unsigned HOST_WIDE_INT target_c
;
208 static unsigned HOST_WIDE_INT target_s
;
209 static char target_percent_c
[3];
210 static char target_percent_s
[3];
211 static char target_percent_s_newline
[4];
212 static tree
do_mpfr_arg1 (tree
, tree
, int (*)(mpfr_ptr
, mpfr_srcptr
, mp_rnd_t
),
213 const REAL_VALUE_TYPE
*, const REAL_VALUE_TYPE
*, bool);
214 static tree
do_mpfr_arg2 (tree
, tree
, tree
,
215 int (*)(mpfr_ptr
, mpfr_srcptr
, mpfr_srcptr
, mp_rnd_t
));
216 static tree
do_mpfr_arg3 (tree
, tree
, tree
, tree
,
217 int (*)(mpfr_ptr
, mpfr_srcptr
, mpfr_srcptr
, mpfr_srcptr
, mp_rnd_t
));
218 static tree
do_mpfr_sincos (tree
, tree
, tree
);
219 static tree
do_mpfr_bessel_n (tree
, tree
, tree
,
220 int (*)(mpfr_ptr
, long, mpfr_srcptr
, mp_rnd_t
),
221 const REAL_VALUE_TYPE
*, bool);
222 static tree
do_mpfr_remquo (tree
, tree
, tree
);
223 static tree
do_mpfr_lgamma_r (tree
, tree
, tree
);
224 static void expand_builtin_sync_synchronize (void);
/* Return true if NAME starts with __builtin_ or __sync_.  */

static bool
is_builtin_name (const char *name)
{
  /* These are the reserved prefixes the compiler uses for functions it
     may expand inline regardless of optimization level.  */
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  return false;
}
241 /* Return true if DECL is a function symbol representing a built-in. */
244 is_builtin_fn (tree decl
)
246 return TREE_CODE (decl
) == FUNCTION_DECL
&& DECL_BUILT_IN (decl
);
249 /* By default we assume that c99 functions are present at the runtime,
250 but sincos is not. */
252 default_libc_has_function (enum function_class fn_class
)
254 if (fn_class
== function_c94
255 || fn_class
== function_c99_misc
256 || fn_class
== function_c99_math_complex
)
263 gnu_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED
)
269 no_c99_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED
)
274 /* Return true if NODE should be considered for inline expansion regardless
275 of the optimization level. This means whenever a function is invoked with
276 its "internal" name, which normally contains the prefix "__builtin". */
279 called_as_built_in (tree node
)
281 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
282 we want the name used to call the function, not the name it
284 const char *name
= IDENTIFIER_POINTER (DECL_NAME (node
));
285 return is_builtin_name (name
);
288 /* Compute values M and N such that M divides (address of EXP - N) and such
289 that N < M. If these numbers can be determined, store M in alignp and N in
290 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
291 *alignp and any bit-offset to *bitposp.
293 Note that the address (and thus the alignment) computed here is based
294 on the address to which a symbol resolves, whereas DECL_ALIGN is based
295 on the address at which an object is actually located. These two
296 addresses are not always the same. For example, on ARM targets,
297 the address &foo of a Thumb function foo() has the lowest bit set,
298 whereas foo() itself starts on an even address.
300 If ADDR_P is true we are taking the address of the memory reference EXP
301 and thus cannot rely on the access taking place. */
304 get_object_alignment_2 (tree exp
, unsigned int *alignp
,
305 unsigned HOST_WIDE_INT
*bitposp
, bool addr_p
)
307 HOST_WIDE_INT bitsize
, bitpos
;
309 enum machine_mode mode
;
310 int unsignedp
, volatilep
;
311 unsigned int inner
, align
= BITS_PER_UNIT
;
312 bool known_alignment
= false;
314 /* Get the innermost object and the constant (bitpos) and possibly
315 variable (offset) offset of the access. */
316 exp
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
317 &mode
, &unsignedp
, &volatilep
, true);
319 /* Extract alignment information from the innermost object and
320 possibly adjust bitpos and offset. */
321 if (TREE_CODE (exp
) == FUNCTION_DECL
)
323 /* Function addresses can encode extra information besides their
324 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
325 allows the low bit to be used as a virtual bit, we know
326 that the address itself must be at least 2-byte aligned. */
327 if (TARGET_PTRMEMFUNC_VBIT_LOCATION
== ptrmemfunc_vbit_in_pfn
)
328 align
= 2 * BITS_PER_UNIT
;
330 else if (TREE_CODE (exp
) == LABEL_DECL
)
332 else if (TREE_CODE (exp
) == CONST_DECL
)
334 /* The alignment of a CONST_DECL is determined by its initializer. */
335 exp
= DECL_INITIAL (exp
);
336 align
= TYPE_ALIGN (TREE_TYPE (exp
));
337 #ifdef CONSTANT_ALIGNMENT
338 if (CONSTANT_CLASS_P (exp
))
339 align
= (unsigned) CONSTANT_ALIGNMENT (exp
, align
);
341 known_alignment
= true;
343 else if (DECL_P (exp
))
345 align
= DECL_ALIGN (exp
);
346 known_alignment
= true;
348 else if (TREE_CODE (exp
) == VIEW_CONVERT_EXPR
)
350 align
= TYPE_ALIGN (TREE_TYPE (exp
));
352 else if (TREE_CODE (exp
) == INDIRECT_REF
353 || TREE_CODE (exp
) == MEM_REF
354 || TREE_CODE (exp
) == TARGET_MEM_REF
)
356 tree addr
= TREE_OPERAND (exp
, 0);
358 unsigned HOST_WIDE_INT ptr_bitpos
;
360 if (TREE_CODE (addr
) == BIT_AND_EXPR
361 && TREE_CODE (TREE_OPERAND (addr
, 1)) == INTEGER_CST
)
363 align
= (TREE_INT_CST_LOW (TREE_OPERAND (addr
, 1))
364 & -TREE_INT_CST_LOW (TREE_OPERAND (addr
, 1)));
365 align
*= BITS_PER_UNIT
;
366 addr
= TREE_OPERAND (addr
, 0);
370 = get_pointer_alignment_1 (addr
, &ptr_align
, &ptr_bitpos
);
371 align
= MAX (ptr_align
, align
);
373 /* The alignment of the pointer operand in a TARGET_MEM_REF
374 has to take the variable offset parts into account. */
375 if (TREE_CODE (exp
) == TARGET_MEM_REF
)
379 unsigned HOST_WIDE_INT step
= 1;
381 step
= TREE_INT_CST_LOW (TMR_STEP (exp
));
382 align
= MIN (align
, (step
& -step
) * BITS_PER_UNIT
);
384 if (TMR_INDEX2 (exp
))
385 align
= BITS_PER_UNIT
;
386 known_alignment
= false;
389 /* When EXP is an actual memory reference then we can use
390 TYPE_ALIGN of a pointer indirection to derive alignment.
391 Do so only if get_pointer_alignment_1 did not reveal absolute
392 alignment knowledge and if using that alignment would
393 improve the situation. */
394 if (!addr_p
&& !known_alignment
395 && TYPE_ALIGN (TREE_TYPE (exp
)) > align
)
396 align
= TYPE_ALIGN (TREE_TYPE (exp
));
399 /* Else adjust bitpos accordingly. */
400 bitpos
+= ptr_bitpos
;
401 if (TREE_CODE (exp
) == MEM_REF
402 || TREE_CODE (exp
) == TARGET_MEM_REF
)
403 bitpos
+= mem_ref_offset (exp
).low
* BITS_PER_UNIT
;
406 else if (TREE_CODE (exp
) == STRING_CST
)
408 /* STRING_CST are the only constant objects we allow to be not
409 wrapped inside a CONST_DECL. */
410 align
= TYPE_ALIGN (TREE_TYPE (exp
));
411 #ifdef CONSTANT_ALIGNMENT
412 if (CONSTANT_CLASS_P (exp
))
413 align
= (unsigned) CONSTANT_ALIGNMENT (exp
, align
);
415 known_alignment
= true;
418 /* If there is a non-constant offset part extract the maximum
419 alignment that can prevail. */
425 if (TREE_CODE (offset
) == PLUS_EXPR
)
427 next_offset
= TREE_OPERAND (offset
, 0);
428 offset
= TREE_OPERAND (offset
, 1);
432 if (host_integerp (offset
, 1))
434 /* Any overflow in calculating offset_bits won't change
437 = ((unsigned) tree_low_cst (offset
, 1) * BITS_PER_UNIT
);
440 inner
= MIN (inner
, (offset_bits
& -offset_bits
));
442 else if (TREE_CODE (offset
) == MULT_EXPR
443 && host_integerp (TREE_OPERAND (offset
, 1), 1))
445 /* Any overflow in calculating offset_factor won't change
447 unsigned offset_factor
448 = ((unsigned) tree_low_cst (TREE_OPERAND (offset
, 1), 1)
452 inner
= MIN (inner
, (offset_factor
& -offset_factor
));
456 inner
= MIN (inner
, BITS_PER_UNIT
);
459 offset
= next_offset
;
461 /* Alignment is innermost object alignment adjusted by the constant
462 and non-constant offset parts. */
463 align
= MIN (align
, inner
);
466 *bitposp
= bitpos
& (*alignp
- 1);
467 return known_alignment
;
470 /* For a memory reference expression EXP compute values M and N such that M
471 divides (&EXP - N) and such that N < M. If these numbers can be determined,
472 store M in alignp and N in *BITPOSP and return true. Otherwise return false
473 and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp. */
476 get_object_alignment_1 (tree exp
, unsigned int *alignp
,
477 unsigned HOST_WIDE_INT
*bitposp
)
479 return get_object_alignment_2 (exp
, alignp
, bitposp
, false);
482 /* Return the alignment in bits of EXP, an object. */
485 get_object_alignment (tree exp
)
487 unsigned HOST_WIDE_INT bitpos
= 0;
490 get_object_alignment_1 (exp
, &align
, &bitpos
);
492 /* align and bitpos now specify known low bits of the pointer.
493 ptr & (align - 1) == bitpos. */
496 align
= (bitpos
& -bitpos
);
500 /* For a pointer valued expression EXP compute values M and N such that M
501 divides (EXP - N) and such that N < M. If these numbers can be determined,
502 store M in alignp and N in *BITPOSP and return true. Return false if
503 the results are just a conservative approximation.
505 If EXP is not a pointer, false is returned too. */
508 get_pointer_alignment_1 (tree exp
, unsigned int *alignp
,
509 unsigned HOST_WIDE_INT
*bitposp
)
513 if (TREE_CODE (exp
) == ADDR_EXPR
)
514 return get_object_alignment_2 (TREE_OPERAND (exp
, 0),
515 alignp
, bitposp
, true);
516 else if (TREE_CODE (exp
) == SSA_NAME
517 && POINTER_TYPE_P (TREE_TYPE (exp
)))
519 unsigned int ptr_align
, ptr_misalign
;
520 struct ptr_info_def
*pi
= SSA_NAME_PTR_INFO (exp
);
522 if (pi
&& get_ptr_info_alignment (pi
, &ptr_align
, &ptr_misalign
))
524 *bitposp
= ptr_misalign
* BITS_PER_UNIT
;
525 *alignp
= ptr_align
* BITS_PER_UNIT
;
526 /* We cannot really tell whether this result is an approximation. */
532 *alignp
= BITS_PER_UNIT
;
536 else if (TREE_CODE (exp
) == INTEGER_CST
)
538 *alignp
= BIGGEST_ALIGNMENT
;
539 *bitposp
= ((TREE_INT_CST_LOW (exp
) * BITS_PER_UNIT
)
540 & (BIGGEST_ALIGNMENT
- 1));
545 *alignp
= BITS_PER_UNIT
;
549 /* Return the alignment in bits of EXP, a pointer valued expression.
550 The alignment returned is, by default, the alignment of the thing that
551 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
553 Otherwise, look at the expression to see if we can do better, i.e., if the
554 expression is actually pointing at an object whose alignment is tighter. */
557 get_pointer_alignment (tree exp
)
559 unsigned HOST_WIDE_INT bitpos
= 0;
562 get_pointer_alignment_1 (exp
, &align
, &bitpos
);
564 /* align and bitpos now specify known low bits of the pointer.
565 ptr & (align - 1) == bitpos. */
568 align
= (bitpos
& -bitpos
);
573 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
574 way, because it could contain a zero byte in the middle.
575 TREE_STRING_LENGTH is the size of the character array, not the string.
577 ONLY_VALUE should be nonzero if the result is not going to be emitted
578 into the instruction stream and zero if it is going to be expanded.
579 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
580 is returned, otherwise NULL, since
581 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
582 evaluate the side-effects.
584 The value returned is of type `ssizetype'.
586 Unfortunately, string_constant can't access the values of const char
587 arrays with initializers, so neither can we do so here. */
590 c_strlen (tree src
, int only_value
)
593 HOST_WIDE_INT offset
;
599 if (TREE_CODE (src
) == COND_EXPR
600 && (only_value
|| !TREE_SIDE_EFFECTS (TREE_OPERAND (src
, 0))))
604 len1
= c_strlen (TREE_OPERAND (src
, 1), only_value
);
605 len2
= c_strlen (TREE_OPERAND (src
, 2), only_value
);
606 if (tree_int_cst_equal (len1
, len2
))
610 if (TREE_CODE (src
) == COMPOUND_EXPR
611 && (only_value
|| !TREE_SIDE_EFFECTS (TREE_OPERAND (src
, 0))))
612 return c_strlen (TREE_OPERAND (src
, 1), only_value
);
614 loc
= EXPR_LOC_OR_HERE (src
);
616 src
= string_constant (src
, &offset_node
);
620 max
= TREE_STRING_LENGTH (src
) - 1;
621 ptr
= TREE_STRING_POINTER (src
);
623 if (offset_node
&& TREE_CODE (offset_node
) != INTEGER_CST
)
625 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
626 compute the offset to the following null if we don't know where to
627 start searching for it. */
630 for (i
= 0; i
< max
; i
++)
634 /* We don't know the starting offset, but we do know that the string
635 has no internal zero bytes. We can assume that the offset falls
636 within the bounds of the string; otherwise, the programmer deserves
637 what he gets. Subtract the offset from the length of the string,
638 and return that. This would perhaps not be valid if we were dealing
639 with named arrays in addition to literal string constants. */
641 return size_diffop_loc (loc
, size_int (max
), offset_node
);
644 /* We have a known offset into the string. Start searching there for
645 a null character if we can represent it as a single HOST_WIDE_INT. */
646 if (offset_node
== 0)
648 else if (! host_integerp (offset_node
, 0))
651 offset
= tree_low_cst (offset_node
, 0);
653 /* If the offset is known to be out of bounds, warn, and call strlen at
655 if (offset
< 0 || offset
> max
)
657 /* Suppress multiple warnings for propagated constant strings. */
658 if (! TREE_NO_WARNING (src
))
660 warning_at (loc
, 0, "offset outside bounds of constant string");
661 TREE_NO_WARNING (src
) = 1;
666 /* Use strlen to search for the first zero byte. Since any strings
667 constructed with build_string will have nulls appended, we win even
668 if we get handed something like (char[4])"abcd".
670 Since OFFSET is our starting index into the string, no further
671 calculation is needed. */
672 return ssize_int (strlen (ptr
+ offset
));
675 /* Return a char pointer for a C string if it is a string constant
676 or sum of string constant and integer constant. */
683 src
= string_constant (src
, &offset_node
);
687 if (offset_node
== 0)
688 return TREE_STRING_POINTER (src
);
689 else if (!host_integerp (offset_node
, 1)
690 || compare_tree_int (offset_node
, TREE_STRING_LENGTH (src
) - 1) > 0)
693 return TREE_STRING_POINTER (src
) + tree_low_cst (offset_node
, 1);
696 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
697 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
700 c_readstr (const char *str
, enum machine_mode mode
)
706 gcc_assert (GET_MODE_CLASS (mode
) == MODE_INT
);
711 for (i
= 0; i
< GET_MODE_SIZE (mode
); i
++)
714 if (WORDS_BIG_ENDIAN
)
715 j
= GET_MODE_SIZE (mode
) - i
- 1;
716 if (BYTES_BIG_ENDIAN
!= WORDS_BIG_ENDIAN
717 && GET_MODE_SIZE (mode
) >= UNITS_PER_WORD
)
718 j
= j
+ UNITS_PER_WORD
- 2 * (j
% UNITS_PER_WORD
) - 1;
720 gcc_assert (j
< HOST_BITS_PER_DOUBLE_INT
);
723 ch
= (unsigned char) str
[i
];
724 c
[j
/ HOST_BITS_PER_WIDE_INT
] |= ch
<< (j
% HOST_BITS_PER_WIDE_INT
);
726 return immed_double_const (c
[0], c
[1], mode
);
729 /* Cast a target constant CST to target CHAR and if that value fits into
730 host char type, return zero and put that value into variable pointed to by
734 target_char_cast (tree cst
, char *p
)
736 unsigned HOST_WIDE_INT val
, hostval
;
738 if (TREE_CODE (cst
) != INTEGER_CST
739 || CHAR_TYPE_SIZE
> HOST_BITS_PER_WIDE_INT
)
742 val
= TREE_INT_CST_LOW (cst
);
743 if (CHAR_TYPE_SIZE
< HOST_BITS_PER_WIDE_INT
)
744 val
&= (((unsigned HOST_WIDE_INT
) 1) << CHAR_TYPE_SIZE
) - 1;
747 if (HOST_BITS_PER_CHAR
< HOST_BITS_PER_WIDE_INT
)
748 hostval
&= (((unsigned HOST_WIDE_INT
) 1) << HOST_BITS_PER_CHAR
) - 1;
757 /* Similar to save_expr, but assumes that arbitrary code is not executed
758 in between the multiple evaluations. In particular, we assume that a
759 non-addressable local variable will not be modified. */
762 builtin_save_expr (tree exp
)
764 if (TREE_CODE (exp
) == SSA_NAME
765 || (TREE_ADDRESSABLE (exp
) == 0
766 && (TREE_CODE (exp
) == PARM_DECL
767 || (TREE_CODE (exp
) == VAR_DECL
&& !TREE_STATIC (exp
)))))
770 return save_expr (exp
);
773 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
774 times to get the address of either a higher stack frame, or a return
775 address located within it (depending on FNDECL_CODE). */
778 expand_builtin_return_addr (enum built_in_function fndecl_code
, int count
)
782 #ifdef INITIAL_FRAME_ADDRESS_RTX
783 rtx tem
= INITIAL_FRAME_ADDRESS_RTX
;
787 /* For a zero count with __builtin_return_address, we don't care what
788 frame address we return, because target-specific definitions will
789 override us. Therefore frame pointer elimination is OK, and using
790 the soft frame pointer is OK.
792 For a nonzero count, or a zero count with __builtin_frame_address,
793 we require a stable offset from the current frame pointer to the
794 previous one, so we must use the hard frame pointer, and
795 we must disable frame pointer elimination. */
796 if (count
== 0 && fndecl_code
== BUILT_IN_RETURN_ADDRESS
)
797 tem
= frame_pointer_rtx
;
800 tem
= hard_frame_pointer_rtx
;
802 /* Tell reload not to eliminate the frame pointer. */
803 crtl
->accesses_prior_frames
= 1;
807 /* Some machines need special handling before we can access
808 arbitrary frames. For example, on the SPARC, we must first flush
809 all register windows to the stack. */
810 #ifdef SETUP_FRAME_ADDRESSES
812 SETUP_FRAME_ADDRESSES ();
815 /* On the SPARC, the return address is not in the frame, it is in a
816 register. There is no way to access it off of the current frame
817 pointer, but it can be accessed off the previous frame pointer by
818 reading the value from the register window save area. */
819 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
820 if (fndecl_code
== BUILT_IN_RETURN_ADDRESS
)
824 /* Scan back COUNT frames to the specified frame. */
825 for (i
= 0; i
< count
; i
++)
827 /* Assume the dynamic chain pointer is in the word that the
828 frame address points to, unless otherwise specified. */
829 #ifdef DYNAMIC_CHAIN_ADDRESS
830 tem
= DYNAMIC_CHAIN_ADDRESS (tem
);
832 tem
= memory_address (Pmode
, tem
);
833 tem
= gen_frame_mem (Pmode
, tem
);
834 tem
= copy_to_reg (tem
);
837 /* For __builtin_frame_address, return what we've got. But, on
838 the SPARC for example, we may have to add a bias. */
839 if (fndecl_code
== BUILT_IN_FRAME_ADDRESS
)
840 #ifdef FRAME_ADDR_RTX
841 return FRAME_ADDR_RTX (tem
);
846 /* For __builtin_return_address, get the return address from that frame. */
847 #ifdef RETURN_ADDR_RTX
848 tem
= RETURN_ADDR_RTX (count
, tem
);
850 tem
= memory_address (Pmode
,
851 plus_constant (Pmode
, tem
, GET_MODE_SIZE (Pmode
)));
852 tem
= gen_frame_mem (Pmode
, tem
);
857 /* Alias set used for setjmp buffer. */
858 static alias_set_type setjmp_alias_set
= -1;
860 /* Construct the leading half of a __builtin_setjmp call. Control will
861 return to RECEIVER_LABEL. This is also called directly by the SJLJ
862 exception handling code. */
865 expand_builtin_setjmp_setup (rtx buf_addr
, rtx receiver_label
)
867 enum machine_mode sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
871 if (setjmp_alias_set
== -1)
872 setjmp_alias_set
= new_alias_set ();
874 buf_addr
= convert_memory_address (Pmode
, buf_addr
);
876 buf_addr
= force_reg (Pmode
, force_operand (buf_addr
, NULL_RTX
));
878 /* We store the frame pointer and the address of receiver_label in
879 the buffer and use the rest of it for the stack save area, which
880 is machine-dependent. */
882 mem
= gen_rtx_MEM (Pmode
, buf_addr
);
883 set_mem_alias_set (mem
, setjmp_alias_set
);
884 emit_move_insn (mem
, targetm
.builtin_setjmp_frame_value ());
886 mem
= gen_rtx_MEM (Pmode
, plus_constant (Pmode
, buf_addr
,
887 GET_MODE_SIZE (Pmode
))),
888 set_mem_alias_set (mem
, setjmp_alias_set
);
890 emit_move_insn (validize_mem (mem
),
891 force_reg (Pmode
, gen_rtx_LABEL_REF (Pmode
, receiver_label
)));
893 stack_save
= gen_rtx_MEM (sa_mode
,
894 plus_constant (Pmode
, buf_addr
,
895 2 * GET_MODE_SIZE (Pmode
)));
896 set_mem_alias_set (stack_save
, setjmp_alias_set
);
897 emit_stack_save (SAVE_NONLOCAL
, &stack_save
);
899 /* If there is further processing to do, do it. */
900 #ifdef HAVE_builtin_setjmp_setup
901 if (HAVE_builtin_setjmp_setup
)
902 emit_insn (gen_builtin_setjmp_setup (buf_addr
));
905 /* We have a nonlocal label. */
906 cfun
->has_nonlocal_label
= 1;
909 /* Construct the trailing part of a __builtin_setjmp call. This is
910 also called directly by the SJLJ exception handling code.
911 If RECEIVER_LABEL is NULL, instead contruct a nonlocal goto handler. */
914 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED
)
918 /* Mark the FP as used when we get here, so we have to make sure it's
919 marked as used by this function. */
920 emit_use (hard_frame_pointer_rtx
);
922 /* Mark the static chain as clobbered here so life information
923 doesn't get messed up for it. */
924 chain
= targetm
.calls
.static_chain (current_function_decl
, true);
925 if (chain
&& REG_P (chain
))
926 emit_clobber (chain
);
928 /* Now put in the code to restore the frame pointer, and argument
929 pointer, if needed. */
930 #ifdef HAVE_nonlocal_goto
931 if (! HAVE_nonlocal_goto
)
933 /* First adjust our frame pointer to its actual value. It was
934 previously set to the start of the virtual area corresponding to
935 the stacked variables when we branched here and now needs to be
936 adjusted to the actual hardware fp value.
938 Assignments to virtual registers are converted by
939 instantiate_virtual_regs into the corresponding assignment
940 to the underlying register (fp in this case) that makes
941 the original assignment true.
942 So the following insn will actually be decrementing fp by
943 STARTING_FRAME_OFFSET. */
944 emit_move_insn (virtual_stack_vars_rtx
, hard_frame_pointer_rtx
);
946 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
947 if (fixed_regs
[ARG_POINTER_REGNUM
])
949 #ifdef ELIMINABLE_REGS
950 /* If the argument pointer can be eliminated in favor of the
951 frame pointer, we don't need to restore it. We assume here
952 that if such an elimination is present, it can always be used.
953 This is the case on all known machines; if we don't make this
954 assumption, we do unnecessary saving on many machines. */
956 static const struct elims
{const int from
, to
;} elim_regs
[] = ELIMINABLE_REGS
;
958 for (i
= 0; i
< ARRAY_SIZE (elim_regs
); i
++)
959 if (elim_regs
[i
].from
== ARG_POINTER_REGNUM
960 && elim_regs
[i
].to
== HARD_FRAME_POINTER_REGNUM
)
963 if (i
== ARRAY_SIZE (elim_regs
))
966 /* Now restore our arg pointer from the address at which it
967 was saved in our stack frame. */
968 emit_move_insn (crtl
->args
.internal_arg_pointer
,
969 copy_to_reg (get_arg_pointer_save_area ()));
974 #ifdef HAVE_builtin_setjmp_receiver
975 if (receiver_label
!= NULL
&& HAVE_builtin_setjmp_receiver
)
976 emit_insn (gen_builtin_setjmp_receiver (receiver_label
));
979 #ifdef HAVE_nonlocal_goto_receiver
980 if (HAVE_nonlocal_goto_receiver
)
981 emit_insn (gen_nonlocal_goto_receiver ());
986 /* We must not allow the code we just generated to be reordered by
987 scheduling. Specifically, the update of the frame pointer must
988 happen immediately, not later. Similarly, we must block
989 (frame-related) register values to be used across this code. */
990 emit_insn (gen_blockage ());
993 /* __builtin_longjmp is passed a pointer to an array of five words (not
994 all will be used on all machines). It operates similarly to the C
995 library function of the same name, but is more efficient. Much of
996 the code below is copied from the handling of non-local gotos. */
999 expand_builtin_longjmp (rtx buf_addr
, rtx value
)
1001 rtx fp
, lab
, stack
, insn
, last
;
1002 enum machine_mode sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
1004 /* DRAP is needed for stack realign if longjmp is expanded to current
1006 if (SUPPORTS_STACK_ALIGNMENT
)
1007 crtl
->need_drap
= true;
1009 if (setjmp_alias_set
== -1)
1010 setjmp_alias_set
= new_alias_set ();
1012 buf_addr
= convert_memory_address (Pmode
, buf_addr
);
1014 buf_addr
= force_reg (Pmode
, buf_addr
);
1016 /* We require that the user must pass a second argument of 1, because
1017 that is what builtin_setjmp will return. */
1018 gcc_assert (value
== const1_rtx
);
1020 last
= get_last_insn ();
1021 #ifdef HAVE_builtin_longjmp
1022 if (HAVE_builtin_longjmp
)
1023 emit_insn (gen_builtin_longjmp (buf_addr
));
1027 fp
= gen_rtx_MEM (Pmode
, buf_addr
);
1028 lab
= gen_rtx_MEM (Pmode
, plus_constant (Pmode
, buf_addr
,
1029 GET_MODE_SIZE (Pmode
)));
1031 stack
= gen_rtx_MEM (sa_mode
, plus_constant (Pmode
, buf_addr
,
1032 2 * GET_MODE_SIZE (Pmode
)));
1033 set_mem_alias_set (fp
, setjmp_alias_set
);
1034 set_mem_alias_set (lab
, setjmp_alias_set
);
1035 set_mem_alias_set (stack
, setjmp_alias_set
);
1037 /* Pick up FP, label, and SP from the block and jump. This code is
1038 from expand_goto in stmt.c; see there for detailed comments. */
1039 #ifdef HAVE_nonlocal_goto
1040 if (HAVE_nonlocal_goto
)
1041 /* We have to pass a value to the nonlocal_goto pattern that will
1042 get copied into the static_chain pointer, but it does not matter
1043 what that value is, because builtin_setjmp does not use it. */
1044 emit_insn (gen_nonlocal_goto (value
, lab
, stack
, fp
));
1048 lab
= copy_to_reg (lab
);
1050 emit_clobber (gen_rtx_MEM (BLKmode
, gen_rtx_SCRATCH (VOIDmode
)));
1051 emit_clobber (gen_rtx_MEM (BLKmode
, hard_frame_pointer_rtx
));
1053 emit_move_insn (hard_frame_pointer_rtx
, fp
);
1054 emit_stack_restore (SAVE_NONLOCAL
, stack
);
1056 emit_use (hard_frame_pointer_rtx
);
1057 emit_use (stack_pointer_rtx
);
1058 emit_indirect_jump (lab
);
1062 /* Search backwards and mark the jump insn as a non-local goto.
1063 Note that this precludes the use of __builtin_longjmp to a
1064 __builtin_setjmp target in the same function. However, we've
1065 already cautioned the user that these functions are for
1066 internal exception handling use only. */
1067 for (insn
= get_last_insn (); insn
; insn
= PREV_INSN (insn
))
1069 gcc_assert (insn
!= last
);
1073 add_reg_note (insn
, REG_NON_LOCAL_GOTO
, const0_rtx
);
1076 else if (CALL_P (insn
))
1081 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1082 and the address of the save area. */
1085 expand_builtin_nonlocal_goto (tree exp
)
1087 tree t_label
, t_save_area
;
1088 rtx r_label
, r_save_area
, r_fp
, r_sp
, insn
;
1090 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
1093 t_label
= CALL_EXPR_ARG (exp
, 0);
1094 t_save_area
= CALL_EXPR_ARG (exp
, 1);
1096 r_label
= expand_normal (t_label
);
1097 r_label
= convert_memory_address (Pmode
, r_label
);
1098 r_save_area
= expand_normal (t_save_area
);
1099 r_save_area
= convert_memory_address (Pmode
, r_save_area
);
1100 /* Copy the address of the save location to a register just in case it was
1101 based on the frame pointer. */
1102 r_save_area
= copy_to_reg (r_save_area
);
1103 r_fp
= gen_rtx_MEM (Pmode
, r_save_area
);
1104 r_sp
= gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL
),
1105 plus_constant (Pmode
, r_save_area
,
1106 GET_MODE_SIZE (Pmode
)));
1108 crtl
->has_nonlocal_goto
= 1;
1110 #ifdef HAVE_nonlocal_goto
1111 /* ??? We no longer need to pass the static chain value, afaik. */
1112 if (HAVE_nonlocal_goto
)
1113 emit_insn (gen_nonlocal_goto (const0_rtx
, r_label
, r_sp
, r_fp
));
1117 r_label
= copy_to_reg (r_label
);
1119 emit_clobber (gen_rtx_MEM (BLKmode
, gen_rtx_SCRATCH (VOIDmode
)));
1120 emit_clobber (gen_rtx_MEM (BLKmode
, hard_frame_pointer_rtx
));
1122 /* Restore frame pointer for containing function. */
1123 emit_move_insn (hard_frame_pointer_rtx
, r_fp
);
1124 emit_stack_restore (SAVE_NONLOCAL
, r_sp
);
1126 /* USE of hard_frame_pointer_rtx added for consistency;
1127 not clear if really needed. */
1128 emit_use (hard_frame_pointer_rtx
);
1129 emit_use (stack_pointer_rtx
);
1131 /* If the architecture is using a GP register, we must
1132 conservatively assume that the target function makes use of it.
1133 The prologue of functions with nonlocal gotos must therefore
1134 initialize the GP register to the appropriate value, and we
1135 must then make sure that this value is live at the point
1136 of the jump. (Note that this doesn't necessarily apply
1137 to targets with a nonlocal_goto pattern; they are free
1138 to implement it in their own way. Note also that this is
1139 a no-op if the GP register is a global invariant.) */
1140 if ((unsigned) PIC_OFFSET_TABLE_REGNUM
!= INVALID_REGNUM
1141 && fixed_regs
[PIC_OFFSET_TABLE_REGNUM
])
1142 emit_use (pic_offset_table_rtx
);
1144 emit_indirect_jump (r_label
);
1147 /* Search backwards to the jump insn and mark it as a
1149 for (insn
= get_last_insn (); insn
; insn
= PREV_INSN (insn
))
1153 add_reg_note (insn
, REG_NON_LOCAL_GOTO
, const0_rtx
);
1156 else if (CALL_P (insn
))
1163 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1164 (not all will be used on all machines) that was passed to __builtin_setjmp.
1165 It updates the stack pointer in that block to correspond to the current
1169 expand_builtin_update_setjmp_buf (rtx buf_addr
)
1171 enum machine_mode sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
1173 = gen_rtx_MEM (sa_mode
,
1176 plus_constant (Pmode
, buf_addr
,
1177 2 * GET_MODE_SIZE (Pmode
))));
1179 emit_stack_save (SAVE_NONLOCAL
, &stack_save
);
1182 /* Expand a call to __builtin_prefetch. For a target that does not support
1183 data prefetch, evaluate the memory address argument in case it has side
1187 expand_builtin_prefetch (tree exp
)
1189 tree arg0
, arg1
, arg2
;
1193 if (!validate_arglist (exp
, POINTER_TYPE
, 0))
1196 arg0
= CALL_EXPR_ARG (exp
, 0);
1198 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1199 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1201 nargs
= call_expr_nargs (exp
);
1203 arg1
= CALL_EXPR_ARG (exp
, 1);
1205 arg1
= integer_zero_node
;
1207 arg2
= CALL_EXPR_ARG (exp
, 2);
1209 arg2
= integer_three_node
;
1211 /* Argument 0 is an address. */
1212 op0
= expand_expr (arg0
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
1214 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1215 if (TREE_CODE (arg1
) != INTEGER_CST
)
1217 error ("second argument to %<__builtin_prefetch%> must be a constant");
1218 arg1
= integer_zero_node
;
1220 op1
= expand_normal (arg1
);
1221 /* Argument 1 must be either zero or one. */
1222 if (INTVAL (op1
) != 0 && INTVAL (op1
) != 1)
1224 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1229 /* Argument 2 (locality) must be a compile-time constant int. */
1230 if (TREE_CODE (arg2
) != INTEGER_CST
)
1232 error ("third argument to %<__builtin_prefetch%> must be a constant");
1233 arg2
= integer_zero_node
;
1235 op2
= expand_normal (arg2
);
1236 /* Argument 2 must be 0, 1, 2, or 3. */
1237 if (INTVAL (op2
) < 0 || INTVAL (op2
) > 3)
1239 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1243 #ifdef HAVE_prefetch
1246 struct expand_operand ops
[3];
1248 create_address_operand (&ops
[0], op0
);
1249 create_integer_operand (&ops
[1], INTVAL (op1
));
1250 create_integer_operand (&ops
[2], INTVAL (op2
));
1251 if (maybe_expand_insn (CODE_FOR_prefetch
, 3, ops
))
1256 /* Don't do anything with direct references to volatile memory, but
1257 generate code to handle other side effects. */
1258 if (!MEM_P (op0
) && side_effects_p (op0
))
1262 /* Get a MEM rtx for expression EXP which is the address of an operand
1263 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1264 the maximum length of the block of memory that might be accessed or
1268 get_memory_rtx (tree exp
, tree len
)
1270 tree orig_exp
= exp
;
1273 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1274 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1275 if (TREE_CODE (exp
) == SAVE_EXPR
&& !SAVE_EXPR_RESOLVED_P (exp
))
1276 exp
= TREE_OPERAND (exp
, 0);
1278 addr
= expand_expr (orig_exp
, NULL_RTX
, ptr_mode
, EXPAND_NORMAL
);
1279 mem
= gen_rtx_MEM (BLKmode
, memory_address (BLKmode
, addr
));
1281 /* Get an expression we can use to find the attributes to assign to MEM.
1282 First remove any nops. */
1283 while (CONVERT_EXPR_P (exp
)
1284 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp
, 0))))
1285 exp
= TREE_OPERAND (exp
, 0);
1287 /* Build a MEM_REF representing the whole accessed area as a byte blob,
1288 (as builtin stringops may alias with anything). */
1289 exp
= fold_build2 (MEM_REF
,
1290 build_array_type (char_type_node
,
1291 build_range_type (sizetype
,
1292 size_one_node
, len
)),
1293 exp
, build_int_cst (ptr_type_node
, 0));
1295 /* If the MEM_REF has no acceptable address, try to get the base object
1296 from the original address we got, and build an all-aliasing
1297 unknown-sized access to that one. */
1298 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp
, 0)))
1299 set_mem_attributes (mem
, exp
, 0);
1300 else if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
1301 && (exp
= get_base_address (TREE_OPERAND (TREE_OPERAND (exp
, 0),
1304 exp
= build_fold_addr_expr (exp
);
1305 exp
= fold_build2 (MEM_REF
,
1306 build_array_type (char_type_node
,
1307 build_range_type (sizetype
,
1310 exp
, build_int_cst (ptr_type_node
, 0));
1311 set_mem_attributes (mem
, exp
, 0);
1313 set_mem_alias_set (mem
, 0);
1317 /* Built-in functions to perform an untyped call and return. */
1319 #define apply_args_mode \
1320 (this_target_builtins->x_apply_args_mode)
1321 #define apply_result_mode \
1322 (this_target_builtins->x_apply_result_mode)
1324 /* Return the size required for the block returned by __builtin_apply_args,
1325 and initialize apply_args_mode. */
1328 apply_args_size (void)
1330 static int size
= -1;
1333 enum machine_mode mode
;
1335 /* The values computed by this function never change. */
1338 /* The first value is the incoming arg-pointer. */
1339 size
= GET_MODE_SIZE (Pmode
);
1341 /* The second value is the structure value address unless this is
1342 passed as an "invisible" first argument. */
1343 if (targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0))
1344 size
+= GET_MODE_SIZE (Pmode
);
1346 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1347 if (FUNCTION_ARG_REGNO_P (regno
))
1349 mode
= targetm
.calls
.get_raw_arg_mode (regno
);
1351 gcc_assert (mode
!= VOIDmode
);
1353 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1354 if (size
% align
!= 0)
1355 size
= CEIL (size
, align
) * align
;
1356 size
+= GET_MODE_SIZE (mode
);
1357 apply_args_mode
[regno
] = mode
;
1361 apply_args_mode
[regno
] = VOIDmode
;
1367 /* Return the size required for the block returned by __builtin_apply,
1368 and initialize apply_result_mode. */
1371 apply_result_size (void)
1373 static int size
= -1;
1375 enum machine_mode mode
;
1377 /* The values computed by this function never change. */
1382 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1383 if (targetm
.calls
.function_value_regno_p (regno
))
1385 mode
= targetm
.calls
.get_raw_result_mode (regno
);
1387 gcc_assert (mode
!= VOIDmode
);
1389 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1390 if (size
% align
!= 0)
1391 size
= CEIL (size
, align
) * align
;
1392 size
+= GET_MODE_SIZE (mode
);
1393 apply_result_mode
[regno
] = mode
;
1396 apply_result_mode
[regno
] = VOIDmode
;
1398 /* Allow targets that use untyped_call and untyped_return to override
1399 the size so that machine-specific information can be stored here. */
1400 #ifdef APPLY_RESULT_SIZE
1401 size
= APPLY_RESULT_SIZE
;
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	/* Keep each register slot aligned to the mode's alignment.  */
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (VOIDmode, mem, reg)
			    : gen_rtx_SET (VOIDmode, reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */
1438 /* Save the state required to perform an untyped call with the same
1439 arguments as were passed to the current function. */
1442 expand_builtin_apply_args_1 (void)
1445 int size
, align
, regno
;
1446 enum machine_mode mode
;
1447 rtx struct_incoming_value
= targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 1);
1449 /* Create a block where the arg-pointer, structure value address,
1450 and argument registers can be saved. */
1451 registers
= assign_stack_local (BLKmode
, apply_args_size (), -1);
1453 /* Walk past the arg-pointer and structure value address. */
1454 size
= GET_MODE_SIZE (Pmode
);
1455 if (targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0))
1456 size
+= GET_MODE_SIZE (Pmode
);
1458 /* Save each register used in calling a function to the block. */
1459 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1460 if ((mode
= apply_args_mode
[regno
]) != VOIDmode
)
1462 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1463 if (size
% align
!= 0)
1464 size
= CEIL (size
, align
) * align
;
1466 tem
= gen_rtx_REG (mode
, INCOMING_REGNO (regno
));
1468 emit_move_insn (adjust_address (registers
, mode
, size
), tem
);
1469 size
+= GET_MODE_SIZE (mode
);
1472 /* Save the arg pointer to the block. */
1473 tem
= copy_to_reg (crtl
->args
.internal_arg_pointer
);
1474 #ifdef STACK_GROWS_DOWNWARD
1475 /* We need the pointer as the caller actually passed them to us, not
1476 as we might have pretended they were passed. Make sure it's a valid
1477 operand, as emit_move_insn isn't expected to handle a PLUS. */
1479 = force_operand (plus_constant (Pmode
, tem
, crtl
->args
.pretend_args_size
),
1482 emit_move_insn (adjust_address (registers
, Pmode
, 0), tem
);
1484 size
= GET_MODE_SIZE (Pmode
);
1486 /* Save the structure value address unless this is passed as an
1487 "invisible" first argument. */
1488 if (struct_incoming_value
)
1490 emit_move_insn (adjust_address (registers
, Pmode
, size
),
1491 copy_to_reg (struct_incoming_value
));
1492 size
+= GET_MODE_SIZE (Pmode
);
1495 /* Return the address of the block. */
1496 return copy_addr_to_reg (XEXP (registers
, 0));
1499 /* __builtin_apply_args returns block of memory allocated on
1500 the stack into which is stored the arg pointer, structure
1501 value address, static chain, and all the registers that might
1502 possibly be used in performing a function call. The code is
1503 moved to the start of the function so the incoming values are
1507 expand_builtin_apply_args (void)
1509 /* Don't do __builtin_apply_args more than once in a function.
1510 Save the result of the first call and reuse it. */
1511 if (apply_args_value
!= 0)
1512 return apply_args_value
;
1514 /* When this function is called, it means that registers must be
1515 saved on entry to this function. So we migrate the
1516 call to the first insn of this function. */
1521 temp
= expand_builtin_apply_args_1 ();
1525 apply_args_value
= temp
;
1527 /* Put the insns after the NOTE that starts the function.
1528 If this is inside a start_sequence, make the outer-level insn
1529 chain current, so the code is placed at the start of the
1530 function. If internal_arg_pointer is a non-virtual pseudo,
1531 it needs to be placed after the function that initializes
1533 push_topmost_sequence ();
1534 if (REG_P (crtl
->args
.internal_arg_pointer
)
1535 && REGNO (crtl
->args
.internal_arg_pointer
) > LAST_VIRTUAL_REGISTER
)
1536 emit_insn_before (seq
, parm_birth_insn
);
1538 emit_insn_before (seq
, NEXT_INSN (entry_of_function ()));
1539 pop_topmost_sequence ();
1544 /* Perform an untyped call and save the state required to perform an
1545 untyped return of whatever value was returned by the given function. */
1548 expand_builtin_apply (rtx function
, rtx arguments
, rtx argsize
)
1550 int size
, align
, regno
;
1551 enum machine_mode mode
;
1552 rtx incoming_args
, result
, reg
, dest
, src
, call_insn
;
1553 rtx old_stack_level
= 0;
1554 rtx call_fusage
= 0;
1555 rtx struct_value
= targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0);
1557 arguments
= convert_memory_address (Pmode
, arguments
);
1559 /* Create a block where the return registers can be saved. */
1560 result
= assign_stack_local (BLKmode
, apply_result_size (), -1);
1562 /* Fetch the arg pointer from the ARGUMENTS block. */
1563 incoming_args
= gen_reg_rtx (Pmode
);
1564 emit_move_insn (incoming_args
, gen_rtx_MEM (Pmode
, arguments
));
1565 #ifndef STACK_GROWS_DOWNWARD
1566 incoming_args
= expand_simple_binop (Pmode
, MINUS
, incoming_args
, argsize
,
1567 incoming_args
, 0, OPTAB_LIB_WIDEN
);
1570 /* Push a new argument block and copy the arguments. Do not allow
1571 the (potential) memcpy call below to interfere with our stack
1573 do_pending_stack_adjust ();
1576 /* Save the stack with nonlocal if available. */
1577 #ifdef HAVE_save_stack_nonlocal
1578 if (HAVE_save_stack_nonlocal
)
1579 emit_stack_save (SAVE_NONLOCAL
, &old_stack_level
);
1582 emit_stack_save (SAVE_BLOCK
, &old_stack_level
);
1584 /* Allocate a block of memory onto the stack and copy the memory
1585 arguments to the outgoing arguments address. We can pass TRUE
1586 as the 4th argument because we just saved the stack pointer
1587 and will restore it right after the call. */
1588 allocate_dynamic_stack_space (argsize
, 0, BIGGEST_ALIGNMENT
, true);
1590 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1591 may have already set current_function_calls_alloca to true.
1592 current_function_calls_alloca won't be set if argsize is zero,
1593 so we have to guarantee need_drap is true here. */
1594 if (SUPPORTS_STACK_ALIGNMENT
)
1595 crtl
->need_drap
= true;
1597 dest
= virtual_outgoing_args_rtx
;
1598 #ifndef STACK_GROWS_DOWNWARD
1599 if (CONST_INT_P (argsize
))
1600 dest
= plus_constant (Pmode
, dest
, -INTVAL (argsize
));
1602 dest
= gen_rtx_PLUS (Pmode
, dest
, negate_rtx (Pmode
, argsize
));
1604 dest
= gen_rtx_MEM (BLKmode
, dest
);
1605 set_mem_align (dest
, PARM_BOUNDARY
);
1606 src
= gen_rtx_MEM (BLKmode
, incoming_args
);
1607 set_mem_align (src
, PARM_BOUNDARY
);
1608 emit_block_move (dest
, src
, argsize
, BLOCK_OP_NORMAL
);
1610 /* Refer to the argument block. */
1612 arguments
= gen_rtx_MEM (BLKmode
, arguments
);
1613 set_mem_align (arguments
, PARM_BOUNDARY
);
1615 /* Walk past the arg-pointer and structure value address. */
1616 size
= GET_MODE_SIZE (Pmode
);
1618 size
+= GET_MODE_SIZE (Pmode
);
1620 /* Restore each of the registers previously saved. Make USE insns
1621 for each of these registers for use in making the call. */
1622 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1623 if ((mode
= apply_args_mode
[regno
]) != VOIDmode
)
1625 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1626 if (size
% align
!= 0)
1627 size
= CEIL (size
, align
) * align
;
1628 reg
= gen_rtx_REG (mode
, regno
);
1629 emit_move_insn (reg
, adjust_address (arguments
, mode
, size
));
1630 use_reg (&call_fusage
, reg
);
1631 size
+= GET_MODE_SIZE (mode
);
1634 /* Restore the structure value address unless this is passed as an
1635 "invisible" first argument. */
1636 size
= GET_MODE_SIZE (Pmode
);
1639 rtx value
= gen_reg_rtx (Pmode
);
1640 emit_move_insn (value
, adjust_address (arguments
, Pmode
, size
));
1641 emit_move_insn (struct_value
, value
);
1642 if (REG_P (struct_value
))
1643 use_reg (&call_fusage
, struct_value
);
1644 size
+= GET_MODE_SIZE (Pmode
);
1647 /* All arguments and registers used for the call are set up by now! */
1648 function
= prepare_call_address (NULL
, function
, NULL
, &call_fusage
, 0, 0);
1650 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1651 and we don't want to load it into a register as an optimization,
1652 because prepare_call_address already did it if it should be done. */
1653 if (GET_CODE (function
) != SYMBOL_REF
)
1654 function
= memory_address (FUNCTION_MODE
, function
);
1656 /* Generate the actual call instruction and save the return value. */
1657 #ifdef HAVE_untyped_call
1658 if (HAVE_untyped_call
)
1659 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE
, function
),
1660 result
, result_vector (1, result
)));
1663 #ifdef HAVE_call_value
1664 if (HAVE_call_value
)
1668 /* Locate the unique return register. It is not possible to
1669 express a call that sets more than one return register using
1670 call_value; use untyped_call for that. In fact, untyped_call
1671 only needs to save the return registers in the given block. */
1672 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1673 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
1675 gcc_assert (!valreg
); /* HAVE_untyped_call required. */
1677 valreg
= gen_rtx_REG (mode
, regno
);
1680 emit_call_insn (GEN_CALL_VALUE (valreg
,
1681 gen_rtx_MEM (FUNCTION_MODE
, function
),
1682 const0_rtx
, NULL_RTX
, const0_rtx
));
1684 emit_move_insn (adjust_address (result
, GET_MODE (valreg
), 0), valreg
);
1690 /* Find the CALL insn we just emitted, and attach the register usage
1692 call_insn
= last_call_insn ();
1693 add_function_usage_to (call_insn
, call_fusage
);
1695 /* Restore the stack. */
1696 #ifdef HAVE_save_stack_nonlocal
1697 if (HAVE_save_stack_nonlocal
)
1698 emit_stack_restore (SAVE_NONLOCAL
, old_stack_level
);
1701 emit_stack_restore (SAVE_BLOCK
, old_stack_level
);
1702 fixup_args_size_notes (call_insn
, get_last_insn (), 0);
1706 /* Return the address of the result block. */
1707 result
= copy_addr_to_reg (XEXP (result
, 0));
1708 return convert_memory_address (ptr_mode
, result
);
1711 /* Perform an untyped return. */
1714 expand_builtin_return (rtx result
)
1716 int size
, align
, regno
;
1717 enum machine_mode mode
;
1719 rtx call_fusage
= 0;
1721 result
= convert_memory_address (Pmode
, result
);
1723 apply_result_size ();
1724 result
= gen_rtx_MEM (BLKmode
, result
);
1726 #ifdef HAVE_untyped_return
1727 if (HAVE_untyped_return
)
1729 emit_jump_insn (gen_untyped_return (result
, result_vector (0, result
)));
1735 /* Restore the return value and note that each value is used. */
1737 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1738 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
1740 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1741 if (size
% align
!= 0)
1742 size
= CEIL (size
, align
) * align
;
1743 reg
= gen_rtx_REG (mode
, INCOMING_REGNO (regno
));
1744 emit_move_insn (reg
, adjust_address (result
, mode
, size
));
1746 push_to_sequence (call_fusage
);
1748 call_fusage
= get_insns ();
1750 size
+= GET_MODE_SIZE (mode
);
1753 /* Put the USE insns before the return. */
1754 emit_insn (call_fusage
);
1756 /* Return whatever values was restored by jumping directly to the end
1758 expand_naked_return ();
1761 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1763 static enum type_class
1764 type_to_class (tree type
)
1766 switch (TREE_CODE (type
))
1768 case VOID_TYPE
: return void_type_class
;
1769 case INTEGER_TYPE
: return integer_type_class
;
1770 case ENUMERAL_TYPE
: return enumeral_type_class
;
1771 case BOOLEAN_TYPE
: return boolean_type_class
;
1772 case POINTER_TYPE
: return pointer_type_class
;
1773 case REFERENCE_TYPE
: return reference_type_class
;
1774 case OFFSET_TYPE
: return offset_type_class
;
1775 case REAL_TYPE
: return real_type_class
;
1776 case COMPLEX_TYPE
: return complex_type_class
;
1777 case FUNCTION_TYPE
: return function_type_class
;
1778 case METHOD_TYPE
: return method_type_class
;
1779 case RECORD_TYPE
: return record_type_class
;
1781 case QUAL_UNION_TYPE
: return union_type_class
;
1782 case ARRAY_TYPE
: return (TYPE_STRING_FLAG (type
)
1783 ? string_type_class
: array_type_class
);
1784 case LANG_TYPE
: return lang_type_class
;
1785 default: return no_type_class
;
1789 /* Expand a call EXP to __builtin_classify_type. */
1792 expand_builtin_classify_type (tree exp
)
1794 if (call_expr_nargs (exp
))
1795 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp
, 0))));
1796 return GEN_INT (no_type_class
);
1799 /* This helper macro, meant to be used in mathfn_built_in below,
1800 determines which among a set of three builtin math functions is
1801 appropriate for a given type mode. The `F' and `L' cases are
1802 automatically generated from the `double' case. */
1803 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1804 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1805 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1806 fcodel = BUILT_IN_MATHFN##L ; break;
1807 /* Similar to above, but appends _R after any F/L suffix. */
1808 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1809 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1810 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1811 fcodel = BUILT_IN_MATHFN##L_R ; break;
1813 /* Return mathematic function equivalent to FN but operating directly on TYPE,
1814 if available. If IMPLICIT is true use the implicit builtin declaration,
1815 otherwise use the explicit declaration. If we can't do the conversion,
1819 mathfn_built_in_1 (tree type
, enum built_in_function fn
, bool implicit_p
)
1821 enum built_in_function fcode
, fcodef
, fcodel
, fcode2
;
1825 CASE_MATHFN (BUILT_IN_ACOS
)
1826 CASE_MATHFN (BUILT_IN_ACOSH
)
1827 CASE_MATHFN (BUILT_IN_ASIN
)
1828 CASE_MATHFN (BUILT_IN_ASINH
)
1829 CASE_MATHFN (BUILT_IN_ATAN
)
1830 CASE_MATHFN (BUILT_IN_ATAN2
)
1831 CASE_MATHFN (BUILT_IN_ATANH
)
1832 CASE_MATHFN (BUILT_IN_CBRT
)
1833 CASE_MATHFN (BUILT_IN_CEIL
)
1834 CASE_MATHFN (BUILT_IN_CEXPI
)
1835 CASE_MATHFN (BUILT_IN_COPYSIGN
)
1836 CASE_MATHFN (BUILT_IN_COS
)
1837 CASE_MATHFN (BUILT_IN_COSH
)
1838 CASE_MATHFN (BUILT_IN_DREM
)
1839 CASE_MATHFN (BUILT_IN_ERF
)
1840 CASE_MATHFN (BUILT_IN_ERFC
)
1841 CASE_MATHFN (BUILT_IN_EXP
)
1842 CASE_MATHFN (BUILT_IN_EXP10
)
1843 CASE_MATHFN (BUILT_IN_EXP2
)
1844 CASE_MATHFN (BUILT_IN_EXPM1
)
1845 CASE_MATHFN (BUILT_IN_FABS
)
1846 CASE_MATHFN (BUILT_IN_FDIM
)
1847 CASE_MATHFN (BUILT_IN_FLOOR
)
1848 CASE_MATHFN (BUILT_IN_FMA
)
1849 CASE_MATHFN (BUILT_IN_FMAX
)
1850 CASE_MATHFN (BUILT_IN_FMIN
)
1851 CASE_MATHFN (BUILT_IN_FMOD
)
1852 CASE_MATHFN (BUILT_IN_FREXP
)
1853 CASE_MATHFN (BUILT_IN_GAMMA
)
1854 CASE_MATHFN_REENT (BUILT_IN_GAMMA
) /* GAMMA_R */
1855 CASE_MATHFN (BUILT_IN_HUGE_VAL
)
1856 CASE_MATHFN (BUILT_IN_HYPOT
)
1857 CASE_MATHFN (BUILT_IN_ILOGB
)
1858 CASE_MATHFN (BUILT_IN_ICEIL
)
1859 CASE_MATHFN (BUILT_IN_IFLOOR
)
1860 CASE_MATHFN (BUILT_IN_INF
)
1861 CASE_MATHFN (BUILT_IN_IRINT
)
1862 CASE_MATHFN (BUILT_IN_IROUND
)
1863 CASE_MATHFN (BUILT_IN_ISINF
)
1864 CASE_MATHFN (BUILT_IN_J0
)
1865 CASE_MATHFN (BUILT_IN_J1
)
1866 CASE_MATHFN (BUILT_IN_JN
)
1867 CASE_MATHFN (BUILT_IN_LCEIL
)
1868 CASE_MATHFN (BUILT_IN_LDEXP
)
1869 CASE_MATHFN (BUILT_IN_LFLOOR
)
1870 CASE_MATHFN (BUILT_IN_LGAMMA
)
1871 CASE_MATHFN_REENT (BUILT_IN_LGAMMA
) /* LGAMMA_R */
1872 CASE_MATHFN (BUILT_IN_LLCEIL
)
1873 CASE_MATHFN (BUILT_IN_LLFLOOR
)
1874 CASE_MATHFN (BUILT_IN_LLRINT
)
1875 CASE_MATHFN (BUILT_IN_LLROUND
)
1876 CASE_MATHFN (BUILT_IN_LOG
)
1877 CASE_MATHFN (BUILT_IN_LOG10
)
1878 CASE_MATHFN (BUILT_IN_LOG1P
)
1879 CASE_MATHFN (BUILT_IN_LOG2
)
1880 CASE_MATHFN (BUILT_IN_LOGB
)
1881 CASE_MATHFN (BUILT_IN_LRINT
)
1882 CASE_MATHFN (BUILT_IN_LROUND
)
1883 CASE_MATHFN (BUILT_IN_MODF
)
1884 CASE_MATHFN (BUILT_IN_NAN
)
1885 CASE_MATHFN (BUILT_IN_NANS
)
1886 CASE_MATHFN (BUILT_IN_NEARBYINT
)
1887 CASE_MATHFN (BUILT_IN_NEXTAFTER
)
1888 CASE_MATHFN (BUILT_IN_NEXTTOWARD
)
1889 CASE_MATHFN (BUILT_IN_POW
)
1890 CASE_MATHFN (BUILT_IN_POWI
)
1891 CASE_MATHFN (BUILT_IN_POW10
)
1892 CASE_MATHFN (BUILT_IN_REMAINDER
)
1893 CASE_MATHFN (BUILT_IN_REMQUO
)
1894 CASE_MATHFN (BUILT_IN_RINT
)
1895 CASE_MATHFN (BUILT_IN_ROUND
)
1896 CASE_MATHFN (BUILT_IN_SCALB
)
1897 CASE_MATHFN (BUILT_IN_SCALBLN
)
1898 CASE_MATHFN (BUILT_IN_SCALBN
)
1899 CASE_MATHFN (BUILT_IN_SIGNBIT
)
1900 CASE_MATHFN (BUILT_IN_SIGNIFICAND
)
1901 CASE_MATHFN (BUILT_IN_SIN
)
1902 CASE_MATHFN (BUILT_IN_SINCOS
)
1903 CASE_MATHFN (BUILT_IN_SINH
)
1904 CASE_MATHFN (BUILT_IN_SQRT
)
1905 CASE_MATHFN (BUILT_IN_TAN
)
1906 CASE_MATHFN (BUILT_IN_TANH
)
1907 CASE_MATHFN (BUILT_IN_TGAMMA
)
1908 CASE_MATHFN (BUILT_IN_TRUNC
)
1909 CASE_MATHFN (BUILT_IN_Y0
)
1910 CASE_MATHFN (BUILT_IN_Y1
)
1911 CASE_MATHFN (BUILT_IN_YN
)
1917 if (TYPE_MAIN_VARIANT (type
) == double_type_node
)
1919 else if (TYPE_MAIN_VARIANT (type
) == float_type_node
)
1921 else if (TYPE_MAIN_VARIANT (type
) == long_double_type_node
)
1926 if (implicit_p
&& !builtin_decl_implicit_p (fcode2
))
1929 return builtin_decl_explicit (fcode2
);
1932 /* Like mathfn_built_in_1(), but always use the implicit array. */
1935 mathfn_built_in (tree type
, enum built_in_function fn
)
1937 return mathfn_built_in_1 (type
, fn
, /*implicit=*/ 1);
1940 /* If errno must be maintained, expand the RTL to check if the result,
1941 TARGET, of a built-in function call, EXP, is NaN, and if so set
1945 expand_errno_check (tree exp
, rtx target
)
1947 rtx lab
= gen_label_rtx ();
1949 /* Test the result; if it is NaN, set errno=EDOM because
1950 the argument was not in the domain. */
1951 do_compare_rtx_and_jump (target
, target
, EQ
, 0, GET_MODE (target
),
1952 NULL_RTX
, NULL_RTX
, lab
,
1953 /* The jump is very likely. */
1954 REG_BR_PROB_BASE
- (REG_BR_PROB_BASE
/ 2000 - 1));
1957 /* If this built-in doesn't throw an exception, set errno directly. */
1958 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp
), 0)))
1960 #ifdef GEN_ERRNO_RTX
1961 rtx errno_rtx
= GEN_ERRNO_RTX
;
1964 = gen_rtx_MEM (word_mode
, gen_rtx_SYMBOL_REF (Pmode
, "errno"));
1966 emit_move_insn (errno_rtx
,
1967 gen_int_mode (TARGET_EDOM
, GET_MODE (errno_rtx
)));
1973 /* Make sure the library call isn't expanded as a tail call. */
1974 CALL_EXPR_TAILCALL (exp
) = 0;
1976 /* We can't set errno=EDOM directly; let the library call do it.
1977 Pop the arguments right away in case the call gets deleted. */
1979 expand_call (exp
, target
, 0);
1984 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1985 Return NULL_RTX if a normal call should be emitted rather than expanding
1986 the function in-line. EXP is the expression that is a call to the builtin
1987 function; if convenient, the result should be placed in TARGET.
1988 SUBTARGET may be used as the target for computing one of EXP's operands. */
1991 expand_builtin_mathfn (tree exp
, rtx target
, rtx subtarget
)
1993 optab builtin_optab
;
1995 tree fndecl
= get_callee_fndecl (exp
);
1996 enum machine_mode mode
;
1997 bool errno_set
= false;
1998 bool try_widening
= false;
2001 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2004 arg
= CALL_EXPR_ARG (exp
, 0);
2006 switch (DECL_FUNCTION_CODE (fndecl
))
2008 CASE_FLT_FN (BUILT_IN_SQRT
):
2009 errno_set
= ! tree_expr_nonnegative_p (arg
);
2010 try_widening
= true;
2011 builtin_optab
= sqrt_optab
;
2013 CASE_FLT_FN (BUILT_IN_EXP
):
2014 errno_set
= true; builtin_optab
= exp_optab
; break;
2015 CASE_FLT_FN (BUILT_IN_EXP10
):
2016 CASE_FLT_FN (BUILT_IN_POW10
):
2017 errno_set
= true; builtin_optab
= exp10_optab
; break;
2018 CASE_FLT_FN (BUILT_IN_EXP2
):
2019 errno_set
= true; builtin_optab
= exp2_optab
; break;
2020 CASE_FLT_FN (BUILT_IN_EXPM1
):
2021 errno_set
= true; builtin_optab
= expm1_optab
; break;
2022 CASE_FLT_FN (BUILT_IN_LOGB
):
2023 errno_set
= true; builtin_optab
= logb_optab
; break;
2024 CASE_FLT_FN (BUILT_IN_LOG
):
2025 errno_set
= true; builtin_optab
= log_optab
; break;
2026 CASE_FLT_FN (BUILT_IN_LOG10
):
2027 errno_set
= true; builtin_optab
= log10_optab
; break;
2028 CASE_FLT_FN (BUILT_IN_LOG2
):
2029 errno_set
= true; builtin_optab
= log2_optab
; break;
2030 CASE_FLT_FN (BUILT_IN_LOG1P
):
2031 errno_set
= true; builtin_optab
= log1p_optab
; break;
2032 CASE_FLT_FN (BUILT_IN_ASIN
):
2033 builtin_optab
= asin_optab
; break;
2034 CASE_FLT_FN (BUILT_IN_ACOS
):
2035 builtin_optab
= acos_optab
; break;
2036 CASE_FLT_FN (BUILT_IN_TAN
):
2037 builtin_optab
= tan_optab
; break;
2038 CASE_FLT_FN (BUILT_IN_ATAN
):
2039 builtin_optab
= atan_optab
; break;
2040 CASE_FLT_FN (BUILT_IN_FLOOR
):
2041 builtin_optab
= floor_optab
; break;
2042 CASE_FLT_FN (BUILT_IN_CEIL
):
2043 builtin_optab
= ceil_optab
; break;
2044 CASE_FLT_FN (BUILT_IN_TRUNC
):
2045 builtin_optab
= btrunc_optab
; break;
2046 CASE_FLT_FN (BUILT_IN_ROUND
):
2047 builtin_optab
= round_optab
; break;
2048 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
2049 builtin_optab
= nearbyint_optab
;
2050 if (flag_trapping_math
)
2052 /* Else fallthrough and expand as rint. */
2053 CASE_FLT_FN (BUILT_IN_RINT
):
2054 builtin_optab
= rint_optab
; break;
2055 CASE_FLT_FN (BUILT_IN_SIGNIFICAND
):
2056 builtin_optab
= significand_optab
; break;
2061 /* Make a suitable register to place result in. */
2062 mode
= TYPE_MODE (TREE_TYPE (exp
));
2064 if (! flag_errno_math
|| ! HONOR_NANS (mode
))
2067 /* Before working hard, check whether the instruction is available, but try
2068 to widen the mode for specific operations. */
2069 if ((optab_handler (builtin_optab
, mode
) != CODE_FOR_nothing
2070 || (try_widening
&& !excess_precision_type (TREE_TYPE (exp
))))
2071 && (!errno_set
|| !optimize_insn_for_size_p ()))
2073 rtx result
= gen_reg_rtx (mode
);
2075 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2076 need to expand the argument again. This way, we will not perform
2077 side-effects more the once. */
2078 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2080 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2084 /* Compute into RESULT.
2085 Set RESULT to wherever the result comes back. */
2086 result
= expand_unop (mode
, builtin_optab
, op0
, result
, 0);
2091 expand_errno_check (exp
, result
);
2093 /* Output the entire sequence. */
2094 insns
= get_insns ();
2100 /* If we were unable to expand via the builtin, stop the sequence
2101 (without outputting the insns) and call to the library function
2102 with the stabilized argument list. */
2106 return expand_call (exp
, target
, target
== const0_rtx
);
2109 /* Expand a call to the builtin binary math functions (pow and atan2).
2110 Return NULL_RTX if a normal call should be emitted rather than expanding the
2111 function in-line. EXP is the expression that is a call to the builtin
2112 function; if convenient, the result should be placed in TARGET.
2113 SUBTARGET may be used as the target for computing one of EXP's
2117 expand_builtin_mathfn_2 (tree exp
, rtx target
, rtx subtarget
)
2119 optab builtin_optab
;
2120 rtx op0
, op1
, insns
, result
;
2121 int op1_type
= REAL_TYPE
;
2122 tree fndecl
= get_callee_fndecl (exp
);
2124 enum machine_mode mode
;
2125 bool errno_set
= true;
2127 switch (DECL_FUNCTION_CODE (fndecl
))
2129 CASE_FLT_FN (BUILT_IN_SCALBN
):
2130 CASE_FLT_FN (BUILT_IN_SCALBLN
):
2131 CASE_FLT_FN (BUILT_IN_LDEXP
):
2132 op1_type
= INTEGER_TYPE
;
2137 if (!validate_arglist (exp
, REAL_TYPE
, op1_type
, VOID_TYPE
))
2140 arg0
= CALL_EXPR_ARG (exp
, 0);
2141 arg1
= CALL_EXPR_ARG (exp
, 1);
2143 switch (DECL_FUNCTION_CODE (fndecl
))
2145 CASE_FLT_FN (BUILT_IN_POW
):
2146 builtin_optab
= pow_optab
; break;
2147 CASE_FLT_FN (BUILT_IN_ATAN2
):
2148 builtin_optab
= atan2_optab
; break;
2149 CASE_FLT_FN (BUILT_IN_SCALB
):
2150 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp
)))->b
!= 2)
2152 builtin_optab
= scalb_optab
; break;
2153 CASE_FLT_FN (BUILT_IN_SCALBN
):
2154 CASE_FLT_FN (BUILT_IN_SCALBLN
):
2155 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp
)))->b
!= 2)
2157 /* Fall through... */
2158 CASE_FLT_FN (BUILT_IN_LDEXP
):
2159 builtin_optab
= ldexp_optab
; break;
2160 CASE_FLT_FN (BUILT_IN_FMOD
):
2161 builtin_optab
= fmod_optab
; break;
2162 CASE_FLT_FN (BUILT_IN_REMAINDER
):
2163 CASE_FLT_FN (BUILT_IN_DREM
):
2164 builtin_optab
= remainder_optab
; break;
2169 /* Make a suitable register to place result in. */
2170 mode
= TYPE_MODE (TREE_TYPE (exp
));
2172 /* Before working hard, check whether the instruction is available. */
2173 if (optab_handler (builtin_optab
, mode
) == CODE_FOR_nothing
)
2176 result
= gen_reg_rtx (mode
);
2178 if (! flag_errno_math
|| ! HONOR_NANS (mode
))
2181 if (errno_set
&& optimize_insn_for_size_p ())
2184 /* Always stabilize the argument list. */
2185 CALL_EXPR_ARG (exp
, 0) = arg0
= builtin_save_expr (arg0
);
2186 CALL_EXPR_ARG (exp
, 1) = arg1
= builtin_save_expr (arg1
);
2188 op0
= expand_expr (arg0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2189 op1
= expand_normal (arg1
);
2193 /* Compute into RESULT.
2194 Set RESULT to wherever the result comes back. */
2195 result
= expand_binop (mode
, builtin_optab
, op0
, op1
,
2196 result
, 0, OPTAB_DIRECT
);
2198 /* If we were unable to expand via the builtin, stop the sequence
2199 (without outputting the insns) and call to the library function
2200 with the stabilized argument list. */
2204 return expand_call (exp
, target
, target
== const0_rtx
);
2208 expand_errno_check (exp
, result
);
2210 /* Output the entire sequence. */
2211 insns
= get_insns ();
2218 /* Expand a call to the builtin trinary math functions (fma).
2219 Return NULL_RTX if a normal call should be emitted rather than expanding the
2220 function in-line. EXP is the expression that is a call to the builtin
2221 function; if convenient, the result should be placed in TARGET.
2222 SUBTARGET may be used as the target for computing one of EXP's
2226 expand_builtin_mathfn_ternary (tree exp
, rtx target
, rtx subtarget
)
2228 optab builtin_optab
;
2229 rtx op0
, op1
, op2
, insns
, result
;
2230 tree fndecl
= get_callee_fndecl (exp
);
2231 tree arg0
, arg1
, arg2
;
2232 enum machine_mode mode
;
2234 if (!validate_arglist (exp
, REAL_TYPE
, REAL_TYPE
, REAL_TYPE
, VOID_TYPE
))
2237 arg0
= CALL_EXPR_ARG (exp
, 0);
2238 arg1
= CALL_EXPR_ARG (exp
, 1);
2239 arg2
= CALL_EXPR_ARG (exp
, 2);
2241 switch (DECL_FUNCTION_CODE (fndecl
))
2243 CASE_FLT_FN (BUILT_IN_FMA
):
2244 builtin_optab
= fma_optab
; break;
2249 /* Make a suitable register to place result in. */
2250 mode
= TYPE_MODE (TREE_TYPE (exp
));
2252 /* Before working hard, check whether the instruction is available. */
2253 if (optab_handler (builtin_optab
, mode
) == CODE_FOR_nothing
)
2256 result
= gen_reg_rtx (mode
);
2258 /* Always stabilize the argument list. */
2259 CALL_EXPR_ARG (exp
, 0) = arg0
= builtin_save_expr (arg0
);
2260 CALL_EXPR_ARG (exp
, 1) = arg1
= builtin_save_expr (arg1
);
2261 CALL_EXPR_ARG (exp
, 2) = arg2
= builtin_save_expr (arg2
);
2263 op0
= expand_expr (arg0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2264 op1
= expand_normal (arg1
);
2265 op2
= expand_normal (arg2
);
2269 /* Compute into RESULT.
2270 Set RESULT to wherever the result comes back. */
2271 result
= expand_ternary_op (mode
, builtin_optab
, op0
, op1
, op2
,
2274 /* If we were unable to expand via the builtin, stop the sequence
2275 (without outputting the insns) and call to the library function
2276 with the stabilized argument list. */
2280 return expand_call (exp
, target
, target
== const0_rtx
);
2283 /* Output the entire sequence. */
2284 insns
= get_insns ();
2291 /* Expand a call to the builtin sin and cos math functions.
2292 Return NULL_RTX if a normal call should be emitted rather than expanding the
2293 function in-line. EXP is the expression that is a call to the builtin
2294 function; if convenient, the result should be placed in TARGET.
2295 SUBTARGET may be used as the target for computing one of EXP's
2299 expand_builtin_mathfn_3 (tree exp
, rtx target
, rtx subtarget
)
2301 optab builtin_optab
;
2303 tree fndecl
= get_callee_fndecl (exp
);
2304 enum machine_mode mode
;
2307 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2310 arg
= CALL_EXPR_ARG (exp
, 0);
2312 switch (DECL_FUNCTION_CODE (fndecl
))
2314 CASE_FLT_FN (BUILT_IN_SIN
):
2315 CASE_FLT_FN (BUILT_IN_COS
):
2316 builtin_optab
= sincos_optab
; break;
2321 /* Make a suitable register to place result in. */
2322 mode
= TYPE_MODE (TREE_TYPE (exp
));
2324 /* Check if sincos insn is available, otherwise fallback
2325 to sin or cos insn. */
2326 if (optab_handler (builtin_optab
, mode
) == CODE_FOR_nothing
)
2327 switch (DECL_FUNCTION_CODE (fndecl
))
2329 CASE_FLT_FN (BUILT_IN_SIN
):
2330 builtin_optab
= sin_optab
; break;
2331 CASE_FLT_FN (BUILT_IN_COS
):
2332 builtin_optab
= cos_optab
; break;
2337 /* Before working hard, check whether the instruction is available. */
2338 if (optab_handler (builtin_optab
, mode
) != CODE_FOR_nothing
)
2340 rtx result
= gen_reg_rtx (mode
);
2342 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2343 need to expand the argument again. This way, we will not perform
2344 side-effects more the once. */
2345 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2347 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2351 /* Compute into RESULT.
2352 Set RESULT to wherever the result comes back. */
2353 if (builtin_optab
== sincos_optab
)
2357 switch (DECL_FUNCTION_CODE (fndecl
))
2359 CASE_FLT_FN (BUILT_IN_SIN
):
2360 ok
= expand_twoval_unop (builtin_optab
, op0
, 0, result
, 0);
2362 CASE_FLT_FN (BUILT_IN_COS
):
2363 ok
= expand_twoval_unop (builtin_optab
, op0
, result
, 0, 0);
2371 result
= expand_unop (mode
, builtin_optab
, op0
, result
, 0);
2375 /* Output the entire sequence. */
2376 insns
= get_insns ();
2382 /* If we were unable to expand via the builtin, stop the sequence
2383 (without outputting the insns) and call to the library function
2384 with the stabilized argument list. */
2388 return expand_call (exp
, target
, target
== const0_rtx
);
2391 /* Given an interclass math builtin decl FNDECL and it's argument ARG
2392 return an RTL instruction code that implements the functionality.
2393 If that isn't possible or available return CODE_FOR_nothing. */
2395 static enum insn_code
2396 interclass_mathfn_icode (tree arg
, tree fndecl
)
2398 bool errno_set
= false;
2399 optab builtin_optab
= unknown_optab
;
2400 enum machine_mode mode
;
2402 switch (DECL_FUNCTION_CODE (fndecl
))
2404 CASE_FLT_FN (BUILT_IN_ILOGB
):
2405 errno_set
= true; builtin_optab
= ilogb_optab
; break;
2406 CASE_FLT_FN (BUILT_IN_ISINF
):
2407 builtin_optab
= isinf_optab
; break;
2408 case BUILT_IN_ISNORMAL
:
2409 case BUILT_IN_ISFINITE
:
2410 CASE_FLT_FN (BUILT_IN_FINITE
):
2411 case BUILT_IN_FINITED32
:
2412 case BUILT_IN_FINITED64
:
2413 case BUILT_IN_FINITED128
:
2414 case BUILT_IN_ISINFD32
:
2415 case BUILT_IN_ISINFD64
:
2416 case BUILT_IN_ISINFD128
:
2417 /* These builtins have no optabs (yet). */
2423 /* There's no easy way to detect the case we need to set EDOM. */
2424 if (flag_errno_math
&& errno_set
)
2425 return CODE_FOR_nothing
;
2427 /* Optab mode depends on the mode of the input argument. */
2428 mode
= TYPE_MODE (TREE_TYPE (arg
));
2431 return optab_handler (builtin_optab
, mode
);
2432 return CODE_FOR_nothing
;
2435 /* Expand a call to one of the builtin math functions that operate on
2436 floating point argument and output an integer result (ilogb, isinf,
2438 Return 0 if a normal call should be emitted rather than expanding the
2439 function in-line. EXP is the expression that is a call to the builtin
2440 function; if convenient, the result should be placed in TARGET. */
2443 expand_builtin_interclass_mathfn (tree exp
, rtx target
)
2445 enum insn_code icode
= CODE_FOR_nothing
;
2447 tree fndecl
= get_callee_fndecl (exp
);
2448 enum machine_mode mode
;
2451 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2454 arg
= CALL_EXPR_ARG (exp
, 0);
2455 icode
= interclass_mathfn_icode (arg
, fndecl
);
2456 mode
= TYPE_MODE (TREE_TYPE (arg
));
2458 if (icode
!= CODE_FOR_nothing
)
2460 struct expand_operand ops
[1];
2461 rtx last
= get_last_insn ();
2462 tree orig_arg
= arg
;
2464 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2465 need to expand the argument again. This way, we will not perform
2466 side-effects more the once. */
2467 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2469 op0
= expand_expr (arg
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
2471 if (mode
!= GET_MODE (op0
))
2472 op0
= convert_to_mode (mode
, op0
, 0);
2474 create_output_operand (&ops
[0], target
, TYPE_MODE (TREE_TYPE (exp
)));
2475 if (maybe_legitimize_operands (icode
, 0, 1, ops
)
2476 && maybe_emit_unop_insn (icode
, ops
[0].value
, op0
, UNKNOWN
))
2477 return ops
[0].value
;
2479 delete_insns_since (last
);
2480 CALL_EXPR_ARG (exp
, 0) = orig_arg
;
2486 /* Expand a call to the builtin sincos math function.
2487 Return NULL_RTX if a normal call should be emitted rather than expanding the
2488 function in-line. EXP is the expression that is a call to the builtin
2492 expand_builtin_sincos (tree exp
)
2494 rtx op0
, op1
, op2
, target1
, target2
;
2495 enum machine_mode mode
;
2496 tree arg
, sinp
, cosp
;
2498 location_t loc
= EXPR_LOCATION (exp
);
2499 tree alias_type
, alias_off
;
2501 if (!validate_arglist (exp
, REAL_TYPE
,
2502 POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
2505 arg
= CALL_EXPR_ARG (exp
, 0);
2506 sinp
= CALL_EXPR_ARG (exp
, 1);
2507 cosp
= CALL_EXPR_ARG (exp
, 2);
2509 /* Make a suitable register to place result in. */
2510 mode
= TYPE_MODE (TREE_TYPE (arg
));
2512 /* Check if sincos insn is available, otherwise emit the call. */
2513 if (optab_handler (sincos_optab
, mode
) == CODE_FOR_nothing
)
2516 target1
= gen_reg_rtx (mode
);
2517 target2
= gen_reg_rtx (mode
);
2519 op0
= expand_normal (arg
);
2520 alias_type
= build_pointer_type_for_mode (TREE_TYPE (arg
), ptr_mode
, true);
2521 alias_off
= build_int_cst (alias_type
, 0);
2522 op1
= expand_normal (fold_build2_loc (loc
, MEM_REF
, TREE_TYPE (arg
),
2524 op2
= expand_normal (fold_build2_loc (loc
, MEM_REF
, TREE_TYPE (arg
),
2527 /* Compute into target1 and target2.
2528 Set TARGET to wherever the result comes back. */
2529 result
= expand_twoval_unop (sincos_optab
, op0
, target2
, target1
, 0);
2530 gcc_assert (result
);
2532 /* Move target1 and target2 to the memory locations indicated
2534 emit_move_insn (op1
, target1
);
2535 emit_move_insn (op2
, target2
);
2540 /* Expand a call to the internal cexpi builtin to the sincos math function.
2541 EXP is the expression that is a call to the builtin function; if convenient,
2542 the result should be placed in TARGET. */
2545 expand_builtin_cexpi (tree exp
, rtx target
)
2547 tree fndecl
= get_callee_fndecl (exp
);
2549 enum machine_mode mode
;
2551 location_t loc
= EXPR_LOCATION (exp
);
2553 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2556 arg
= CALL_EXPR_ARG (exp
, 0);
2557 type
= TREE_TYPE (arg
);
2558 mode
= TYPE_MODE (TREE_TYPE (arg
));
2560 /* Try expanding via a sincos optab, fall back to emitting a libcall
2561 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2562 is only generated from sincos, cexp or if we have either of them. */
2563 if (optab_handler (sincos_optab
, mode
) != CODE_FOR_nothing
)
2565 op1
= gen_reg_rtx (mode
);
2566 op2
= gen_reg_rtx (mode
);
2568 op0
= expand_expr (arg
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
2570 /* Compute into op1 and op2. */
2571 expand_twoval_unop (sincos_optab
, op0
, op2
, op1
, 0);
2573 else if (targetm
.libc_has_function (function_sincos
))
2575 tree call
, fn
= NULL_TREE
;
2579 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2580 fn
= builtin_decl_explicit (BUILT_IN_SINCOSF
);
2581 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2582 fn
= builtin_decl_explicit (BUILT_IN_SINCOS
);
2583 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2584 fn
= builtin_decl_explicit (BUILT_IN_SINCOSL
);
2588 op1
= assign_temp (TREE_TYPE (arg
), 1, 1);
2589 op2
= assign_temp (TREE_TYPE (arg
), 1, 1);
2590 op1a
= copy_addr_to_reg (XEXP (op1
, 0));
2591 op2a
= copy_addr_to_reg (XEXP (op2
, 0));
2592 top1
= make_tree (build_pointer_type (TREE_TYPE (arg
)), op1a
);
2593 top2
= make_tree (build_pointer_type (TREE_TYPE (arg
)), op2a
);
2595 /* Make sure not to fold the sincos call again. */
2596 call
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
2597 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn
)),
2598 call
, 3, arg
, top1
, top2
));
2602 tree call
, fn
= NULL_TREE
, narg
;
2603 tree ctype
= build_complex_type (type
);
2605 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2606 fn
= builtin_decl_explicit (BUILT_IN_CEXPF
);
2607 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2608 fn
= builtin_decl_explicit (BUILT_IN_CEXP
);
2609 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2610 fn
= builtin_decl_explicit (BUILT_IN_CEXPL
);
2614 /* If we don't have a decl for cexp create one. This is the
2615 friendliest fallback if the user calls __builtin_cexpi
2616 without full target C99 function support. */
2617 if (fn
== NULL_TREE
)
2620 const char *name
= NULL
;
2622 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2624 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2626 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2629 fntype
= build_function_type_list (ctype
, ctype
, NULL_TREE
);
2630 fn
= build_fn_decl (name
, fntype
);
2633 narg
= fold_build2_loc (loc
, COMPLEX_EXPR
, ctype
,
2634 build_real (type
, dconst0
), arg
);
2636 /* Make sure not to fold the cexp call again. */
2637 call
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
2638 return expand_expr (build_call_nary (ctype
, call
, 1, narg
),
2639 target
, VOIDmode
, EXPAND_NORMAL
);
2642 /* Now build the proper return type. */
2643 return expand_expr (build2 (COMPLEX_EXPR
, build_complex_type (type
),
2644 make_tree (TREE_TYPE (arg
), op2
),
2645 make_tree (TREE_TYPE (arg
), op1
)),
2646 target
, VOIDmode
, EXPAND_NORMAL
);
2649 /* Conveniently construct a function call expression. FNDECL names the
2650 function to be called, N is the number of arguments, and the "..."
2651 parameters are the argument expressions. Unlike build_call_exr
2652 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2655 build_call_nofold_loc (location_t loc
, tree fndecl
, int n
, ...)
2658 tree fntype
= TREE_TYPE (fndecl
);
2659 tree fn
= build1 (ADDR_EXPR
, build_pointer_type (fntype
), fndecl
);
2662 fn
= build_call_valist (TREE_TYPE (fntype
), fn
, n
, ap
);
2664 SET_EXPR_LOCATION (fn
, loc
);
2668 /* Expand a call to one of the builtin rounding functions gcc defines
2669 as an extension (lfloor and lceil). As these are gcc extensions we
2670 do not need to worry about setting errno to EDOM.
2671 If expanding via optab fails, lower expression to (int)(floor(x)).
2672 EXP is the expression that is a call to the builtin function;
2673 if convenient, the result should be placed in TARGET. */
2676 expand_builtin_int_roundingfn (tree exp
, rtx target
)
2678 convert_optab builtin_optab
;
2679 rtx op0
, insns
, tmp
;
2680 tree fndecl
= get_callee_fndecl (exp
);
2681 enum built_in_function fallback_fn
;
2682 tree fallback_fndecl
;
2683 enum machine_mode mode
;
2686 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2689 arg
= CALL_EXPR_ARG (exp
, 0);
2691 switch (DECL_FUNCTION_CODE (fndecl
))
2693 CASE_FLT_FN (BUILT_IN_ICEIL
):
2694 CASE_FLT_FN (BUILT_IN_LCEIL
):
2695 CASE_FLT_FN (BUILT_IN_LLCEIL
):
2696 builtin_optab
= lceil_optab
;
2697 fallback_fn
= BUILT_IN_CEIL
;
2700 CASE_FLT_FN (BUILT_IN_IFLOOR
):
2701 CASE_FLT_FN (BUILT_IN_LFLOOR
):
2702 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
2703 builtin_optab
= lfloor_optab
;
2704 fallback_fn
= BUILT_IN_FLOOR
;
2711 /* Make a suitable register to place result in. */
2712 mode
= TYPE_MODE (TREE_TYPE (exp
));
2714 target
= gen_reg_rtx (mode
);
2716 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2717 need to expand the argument again. This way, we will not perform
2718 side-effects more the once. */
2719 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2721 op0
= expand_expr (arg
, NULL
, VOIDmode
, EXPAND_NORMAL
);
2725 /* Compute into TARGET. */
2726 if (expand_sfix_optab (target
, op0
, builtin_optab
))
2728 /* Output the entire sequence. */
2729 insns
= get_insns ();
2735 /* If we were unable to expand via the builtin, stop the sequence
2736 (without outputting the insns). */
2739 /* Fall back to floating point rounding optab. */
2740 fallback_fndecl
= mathfn_built_in (TREE_TYPE (arg
), fallback_fn
);
2742 /* For non-C99 targets we may end up without a fallback fndecl here
2743 if the user called __builtin_lfloor directly. In this case emit
2744 a call to the floor/ceil variants nevertheless. This should result
2745 in the best user experience for not full C99 targets. */
2746 if (fallback_fndecl
== NULL_TREE
)
2749 const char *name
= NULL
;
2751 switch (DECL_FUNCTION_CODE (fndecl
))
2753 case BUILT_IN_ICEIL
:
2754 case BUILT_IN_LCEIL
:
2755 case BUILT_IN_LLCEIL
:
2758 case BUILT_IN_ICEILF
:
2759 case BUILT_IN_LCEILF
:
2760 case BUILT_IN_LLCEILF
:
2763 case BUILT_IN_ICEILL
:
2764 case BUILT_IN_LCEILL
:
2765 case BUILT_IN_LLCEILL
:
2768 case BUILT_IN_IFLOOR
:
2769 case BUILT_IN_LFLOOR
:
2770 case BUILT_IN_LLFLOOR
:
2773 case BUILT_IN_IFLOORF
:
2774 case BUILT_IN_LFLOORF
:
2775 case BUILT_IN_LLFLOORF
:
2778 case BUILT_IN_IFLOORL
:
2779 case BUILT_IN_LFLOORL
:
2780 case BUILT_IN_LLFLOORL
:
2787 fntype
= build_function_type_list (TREE_TYPE (arg
),
2788 TREE_TYPE (arg
), NULL_TREE
);
2789 fallback_fndecl
= build_fn_decl (name
, fntype
);
2792 exp
= build_call_nofold_loc (EXPR_LOCATION (exp
), fallback_fndecl
, 1, arg
);
2794 tmp
= expand_normal (exp
);
2795 tmp
= maybe_emit_group_store (tmp
, TREE_TYPE (exp
));
2797 /* Truncate the result of floating point optab to integer
2798 via expand_fix (). */
2799 target
= gen_reg_rtx (mode
);
2800 expand_fix (target
, tmp
, 0);
2805 /* Expand a call to one of the builtin math functions doing integer
2807 Return 0 if a normal call should be emitted rather than expanding the
2808 function in-line. EXP is the expression that is a call to the builtin
2809 function; if convenient, the result should be placed in TARGET. */
2812 expand_builtin_int_roundingfn_2 (tree exp
, rtx target
)
2814 convert_optab builtin_optab
;
2816 tree fndecl
= get_callee_fndecl (exp
);
2818 enum machine_mode mode
;
2819 enum built_in_function fallback_fn
= BUILT_IN_NONE
;
2821 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2824 arg
= CALL_EXPR_ARG (exp
, 0);
2826 switch (DECL_FUNCTION_CODE (fndecl
))
2828 CASE_FLT_FN (BUILT_IN_IRINT
):
2829 fallback_fn
= BUILT_IN_LRINT
;
2831 CASE_FLT_FN (BUILT_IN_LRINT
):
2832 CASE_FLT_FN (BUILT_IN_LLRINT
):
2833 builtin_optab
= lrint_optab
;
2836 CASE_FLT_FN (BUILT_IN_IROUND
):
2837 fallback_fn
= BUILT_IN_LROUND
;
2839 CASE_FLT_FN (BUILT_IN_LROUND
):
2840 CASE_FLT_FN (BUILT_IN_LLROUND
):
2841 builtin_optab
= lround_optab
;
2848 /* There's no easy way to detect the case we need to set EDOM. */
2849 if (flag_errno_math
&& fallback_fn
== BUILT_IN_NONE
)
2852 /* Make a suitable register to place result in. */
2853 mode
= TYPE_MODE (TREE_TYPE (exp
));
2855 /* There's no easy way to detect the case we need to set EDOM. */
2856 if (!flag_errno_math
)
2858 rtx result
= gen_reg_rtx (mode
);
2860 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2861 need to expand the argument again. This way, we will not perform
2862 side-effects more the once. */
2863 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2865 op0
= expand_expr (arg
, NULL
, VOIDmode
, EXPAND_NORMAL
);
2869 if (expand_sfix_optab (result
, op0
, builtin_optab
))
2871 /* Output the entire sequence. */
2872 insns
= get_insns ();
2878 /* If we were unable to expand via the builtin, stop the sequence
2879 (without outputting the insns) and call to the library function
2880 with the stabilized argument list. */
2884 if (fallback_fn
!= BUILT_IN_NONE
)
2886 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2887 targets, (int) round (x) should never be transformed into
2888 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2889 a call to lround in the hope that the target provides at least some
2890 C99 functions. This should result in the best user experience for
2891 not full C99 targets. */
2892 tree fallback_fndecl
= mathfn_built_in_1 (TREE_TYPE (arg
),
2895 exp
= build_call_nofold_loc (EXPR_LOCATION (exp
),
2896 fallback_fndecl
, 1, arg
);
2898 target
= expand_call (exp
, NULL_RTX
, target
== const0_rtx
);
2899 target
= maybe_emit_group_store (target
, TREE_TYPE (exp
));
2900 return convert_to_mode (mode
, target
, 0);
2903 return expand_call (exp
, target
, target
== const0_rtx
);
2906 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2907 a normal call should be emitted rather than expanding the function
2908 in-line. EXP is the expression that is a call to the builtin
2909 function; if convenient, the result should be placed in TARGET. */
2912 expand_builtin_powi (tree exp
, rtx target
)
2916 enum machine_mode mode
;
2917 enum machine_mode mode2
;
2919 if (! validate_arglist (exp
, REAL_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
2922 arg0
= CALL_EXPR_ARG (exp
, 0);
2923 arg1
= CALL_EXPR_ARG (exp
, 1);
2924 mode
= TYPE_MODE (TREE_TYPE (exp
));
2926 /* Emit a libcall to libgcc. */
2928 /* Mode of the 2nd argument must match that of an int. */
2929 mode2
= mode_for_size (INT_TYPE_SIZE
, MODE_INT
, 0);
2931 if (target
== NULL_RTX
)
2932 target
= gen_reg_rtx (mode
);
2934 op0
= expand_expr (arg0
, NULL_RTX
, mode
, EXPAND_NORMAL
);
2935 if (GET_MODE (op0
) != mode
)
2936 op0
= convert_to_mode (mode
, op0
, 0);
2937 op1
= expand_expr (arg1
, NULL_RTX
, mode2
, EXPAND_NORMAL
);
2938 if (GET_MODE (op1
) != mode2
)
2939 op1
= convert_to_mode (mode2
, op1
, 0);
2941 target
= emit_library_call_value (optab_libfunc (powi_optab
, mode
),
2942 target
, LCT_CONST
, mode
, 2,
2943 op0
, mode
, op1
, mode2
);
2948 /* Expand expression EXP which is a call to the strlen builtin. Return
2949 NULL_RTX if we failed the caller should emit a normal call, otherwise
2950 try to get the result in TARGET, if convenient. */
2953 expand_builtin_strlen (tree exp
, rtx target
,
2954 enum machine_mode target_mode
)
2956 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
2960 struct expand_operand ops
[4];
2963 tree src
= CALL_EXPR_ARG (exp
, 0);
2964 rtx src_reg
, before_strlen
;
2965 enum machine_mode insn_mode
= target_mode
;
2966 enum insn_code icode
= CODE_FOR_nothing
;
2969 /* If the length can be computed at compile-time, return it. */
2970 len
= c_strlen (src
, 0);
2972 return expand_expr (len
, target
, target_mode
, EXPAND_NORMAL
);
2974 /* If the length can be computed at compile-time and is constant
2975 integer, but there are side-effects in src, evaluate
2976 src for side-effects, then return len.
2977 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2978 can be optimized into: i++; x = 3; */
2979 len
= c_strlen (src
, 1);
2980 if (len
&& TREE_CODE (len
) == INTEGER_CST
)
2982 expand_expr (src
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
2983 return expand_expr (len
, target
, target_mode
, EXPAND_NORMAL
);
2986 align
= get_pointer_alignment (src
) / BITS_PER_UNIT
;
2988 /* If SRC is not a pointer type, don't do this operation inline. */
2992 /* Bail out if we can't compute strlen in the right mode. */
2993 while (insn_mode
!= VOIDmode
)
2995 icode
= optab_handler (strlen_optab
, insn_mode
);
2996 if (icode
!= CODE_FOR_nothing
)
2999 insn_mode
= GET_MODE_WIDER_MODE (insn_mode
);
3001 if (insn_mode
== VOIDmode
)
3004 /* Make a place to hold the source address. We will not expand
3005 the actual source until we are sure that the expansion will
3006 not fail -- there are trees that cannot be expanded twice. */
3007 src_reg
= gen_reg_rtx (Pmode
);
3009 /* Mark the beginning of the strlen sequence so we can emit the
3010 source operand later. */
3011 before_strlen
= get_last_insn ();
3013 create_output_operand (&ops
[0], target
, insn_mode
);
3014 create_fixed_operand (&ops
[1], gen_rtx_MEM (BLKmode
, src_reg
));
3015 create_integer_operand (&ops
[2], 0);
3016 create_integer_operand (&ops
[3], align
);
3017 if (!maybe_expand_insn (icode
, 4, ops
))
3020 /* Now that we are assured of success, expand the source. */
3022 pat
= expand_expr (src
, src_reg
, Pmode
, EXPAND_NORMAL
);
3025 #ifdef POINTERS_EXTEND_UNSIGNED
3026 if (GET_MODE (pat
) != Pmode
)
3027 pat
= convert_to_mode (Pmode
, pat
,
3028 POINTERS_EXTEND_UNSIGNED
);
3030 emit_move_insn (src_reg
, pat
);
3036 emit_insn_after (pat
, before_strlen
);
3038 emit_insn_before (pat
, get_insns ());
3040 /* Return the value in the proper mode for this function. */
3041 if (GET_MODE (ops
[0].value
) == target_mode
)
3042 target
= ops
[0].value
;
3043 else if (target
!= 0)
3044 convert_move (target
, ops
[0].value
, 0);
3046 target
= convert_to_mode (target_mode
, ops
[0].value
, 0);
3052 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3053 bytes from constant string DATA + OFFSET and return it as target
3057 builtin_memcpy_read_str (void *data
, HOST_WIDE_INT offset
,
3058 enum machine_mode mode
)
3060 const char *str
= (const char *) data
;
3062 gcc_assert (offset
>= 0
3063 && ((unsigned HOST_WIDE_INT
) offset
+ GET_MODE_SIZE (mode
)
3064 <= strlen (str
) + 1));
3066 return c_readstr (str
+ offset
, mode
);
3069 /* Expand a call EXP to the memcpy builtin.
3070 Return NULL_RTX if we failed, the caller should emit a normal call,
3071 otherwise try to get the result in TARGET, if convenient (and in
3072 mode MODE if that's convenient). */
3075 expand_builtin_memcpy (tree exp
, rtx target
)
3077 if (!validate_arglist (exp
,
3078 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3082 tree dest
= CALL_EXPR_ARG (exp
, 0);
3083 tree src
= CALL_EXPR_ARG (exp
, 1);
3084 tree len
= CALL_EXPR_ARG (exp
, 2);
3085 const char *src_str
;
3086 unsigned int src_align
= get_pointer_alignment (src
);
3087 unsigned int dest_align
= get_pointer_alignment (dest
);
3088 rtx dest_mem
, src_mem
, dest_addr
, len_rtx
;
3089 HOST_WIDE_INT expected_size
= -1;
3090 unsigned int expected_align
= 0;
3092 /* If DEST is not a pointer type, call the normal function. */
3093 if (dest_align
== 0)
3096 /* If either SRC is not a pointer type, don't do this
3097 operation in-line. */
3101 if (currently_expanding_gimple_stmt
)
3102 stringop_block_profile (currently_expanding_gimple_stmt
,
3103 &expected_align
, &expected_size
);
3105 if (expected_align
< dest_align
)
3106 expected_align
= dest_align
;
3107 dest_mem
= get_memory_rtx (dest
, len
);
3108 set_mem_align (dest_mem
, dest_align
);
3109 len_rtx
= expand_normal (len
);
3110 src_str
= c_getstr (src
);
3112 /* If SRC is a string constant and block move would be done
3113 by pieces, we can avoid loading the string from memory
3114 and only stored the computed constants. */
3116 && CONST_INT_P (len_rtx
)
3117 && (unsigned HOST_WIDE_INT
) INTVAL (len_rtx
) <= strlen (src_str
) + 1
3118 && can_store_by_pieces (INTVAL (len_rtx
), builtin_memcpy_read_str
,
3119 CONST_CAST (char *, src_str
),
3122 dest_mem
= store_by_pieces (dest_mem
, INTVAL (len_rtx
),
3123 builtin_memcpy_read_str
,
3124 CONST_CAST (char *, src_str
),
3125 dest_align
, false, 0);
3126 dest_mem
= force_operand (XEXP (dest_mem
, 0), target
);
3127 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3131 src_mem
= get_memory_rtx (src
, len
);
3132 set_mem_align (src_mem
, src_align
);
3134 /* Copy word part most expediently. */
3135 dest_addr
= emit_block_move_hints (dest_mem
, src_mem
, len_rtx
,
3136 CALL_EXPR_TAILCALL (exp
)
3137 ? BLOCK_OP_TAILCALL
: BLOCK_OP_NORMAL
,
3138 expected_align
, expected_size
);
3142 dest_addr
= force_operand (XEXP (dest_mem
, 0), target
);
3143 dest_addr
= convert_memory_address (ptr_mode
, dest_addr
);
3149 /* Expand a call EXP to the mempcpy builtin.
3150 Return NULL_RTX if we failed; the caller should emit a normal call,
3151 otherwise try to get the result in TARGET, if convenient (and in
3152 mode MODE if that's convenient). If ENDP is 0 return the
3153 destination pointer, if ENDP is 1 return the end pointer ala
3154 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3158 expand_builtin_mempcpy (tree exp
, rtx target
, enum machine_mode mode
)
3160 if (!validate_arglist (exp
,
3161 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3165 tree dest
= CALL_EXPR_ARG (exp
, 0);
3166 tree src
= CALL_EXPR_ARG (exp
, 1);
3167 tree len
= CALL_EXPR_ARG (exp
, 2);
3168 return expand_builtin_mempcpy_args (dest
, src
, len
,
3169 target
, mode
, /*endp=*/ 1);
3173 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3174 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3175 so that this can also be called without constructing an actual CALL_EXPR.
3176 The other arguments and return value are the same as for
3177 expand_builtin_mempcpy. */
3180 expand_builtin_mempcpy_args (tree dest
, tree src
, tree len
,
3181 rtx target
, enum machine_mode mode
, int endp
)
3183 /* If return value is ignored, transform mempcpy into memcpy. */
3184 if (target
== const0_rtx
&& builtin_decl_implicit_p (BUILT_IN_MEMCPY
))
3186 tree fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
3187 tree result
= build_call_nofold_loc (UNKNOWN_LOCATION
, fn
, 3,
3189 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3193 const char *src_str
;
3194 unsigned int src_align
= get_pointer_alignment (src
);
3195 unsigned int dest_align
= get_pointer_alignment (dest
);
3196 rtx dest_mem
, src_mem
, len_rtx
;
3198 /* If either SRC or DEST is not a pointer type, don't do this
3199 operation in-line. */
3200 if (dest_align
== 0 || src_align
== 0)
3203 /* If LEN is not constant, call the normal function. */
3204 if (! host_integerp (len
, 1))
3207 len_rtx
= expand_normal (len
);
3208 src_str
= c_getstr (src
);
3210 /* If SRC is a string constant and block move would be done
3211 by pieces, we can avoid loading the string from memory
3212 and only stored the computed constants. */
3214 && CONST_INT_P (len_rtx
)
3215 && (unsigned HOST_WIDE_INT
) INTVAL (len_rtx
) <= strlen (src_str
) + 1
3216 && can_store_by_pieces (INTVAL (len_rtx
), builtin_memcpy_read_str
,
3217 CONST_CAST (char *, src_str
),
3220 dest_mem
= get_memory_rtx (dest
, len
);
3221 set_mem_align (dest_mem
, dest_align
);
3222 dest_mem
= store_by_pieces (dest_mem
, INTVAL (len_rtx
),
3223 builtin_memcpy_read_str
,
3224 CONST_CAST (char *, src_str
),
3225 dest_align
, false, endp
);
3226 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3227 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3231 if (CONST_INT_P (len_rtx
)
3232 && can_move_by_pieces (INTVAL (len_rtx
),
3233 MIN (dest_align
, src_align
)))
3235 dest_mem
= get_memory_rtx (dest
, len
);
3236 set_mem_align (dest_mem
, dest_align
);
3237 src_mem
= get_memory_rtx (src
, len
);
3238 set_mem_align (src_mem
, src_align
);
3239 dest_mem
= move_by_pieces (dest_mem
, src_mem
, INTVAL (len_rtx
),
3240 MIN (dest_align
, src_align
), endp
);
3241 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3242 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3251 # define HAVE_movstr 0
3252 # define CODE_FOR_movstr CODE_FOR_nothing
3255 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3256 we failed, the caller should emit a normal call, otherwise try to
3257 get the result in TARGET, if convenient. If ENDP is 0 return the
3258 destination pointer, if ENDP is 1 return the end pointer ala
3259 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3263 expand_movstr (tree dest
, tree src
, rtx target
, int endp
)
3265 struct expand_operand ops
[3];
3272 dest_mem
= get_memory_rtx (dest
, NULL
);
3273 src_mem
= get_memory_rtx (src
, NULL
);
3276 target
= force_reg (Pmode
, XEXP (dest_mem
, 0));
3277 dest_mem
= replace_equiv_address (dest_mem
, target
);
3280 create_output_operand (&ops
[0], endp
? target
: NULL_RTX
, Pmode
);
3281 create_fixed_operand (&ops
[1], dest_mem
);
3282 create_fixed_operand (&ops
[2], src_mem
);
3283 expand_insn (CODE_FOR_movstr
, 3, ops
);
3285 if (endp
&& target
!= const0_rtx
)
3287 target
= ops
[0].value
;
3288 /* movstr is supposed to set end to the address of the NUL
3289 terminator. If the caller requested a mempcpy-like return value,
3293 rtx tem
= plus_constant (GET_MODE (target
),
3294 gen_lowpart (GET_MODE (target
), target
), 1);
3295 emit_move_insn (target
, force_operand (tem
, NULL_RTX
));
3301 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3302 NULL_RTX if we failed the caller should emit a normal call, otherwise
3303 try to get the result in TARGET, if convenient (and in mode MODE if that's
3307 expand_builtin_strcpy (tree exp
, rtx target
)
3309 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3311 tree dest
= CALL_EXPR_ARG (exp
, 0);
3312 tree src
= CALL_EXPR_ARG (exp
, 1);
3313 return expand_builtin_strcpy_args (dest
, src
, target
);
3318 /* Helper function to do the actual work for expand_builtin_strcpy. The
3319 arguments to the builtin_strcpy call DEST and SRC are broken out
3320 so that this can also be called without constructing an actual CALL_EXPR.
3321 The other arguments and return value are the same as for
3322 expand_builtin_strcpy. */
3325 expand_builtin_strcpy_args (tree dest
, tree src
, rtx target
)
3327 return expand_movstr (dest
, src
, target
, /*endp=*/0);
3330 /* Expand a call EXP to the stpcpy builtin.
3331 Return NULL_RTX if we failed the caller should emit a normal call,
3332 otherwise try to get the result in TARGET, if convenient (and in
3333 mode MODE if that's convenient). */
3336 expand_builtin_stpcpy (tree exp
, rtx target
, enum machine_mode mode
)
3339 location_t loc
= EXPR_LOCATION (exp
);
3341 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3344 dst
= CALL_EXPR_ARG (exp
, 0);
3345 src
= CALL_EXPR_ARG (exp
, 1);
3347 /* If return value is ignored, transform stpcpy into strcpy. */
3348 if (target
== const0_rtx
&& builtin_decl_implicit (BUILT_IN_STRCPY
))
3350 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
3351 tree result
= build_call_nofold_loc (loc
, fn
, 2, dst
, src
);
3352 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3359 /* Ensure we get an actual string whose length can be evaluated at
3360 compile-time, not an expression containing a string. This is
3361 because the latter will potentially produce pessimized code
3362 when used to produce the return value. */
3363 if (! c_getstr (src
) || ! (len
= c_strlen (src
, 0)))
3364 return expand_movstr (dst
, src
, target
, /*endp=*/2);
3366 lenp1
= size_binop_loc (loc
, PLUS_EXPR
, len
, ssize_int (1));
3367 ret
= expand_builtin_mempcpy_args (dst
, src
, lenp1
,
3368 target
, mode
, /*endp=*/2);
3373 if (TREE_CODE (len
) == INTEGER_CST
)
3375 rtx len_rtx
= expand_normal (len
);
3377 if (CONST_INT_P (len_rtx
))
3379 ret
= expand_builtin_strcpy_args (dst
, src
, target
);
3385 if (mode
!= VOIDmode
)
3386 target
= gen_reg_rtx (mode
);
3388 target
= gen_reg_rtx (GET_MODE (ret
));
3390 if (GET_MODE (target
) != GET_MODE (ret
))
3391 ret
= gen_lowpart (GET_MODE (target
), ret
);
3393 ret
= plus_constant (GET_MODE (ret
), ret
, INTVAL (len_rtx
));
3394 ret
= emit_move_insn (target
, force_operand (ret
, NULL_RTX
));
3402 return expand_movstr (dst
, src
, target
, /*endp=*/2);
3406 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3407 bytes from constant string DATA + OFFSET and return it as target
3411 builtin_strncpy_read_str (void *data
, HOST_WIDE_INT offset
,
3412 enum machine_mode mode
)
3414 const char *str
= (const char *) data
;
3416 if ((unsigned HOST_WIDE_INT
) offset
> strlen (str
))
3419 return c_readstr (str
+ offset
, mode
);
3422 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3423 NULL_RTX if we failed the caller should emit a normal call. */
3426 expand_builtin_strncpy (tree exp
, rtx target
)
3428 location_t loc
= EXPR_LOCATION (exp
);
3430 if (validate_arglist (exp
,
3431 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3433 tree dest
= CALL_EXPR_ARG (exp
, 0);
3434 tree src
= CALL_EXPR_ARG (exp
, 1);
3435 tree len
= CALL_EXPR_ARG (exp
, 2);
3436 tree slen
= c_strlen (src
, 1);
3438 /* We must be passed a constant len and src parameter. */
3439 if (!host_integerp (len
, 1) || !slen
|| !host_integerp (slen
, 1))
3442 slen
= size_binop_loc (loc
, PLUS_EXPR
, slen
, ssize_int (1));
3444 /* We're required to pad with trailing zeros if the requested
3445 len is greater than strlen(s2)+1. In that case try to
3446 use store_by_pieces, if it fails, punt. */
3447 if (tree_int_cst_lt (slen
, len
))
3449 unsigned int dest_align
= get_pointer_alignment (dest
);
3450 const char *p
= c_getstr (src
);
3453 if (!p
|| dest_align
== 0 || !host_integerp (len
, 1)
3454 || !can_store_by_pieces (tree_low_cst (len
, 1),
3455 builtin_strncpy_read_str
,
3456 CONST_CAST (char *, p
),
3460 dest_mem
= get_memory_rtx (dest
, len
);
3461 store_by_pieces (dest_mem
, tree_low_cst (len
, 1),
3462 builtin_strncpy_read_str
,
3463 CONST_CAST (char *, p
), dest_align
, false, 0);
3464 dest_mem
= force_operand (XEXP (dest_mem
, 0), target
);
3465 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3472 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3473 bytes from constant string DATA + OFFSET and return it as target
3477 builtin_memset_read_str (void *data
, HOST_WIDE_INT offset ATTRIBUTE_UNUSED
,
3478 enum machine_mode mode
)
3480 const char *c
= (const char *) data
;
3481 char *p
= XALLOCAVEC (char, GET_MODE_SIZE (mode
));
3483 memset (p
, *c
, GET_MODE_SIZE (mode
));
3485 return c_readstr (p
, mode
);
3488 /* Callback routine for store_by_pieces. Return the RTL of a register
3489 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3490 char value given in the RTL register data. For example, if mode is
3491 4 bytes wide, return the RTL for 0x01010101*data. */
3494 builtin_memset_gen_str (void *data
, HOST_WIDE_INT offset ATTRIBUTE_UNUSED
,
3495 enum machine_mode mode
)
3501 size
= GET_MODE_SIZE (mode
);
3505 p
= XALLOCAVEC (char, size
);
3506 memset (p
, 1, size
);
3507 coeff
= c_readstr (p
, mode
);
3509 target
= convert_to_mode (mode
, (rtx
) data
, 1);
3510 target
= expand_mult (mode
, target
, coeff
, NULL_RTX
, 1);
3511 return force_reg (mode
, target
);
3514 /* Expand expression EXP, which is a call to the memset builtin. Return
3515 NULL_RTX if we failed the caller should emit a normal call, otherwise
3516 try to get the result in TARGET, if convenient (and in mode MODE if that's
3520 expand_builtin_memset (tree exp
, rtx target
, enum machine_mode mode
)
3522 if (!validate_arglist (exp
,
3523 POINTER_TYPE
, INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3527 tree dest
= CALL_EXPR_ARG (exp
, 0);
3528 tree val
= CALL_EXPR_ARG (exp
, 1);
3529 tree len
= CALL_EXPR_ARG (exp
, 2);
3530 return expand_builtin_memset_args (dest
, val
, len
, target
, mode
, exp
);
3534 /* Helper function to do the actual work for expand_builtin_memset. The
3535 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3536 so that this can also be called without constructing an actual CALL_EXPR.
3537 The other arguments and return value are the same as for
3538 expand_builtin_memset. */
3541 expand_builtin_memset_args (tree dest
, tree val
, tree len
,
3542 rtx target
, enum machine_mode mode
, tree orig_exp
)
3545 enum built_in_function fcode
;
3546 enum machine_mode val_mode
;
3548 unsigned int dest_align
;
3549 rtx dest_mem
, dest_addr
, len_rtx
;
3550 HOST_WIDE_INT expected_size
= -1;
3551 unsigned int expected_align
= 0;
3553 dest_align
= get_pointer_alignment (dest
);
3555 /* If DEST is not a pointer type, don't do this operation in-line. */
3556 if (dest_align
== 0)
3559 if (currently_expanding_gimple_stmt
)
3560 stringop_block_profile (currently_expanding_gimple_stmt
,
3561 &expected_align
, &expected_size
);
3563 if (expected_align
< dest_align
)
3564 expected_align
= dest_align
;
3566 /* If the LEN parameter is zero, return DEST. */
3567 if (integer_zerop (len
))
3569 /* Evaluate and ignore VAL in case it has side-effects. */
3570 expand_expr (val
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3571 return expand_expr (dest
, target
, mode
, EXPAND_NORMAL
);
3574 /* Stabilize the arguments in case we fail. */
3575 dest
= builtin_save_expr (dest
);
3576 val
= builtin_save_expr (val
);
3577 len
= builtin_save_expr (len
);
3579 len_rtx
= expand_normal (len
);
3580 dest_mem
= get_memory_rtx (dest
, len
);
3581 val_mode
= TYPE_MODE (unsigned_char_type_node
);
3583 if (TREE_CODE (val
) != INTEGER_CST
)
3587 val_rtx
= expand_normal (val
);
3588 val_rtx
= convert_to_mode (val_mode
, val_rtx
, 0);
3590 /* Assume that we can memset by pieces if we can store
3591 * the coefficients by pieces (in the required modes).
3592 * We can't pass builtin_memset_gen_str as that emits RTL. */
3594 if (host_integerp (len
, 1)
3595 && can_store_by_pieces (tree_low_cst (len
, 1),
3596 builtin_memset_read_str
, &c
, dest_align
,
3599 val_rtx
= force_reg (val_mode
, val_rtx
);
3600 store_by_pieces (dest_mem
, tree_low_cst (len
, 1),
3601 builtin_memset_gen_str
, val_rtx
, dest_align
,
3604 else if (!set_storage_via_setmem (dest_mem
, len_rtx
, val_rtx
,
3605 dest_align
, expected_align
,
3609 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3610 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3614 if (target_char_cast (val
, &c
))
3619 if (host_integerp (len
, 1)
3620 && can_store_by_pieces (tree_low_cst (len
, 1),
3621 builtin_memset_read_str
, &c
, dest_align
,
3623 store_by_pieces (dest_mem
, tree_low_cst (len
, 1),
3624 builtin_memset_read_str
, &c
, dest_align
, true, 0);
3625 else if (!set_storage_via_setmem (dest_mem
, len_rtx
,
3626 gen_int_mode (c
, val_mode
),
3627 dest_align
, expected_align
,
3631 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3632 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3636 set_mem_align (dest_mem
, dest_align
);
3637 dest_addr
= clear_storage_hints (dest_mem
, len_rtx
,
3638 CALL_EXPR_TAILCALL (orig_exp
)
3639 ? BLOCK_OP_TAILCALL
: BLOCK_OP_NORMAL
,
3640 expected_align
, expected_size
);
3644 dest_addr
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3645 dest_addr
= convert_memory_address (ptr_mode
, dest_addr
);
3651 fndecl
= get_callee_fndecl (orig_exp
);
3652 fcode
= DECL_FUNCTION_CODE (fndecl
);
3653 if (fcode
== BUILT_IN_MEMSET
)
3654 fn
= build_call_nofold_loc (EXPR_LOCATION (orig_exp
), fndecl
, 3,
3656 else if (fcode
== BUILT_IN_BZERO
)
3657 fn
= build_call_nofold_loc (EXPR_LOCATION (orig_exp
), fndecl
, 2,
3661 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
3662 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (orig_exp
);
3663 return expand_call (fn
, target
, target
== const0_rtx
);
3666 /* Expand expression EXP, which is a call to the bzero builtin. Return
3667 NULL_RTX if we failed the caller should emit a normal call. */
3670 expand_builtin_bzero (tree exp
)
3673 location_t loc
= EXPR_LOCATION (exp
);
3675 if (!validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3678 dest
= CALL_EXPR_ARG (exp
, 0);
3679 size
= CALL_EXPR_ARG (exp
, 1);
3681 /* New argument list transforming bzero(ptr x, int y) to
3682 memset(ptr x, int 0, size_t y). This is done this way
3683 so that if it isn't expanded inline, we fallback to
3684 calling bzero instead of memset. */
3686 return expand_builtin_memset_args (dest
, integer_zero_node
,
3687 fold_convert_loc (loc
,
3688 size_type_node
, size
),
3689 const0_rtx
, VOIDmode
, exp
);
3692 /* Expand expression EXP, which is a call to the memcmp built-in function.
3693 Return NULL_RTX if we failed and the caller should emit a normal call,
3694 otherwise try to get the result in TARGET, if convenient (and in mode
3695 MODE, if that's convenient). */
3698 expand_builtin_memcmp (tree exp
, ATTRIBUTE_UNUSED rtx target
,
3699 ATTRIBUTE_UNUSED
enum machine_mode mode
)
3701 location_t loc ATTRIBUTE_UNUSED
= EXPR_LOCATION (exp
);
3703 if (!validate_arglist (exp
,
3704 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3707 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3708 implementing memcmp because it will stop if it encounters two
3710 #if defined HAVE_cmpmemsi
3712 rtx arg1_rtx
, arg2_rtx
, arg3_rtx
;
3715 tree arg1
= CALL_EXPR_ARG (exp
, 0);
3716 tree arg2
= CALL_EXPR_ARG (exp
, 1);
3717 tree len
= CALL_EXPR_ARG (exp
, 2);
3719 unsigned int arg1_align
= get_pointer_alignment (arg1
) / BITS_PER_UNIT
;
3720 unsigned int arg2_align
= get_pointer_alignment (arg2
) / BITS_PER_UNIT
;
3721 enum machine_mode insn_mode
;
3724 insn_mode
= insn_data
[(int) CODE_FOR_cmpmemsi
].operand
[0].mode
;
3728 /* If we don't have POINTER_TYPE, call the function. */
3729 if (arg1_align
== 0 || arg2_align
== 0)
3732 /* Make a place to write the result of the instruction. */
3735 && REG_P (result
) && GET_MODE (result
) == insn_mode
3736 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
3737 result
= gen_reg_rtx (insn_mode
);
3739 arg1_rtx
= get_memory_rtx (arg1
, len
);
3740 arg2_rtx
= get_memory_rtx (arg2
, len
);
3741 arg3_rtx
= expand_normal (fold_convert_loc (loc
, sizetype
, len
));
3743 /* Set MEM_SIZE as appropriate. */
3744 if (CONST_INT_P (arg3_rtx
))
3746 set_mem_size (arg1_rtx
, INTVAL (arg3_rtx
));
3747 set_mem_size (arg2_rtx
, INTVAL (arg3_rtx
));
3751 insn
= gen_cmpmemsi (result
, arg1_rtx
, arg2_rtx
, arg3_rtx
,
3752 GEN_INT (MIN (arg1_align
, arg2_align
)));
3759 emit_library_call_value (memcmp_libfunc
, result
, LCT_PURE
,
3760 TYPE_MODE (integer_type_node
), 3,
3761 XEXP (arg1_rtx
, 0), Pmode
,
3762 XEXP (arg2_rtx
, 0), Pmode
,
3763 convert_to_mode (TYPE_MODE (sizetype
), arg3_rtx
,
3764 TYPE_UNSIGNED (sizetype
)),
3765 TYPE_MODE (sizetype
));
3767 /* Return the value in the proper mode for this function. */
3768 mode
= TYPE_MODE (TREE_TYPE (exp
));
3769 if (GET_MODE (result
) == mode
)
3771 else if (target
!= 0)
3773 convert_move (target
, result
, 0);
3777 return convert_to_mode (mode
, result
, 0);
3779 #endif /* HAVE_cmpmemsi. */
3784 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
3785 if we failed the caller should emit a normal call, otherwise try to get
3786 the result in TARGET, if convenient. */
3789 expand_builtin_strcmp (tree exp
, ATTRIBUTE_UNUSED rtx target
)
3791 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3794 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
3795 if (direct_optab_handler (cmpstr_optab
, SImode
) != CODE_FOR_nothing
3796 || direct_optab_handler (cmpstrn_optab
, SImode
) != CODE_FOR_nothing
)
3798 rtx arg1_rtx
, arg2_rtx
;
3799 rtx result
, insn
= NULL_RTX
;
3801 tree arg1
= CALL_EXPR_ARG (exp
, 0);
3802 tree arg2
= CALL_EXPR_ARG (exp
, 1);
3804 unsigned int arg1_align
= get_pointer_alignment (arg1
) / BITS_PER_UNIT
;
3805 unsigned int arg2_align
= get_pointer_alignment (arg2
) / BITS_PER_UNIT
;
3807 /* If we don't have POINTER_TYPE, call the function. */
3808 if (arg1_align
== 0 || arg2_align
== 0)
3811 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
3812 arg1
= builtin_save_expr (arg1
);
3813 arg2
= builtin_save_expr (arg2
);
3815 arg1_rtx
= get_memory_rtx (arg1
, NULL
);
3816 arg2_rtx
= get_memory_rtx (arg2
, NULL
);
3818 #ifdef HAVE_cmpstrsi
3819 /* Try to call cmpstrsi. */
3822 enum machine_mode insn_mode
3823 = insn_data
[(int) CODE_FOR_cmpstrsi
].operand
[0].mode
;
3825 /* Make a place to write the result of the instruction. */
3828 && REG_P (result
) && GET_MODE (result
) == insn_mode
3829 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
3830 result
= gen_reg_rtx (insn_mode
);
3832 insn
= gen_cmpstrsi (result
, arg1_rtx
, arg2_rtx
,
3833 GEN_INT (MIN (arg1_align
, arg2_align
)));
3836 #ifdef HAVE_cmpstrnsi
3837 /* Try to determine at least one length and call cmpstrnsi. */
3838 if (!insn
&& HAVE_cmpstrnsi
)
3843 enum machine_mode insn_mode
3844 = insn_data
[(int) CODE_FOR_cmpstrnsi
].operand
[0].mode
;
3845 tree len1
= c_strlen (arg1
, 1);
3846 tree len2
= c_strlen (arg2
, 1);
3849 len1
= size_binop (PLUS_EXPR
, ssize_int (1), len1
);
3851 len2
= size_binop (PLUS_EXPR
, ssize_int (1), len2
);
3853 /* If we don't have a constant length for the first, use the length
3854 of the second, if we know it. We don't require a constant for
3855 this case; some cost analysis could be done if both are available
3856 but neither is constant. For now, assume they're equally cheap,
3857 unless one has side effects. If both strings have constant lengths,
3864 else if (TREE_SIDE_EFFECTS (len1
))
3866 else if (TREE_SIDE_EFFECTS (len2
))
3868 else if (TREE_CODE (len1
) != INTEGER_CST
)
3870 else if (TREE_CODE (len2
) != INTEGER_CST
)
3872 else if (tree_int_cst_lt (len1
, len2
))
3877 /* If both arguments have side effects, we cannot optimize. */
3878 if (!len
|| TREE_SIDE_EFFECTS (len
))
3881 arg3_rtx
= expand_normal (len
);
3883 /* Make a place to write the result of the instruction. */
3886 && REG_P (result
) && GET_MODE (result
) == insn_mode
3887 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
3888 result
= gen_reg_rtx (insn_mode
);
3890 insn
= gen_cmpstrnsi (result
, arg1_rtx
, arg2_rtx
, arg3_rtx
,
3891 GEN_INT (MIN (arg1_align
, arg2_align
)));
3897 enum machine_mode mode
;
3900 /* Return the value in the proper mode for this function. */
3901 mode
= TYPE_MODE (TREE_TYPE (exp
));
3902 if (GET_MODE (result
) == mode
)
3905 return convert_to_mode (mode
, result
, 0);
3906 convert_move (target
, result
, 0);
3910 /* Expand the library call ourselves using a stabilized argument
3911 list to avoid re-evaluating the function's arguments twice. */
3912 #ifdef HAVE_cmpstrnsi
3915 fndecl
= get_callee_fndecl (exp
);
3916 fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fndecl
, 2, arg1
, arg2
);
3917 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
3918 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
3919 return expand_call (fn
, target
, target
== const0_rtx
);
3925 /* Expand expression EXP, which is a call to the strncmp builtin. Return
3926 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
3927 the result in TARGET, if convenient. */
3930 expand_builtin_strncmp (tree exp
, ATTRIBUTE_UNUSED rtx target
,
3931 ATTRIBUTE_UNUSED
enum machine_mode mode
)
3933 location_t loc ATTRIBUTE_UNUSED
= EXPR_LOCATION (exp
);
3935 if (!validate_arglist (exp
,
3936 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3939 /* If c_strlen can determine an expression for one of the string
3940 lengths, and it doesn't have side effects, then emit cmpstrnsi
3941 using length MIN(strlen(string)+1, arg3). */
3942 #ifdef HAVE_cmpstrnsi
3945 tree len
, len1
, len2
;
3946 rtx arg1_rtx
, arg2_rtx
, arg3_rtx
;
3949 tree arg1
= CALL_EXPR_ARG (exp
, 0);
3950 tree arg2
= CALL_EXPR_ARG (exp
, 1);
3951 tree arg3
= CALL_EXPR_ARG (exp
, 2);
3953 unsigned int arg1_align
= get_pointer_alignment (arg1
) / BITS_PER_UNIT
;
3954 unsigned int arg2_align
= get_pointer_alignment (arg2
) / BITS_PER_UNIT
;
3955 enum machine_mode insn_mode
3956 = insn_data
[(int) CODE_FOR_cmpstrnsi
].operand
[0].mode
;
3958 len1
= c_strlen (arg1
, 1);
3959 len2
= c_strlen (arg2
, 1);
3962 len1
= size_binop_loc (loc
, PLUS_EXPR
, ssize_int (1), len1
);
3964 len2
= size_binop_loc (loc
, PLUS_EXPR
, ssize_int (1), len2
);
3966 /* If we don't have a constant length for the first, use the length
3967 of the second, if we know it. We don't require a constant for
3968 this case; some cost analysis could be done if both are available
3969 but neither is constant. For now, assume they're equally cheap,
3970 unless one has side effects. If both strings have constant lengths,
3977 else if (TREE_SIDE_EFFECTS (len1
))
3979 else if (TREE_SIDE_EFFECTS (len2
))
3981 else if (TREE_CODE (len1
) != INTEGER_CST
)
3983 else if (TREE_CODE (len2
) != INTEGER_CST
)
3985 else if (tree_int_cst_lt (len1
, len2
))
3990 /* If both arguments have side effects, we cannot optimize. */
3991 if (!len
|| TREE_SIDE_EFFECTS (len
))
3994 /* The actual new length parameter is MIN(len,arg3). */
3995 len
= fold_build2_loc (loc
, MIN_EXPR
, TREE_TYPE (len
), len
,
3996 fold_convert_loc (loc
, TREE_TYPE (len
), arg3
));
3998 /* If we don't have POINTER_TYPE, call the function. */
3999 if (arg1_align
== 0 || arg2_align
== 0)
4002 /* Make a place to write the result of the instruction. */
4005 && REG_P (result
) && GET_MODE (result
) == insn_mode
4006 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
4007 result
= gen_reg_rtx (insn_mode
);
4009 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4010 arg1
= builtin_save_expr (arg1
);
4011 arg2
= builtin_save_expr (arg2
);
4012 len
= builtin_save_expr (len
);
4014 arg1_rtx
= get_memory_rtx (arg1
, len
);
4015 arg2_rtx
= get_memory_rtx (arg2
, len
);
4016 arg3_rtx
= expand_normal (len
);
4017 insn
= gen_cmpstrnsi (result
, arg1_rtx
, arg2_rtx
, arg3_rtx
,
4018 GEN_INT (MIN (arg1_align
, arg2_align
)));
4023 /* Return the value in the proper mode for this function. */
4024 mode
= TYPE_MODE (TREE_TYPE (exp
));
4025 if (GET_MODE (result
) == mode
)
4028 return convert_to_mode (mode
, result
, 0);
4029 convert_move (target
, result
, 0);
4033 /* Expand the library call ourselves using a stabilized argument
4034 list to avoid re-evaluating the function's arguments twice. */
4035 fndecl
= get_callee_fndecl (exp
);
4036 fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fndecl
, 3,
4038 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
4039 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
4040 return expand_call (fn
, target
, target
== const0_rtx
);
4046 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4047 if that's convenient. */
4050 expand_builtin_saveregs (void)
4054 /* Don't do __builtin_saveregs more than once in a function.
4055 Save the result of the first call and reuse it. */
4056 if (saveregs_value
!= 0)
4057 return saveregs_value
;
4059 /* When this function is called, it means that registers must be
4060 saved on entry to this function. So we migrate the call to the
4061 first insn of this function. */
4065 /* Do whatever the machine needs done in this case. */
4066 val
= targetm
.calls
.expand_builtin_saveregs ();
4071 saveregs_value
= val
;
4073 /* Put the insns after the NOTE that starts the function. If this
4074 is inside a start_sequence, make the outer-level insn chain current, so
4075 the code is placed at the start of the function. */
4076 push_topmost_sequence ();
4077 emit_insn_after (seq
, entry_of_function ());
4078 pop_topmost_sequence ();
4083 /* Expand a call to __builtin_next_arg. */
4086 expand_builtin_next_arg (void)
4088 /* Checking arguments is already done in fold_builtin_next_arg
4089 that must be called before this function. */
4090 return expand_binop (ptr_mode
, add_optab
,
4091 crtl
->args
.internal_arg_pointer
,
4092 crtl
->args
.arg_offset_rtx
,
4093 NULL_RTX
, 0, OPTAB_LIB_WIDEN
);
4096 /* Make it easier for the backends by protecting the valist argument
4097 from multiple evaluations. */
4100 stabilize_va_list_loc (location_t loc
, tree valist
, int needs_lvalue
)
4102 tree vatype
= targetm
.canonical_va_list_type (TREE_TYPE (valist
));
4104 /* The current way of determining the type of valist is completely
4105 bogus. We should have the information on the va builtin instead. */
4107 vatype
= targetm
.fn_abi_va_list (cfun
->decl
);
4109 if (TREE_CODE (vatype
) == ARRAY_TYPE
)
4111 if (TREE_SIDE_EFFECTS (valist
))
4112 valist
= save_expr (valist
);
4114 /* For this case, the backends will be expecting a pointer to
4115 vatype, but it's possible we've actually been given an array
4116 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4118 if (TREE_CODE (TREE_TYPE (valist
)) == ARRAY_TYPE
)
4120 tree p1
= build_pointer_type (TREE_TYPE (vatype
));
4121 valist
= build_fold_addr_expr_with_type_loc (loc
, valist
, p1
);
4126 tree pt
= build_pointer_type (vatype
);
4130 if (! TREE_SIDE_EFFECTS (valist
))
4133 valist
= fold_build1_loc (loc
, ADDR_EXPR
, pt
, valist
);
4134 TREE_SIDE_EFFECTS (valist
) = 1;
4137 if (TREE_SIDE_EFFECTS (valist
))
4138 valist
= save_expr (valist
);
4139 valist
= fold_build2_loc (loc
, MEM_REF
,
4140 vatype
, valist
, build_int_cst (pt
, 0));
4146 /* The "standard" definition of va_list is void*. */
4149 std_build_builtin_va_list (void)
4151 return ptr_type_node
;
4154 /* The "standard" abi va_list is va_list_type_node. */
4157 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED
)
4159 return va_list_type_node
;
4162 /* The "standard" type of va_list is va_list_type_node. */
4165 std_canonical_va_list_type (tree type
)
4169 if (INDIRECT_REF_P (type
))
4170 type
= TREE_TYPE (type
);
4171 else if (POINTER_TYPE_P (type
) && POINTER_TYPE_P (TREE_TYPE (type
)))
4172 type
= TREE_TYPE (type
);
4173 wtype
= va_list_type_node
;
4175 /* Treat structure va_list types. */
4176 if (TREE_CODE (wtype
) == RECORD_TYPE
&& POINTER_TYPE_P (htype
))
4177 htype
= TREE_TYPE (htype
);
4178 else if (TREE_CODE (wtype
) == ARRAY_TYPE
)
4180 /* If va_list is an array type, the argument may have decayed
4181 to a pointer type, e.g. by being passed to another function.
4182 In that case, unwrap both types so that we can compare the
4183 underlying records. */
4184 if (TREE_CODE (htype
) == ARRAY_TYPE
4185 || POINTER_TYPE_P (htype
))
4187 wtype
= TREE_TYPE (wtype
);
4188 htype
= TREE_TYPE (htype
);
4191 if (TYPE_MAIN_VARIANT (wtype
) == TYPE_MAIN_VARIANT (htype
))
4192 return va_list_type_node
;
4197 /* The "standard" implementation of va_start: just assign `nextarg' to
4201 std_expand_builtin_va_start (tree valist
, rtx nextarg
)
4203 rtx va_r
= expand_expr (valist
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
4204 convert_move (va_r
, nextarg
, 0);
4207 /* Expand EXP, a call to __builtin_va_start. */
4210 expand_builtin_va_start (tree exp
)
4214 location_t loc
= EXPR_LOCATION (exp
);
4216 if (call_expr_nargs (exp
) < 2)
4218 error_at (loc
, "too few arguments to function %<va_start%>");
4222 if (fold_builtin_next_arg (exp
, true))
4225 nextarg
= expand_builtin_next_arg ();
4226 valist
= stabilize_va_list_loc (loc
, CALL_EXPR_ARG (exp
, 0), 1);
4228 if (targetm
.expand_builtin_va_start
)
4229 targetm
.expand_builtin_va_start (valist
, nextarg
);
4231 std_expand_builtin_va_start (valist
, nextarg
);
4237 /* Return a dummy expression of type TYPE in order to keep going after an
4241 dummy_object (tree type
)
4243 tree t
= build_int_cst (build_pointer_type (type
), 0);
4244 return build2 (MEM_REF
, type
, t
, t
);
4247 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4248 builtin function, but a very special sort of operator. */
4250 enum gimplify_status
4251 gimplify_va_arg_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
4253 tree promoted_type
, have_va_type
;
4254 tree valist
= TREE_OPERAND (*expr_p
, 0);
4255 tree type
= TREE_TYPE (*expr_p
);
4257 location_t loc
= EXPR_LOCATION (*expr_p
);
4259 /* Verify that valist is of the proper type. */
4260 have_va_type
= TREE_TYPE (valist
);
4261 if (have_va_type
== error_mark_node
)
4263 have_va_type
= targetm
.canonical_va_list_type (have_va_type
);
4265 if (have_va_type
== NULL_TREE
)
4267 error_at (loc
, "first argument to %<va_arg%> not of type %<va_list%>");
4271 /* Generate a diagnostic for requesting data of a type that cannot
4272 be passed through `...' due to type promotion at the call site. */
4273 if ((promoted_type
= lang_hooks
.types
.type_promotes_to (type
))
4276 static bool gave_help
;
4279 /* Unfortunately, this is merely undefined, rather than a constraint
4280 violation, so we cannot make this an error. If this call is never
4281 executed, the program is still strictly conforming. */
4282 warned
= warning_at (loc
, 0,
4283 "%qT is promoted to %qT when passed through %<...%>",
4284 type
, promoted_type
);
4285 if (!gave_help
&& warned
)
4288 inform (loc
, "(so you should pass %qT not %qT to %<va_arg%>)",
4289 promoted_type
, type
);
4292 /* We can, however, treat "undefined" any way we please.
4293 Call abort to encourage the user to fix the program. */
4295 inform (loc
, "if this code is reached, the program will abort");
4296 /* Before the abort, allow the evaluation of the va_list
4297 expression to exit or longjmp. */
4298 gimplify_and_add (valist
, pre_p
);
4299 t
= build_call_expr_loc (loc
,
4300 builtin_decl_implicit (BUILT_IN_TRAP
), 0);
4301 gimplify_and_add (t
, pre_p
);
4303 /* This is dead code, but go ahead and finish so that the
4304 mode of the result comes out right. */
4305 *expr_p
= dummy_object (type
);
4310 /* Make it easier for the backends by protecting the valist argument
4311 from multiple evaluations. */
4312 if (TREE_CODE (have_va_type
) == ARRAY_TYPE
)
4314 /* For this case, the backends will be expecting a pointer to
4315 TREE_TYPE (abi), but it's possible we've
4316 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
4318 if (TREE_CODE (TREE_TYPE (valist
)) == ARRAY_TYPE
)
4320 tree p1
= build_pointer_type (TREE_TYPE (have_va_type
));
4321 valist
= fold_convert_loc (loc
, p1
,
4322 build_fold_addr_expr_loc (loc
, valist
));
4325 gimplify_expr (&valist
, pre_p
, post_p
, is_gimple_val
, fb_rvalue
);
4328 gimplify_expr (&valist
, pre_p
, post_p
, is_gimple_min_lval
, fb_lvalue
);
4330 if (!targetm
.gimplify_va_arg_expr
)
4331 /* FIXME: Once most targets are converted we should merely
4332 assert this is non-null. */
4335 *expr_p
= targetm
.gimplify_va_arg_expr (valist
, type
, pre_p
, post_p
);
4340 /* Expand EXP, a call to __builtin_va_end. */
4343 expand_builtin_va_end (tree exp
)
4345 tree valist
= CALL_EXPR_ARG (exp
, 0);
4347 /* Evaluate for side effects, if needed. I hate macros that don't
4349 if (TREE_SIDE_EFFECTS (valist
))
4350 expand_expr (valist
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4355 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4356 builtin rather than just as an assignment in stdarg.h because of the
4357 nastiness of array-type va_list types. */
4360 expand_builtin_va_copy (tree exp
)
4363 location_t loc
= EXPR_LOCATION (exp
);
4365 dst
= CALL_EXPR_ARG (exp
, 0);
4366 src
= CALL_EXPR_ARG (exp
, 1);
4368 dst
= stabilize_va_list_loc (loc
, dst
, 1);
4369 src
= stabilize_va_list_loc (loc
, src
, 0);
4371 gcc_assert (cfun
!= NULL
&& cfun
->decl
!= NULL_TREE
);
4373 if (TREE_CODE (targetm
.fn_abi_va_list (cfun
->decl
)) != ARRAY_TYPE
)
4375 t
= build2 (MODIFY_EXPR
, targetm
.fn_abi_va_list (cfun
->decl
), dst
, src
);
4376 TREE_SIDE_EFFECTS (t
) = 1;
4377 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4381 rtx dstb
, srcb
, size
;
4383 /* Evaluate to pointers. */
4384 dstb
= expand_expr (dst
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
4385 srcb
= expand_expr (src
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
4386 size
= expand_expr (TYPE_SIZE_UNIT (targetm
.fn_abi_va_list (cfun
->decl
)),
4387 NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
4389 dstb
= convert_memory_address (Pmode
, dstb
);
4390 srcb
= convert_memory_address (Pmode
, srcb
);
4392 /* "Dereference" to BLKmode memories. */
4393 dstb
= gen_rtx_MEM (BLKmode
, dstb
);
4394 set_mem_alias_set (dstb
, get_alias_set (TREE_TYPE (TREE_TYPE (dst
))));
4395 set_mem_align (dstb
, TYPE_ALIGN (targetm
.fn_abi_va_list (cfun
->decl
)));
4396 srcb
= gen_rtx_MEM (BLKmode
, srcb
);
4397 set_mem_alias_set (srcb
, get_alias_set (TREE_TYPE (TREE_TYPE (src
))));
4398 set_mem_align (srcb
, TYPE_ALIGN (targetm
.fn_abi_va_list (cfun
->decl
)));
4401 emit_block_move (dstb
, srcb
, size
, BLOCK_OP_NORMAL
);
4407 /* Expand a call to one of the builtin functions __builtin_frame_address or
4408 __builtin_return_address. */
4411 expand_builtin_frame_address (tree fndecl
, tree exp
)
4413 /* The argument must be a nonnegative integer constant.
4414 It counts the number of frames to scan up the stack.
4415 The value is the return address saved in that frame. */
4416 if (call_expr_nargs (exp
) == 0)
4417 /* Warning about missing arg was already issued. */
4419 else if (! host_integerp (CALL_EXPR_ARG (exp
, 0), 1))
4421 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
4422 error ("invalid argument to %<__builtin_frame_address%>");
4424 error ("invalid argument to %<__builtin_return_address%>");
4430 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl
),
4431 tree_low_cst (CALL_EXPR_ARG (exp
, 0), 1));
4433 /* Some ports cannot access arbitrary stack frames. */
4436 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
4437 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4439 warning (0, "unsupported argument to %<__builtin_return_address%>");
4443 /* For __builtin_frame_address, return what we've got. */
4444 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
4448 && ! CONSTANT_P (tem
))
4449 tem
= copy_addr_to_reg (tem
);
4454 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4455 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4456 is the same as for allocate_dynamic_stack_space. */
4459 expand_builtin_alloca (tree exp
, bool cannot_accumulate
)
4465 bool alloca_with_align
= (DECL_FUNCTION_CODE (get_callee_fndecl (exp
))
4466 == BUILT_IN_ALLOCA_WITH_ALIGN
);
4468 /* Emit normal call if we use mudflap. */
4473 = (alloca_with_align
4474 ? validate_arglist (exp
, INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
)
4475 : validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
));
4480 /* Compute the argument. */
4481 op0
= expand_normal (CALL_EXPR_ARG (exp
, 0));
4483 /* Compute the alignment. */
4484 align
= (alloca_with_align
4485 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp
, 1))
4486 : BIGGEST_ALIGNMENT
);
4488 /* Allocate the desired space. */
4489 result
= allocate_dynamic_stack_space (op0
, 0, align
, cannot_accumulate
);
4490 result
= convert_memory_address (ptr_mode
, result
);
4495 /* Expand a call to bswap builtin in EXP.
4496 Return NULL_RTX if a normal call should be emitted rather than expanding the
4497 function in-line. If convenient, the result should be placed in TARGET.
4498 SUBTARGET may be used as the target for computing one of EXP's operands. */
4501 expand_builtin_bswap (enum machine_mode target_mode
, tree exp
, rtx target
,
4507 if (!validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
))
4510 arg
= CALL_EXPR_ARG (exp
, 0);
4511 op0
= expand_expr (arg
,
4512 subtarget
&& GET_MODE (subtarget
) == target_mode
4513 ? subtarget
: NULL_RTX
,
4514 target_mode
, EXPAND_NORMAL
);
4515 if (GET_MODE (op0
) != target_mode
)
4516 op0
= convert_to_mode (target_mode
, op0
, 1);
4518 target
= expand_unop (target_mode
, bswap_optab
, op0
, target
, 1);
4520 gcc_assert (target
);
4522 return convert_to_mode (target_mode
, target
, 1);
4525 /* Expand a call to a unary builtin in EXP.
4526 Return NULL_RTX if a normal call should be emitted rather than expanding the
4527 function in-line. If convenient, the result should be placed in TARGET.
4528 SUBTARGET may be used as the target for computing one of EXP's operands. */
4531 expand_builtin_unop (enum machine_mode target_mode
, tree exp
, rtx target
,
4532 rtx subtarget
, optab op_optab
)
4536 if (!validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
))
4539 /* Compute the argument. */
4540 op0
= expand_expr (CALL_EXPR_ARG (exp
, 0),
4542 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 0)))
4543 == GET_MODE (subtarget
))) ? subtarget
: NULL_RTX
,
4544 VOIDmode
, EXPAND_NORMAL
);
4545 /* Compute op, into TARGET if possible.
4546 Set TARGET to wherever the result comes back. */
4547 target
= expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 0))),
4548 op_optab
, op0
, target
, op_optab
!= clrsb_optab
);
4549 gcc_assert (target
);
4551 return convert_to_mode (target_mode
, target
, 0);
4554 /* Expand a call to __builtin_expect. We just return our argument
4555 as the builtin_expect semantic should've been already executed by
4556 tree branch prediction pass. */
4559 expand_builtin_expect (tree exp
, rtx target
)
4563 if (call_expr_nargs (exp
) < 2)
4565 arg
= CALL_EXPR_ARG (exp
, 0);
4567 target
= expand_expr (arg
, target
, VOIDmode
, EXPAND_NORMAL
);
4568 /* When guessing was done, the hints should be already stripped away. */
4569 gcc_assert (!flag_guess_branch_prob
4570 || optimize
== 0 || seen_error ());
4574 /* Expand a call to __builtin_assume_aligned. We just return our first
4575 argument as the builtin_assume_aligned semantic should've been already
4579 expand_builtin_assume_aligned (tree exp
, rtx target
)
4581 if (call_expr_nargs (exp
) < 2)
4583 target
= expand_expr (CALL_EXPR_ARG (exp
, 0), target
, VOIDmode
,
4585 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp
, 1))
4586 && (call_expr_nargs (exp
) < 3
4587 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp
, 2))));
4592 expand_builtin_trap (void)
4597 rtx insn
= emit_insn (gen_trap ());
4598 /* For trap insns when not accumulating outgoing args force
4599 REG_ARGS_SIZE note to prevent crossjumping of calls with
4600 different args sizes. */
4601 if (!ACCUMULATE_OUTGOING_ARGS
)
4602 add_reg_note (insn
, REG_ARGS_SIZE
, GEN_INT (stack_pointer_delta
));
4606 emit_library_call (abort_libfunc
, LCT_NORETURN
, VOIDmode
, 0);
/* Expand a call to __builtin_unreachable.  We do nothing except emit
   a barrier saying that control flow will not pass here.

   It is the responsibility of the program being compiled to ensure
   that control flow does never reach __builtin_unreachable.  */

static void
expand_builtin_unreachable (void)
{
  emit_barrier ();
}
4621 /* Expand EXP, a call to fabs, fabsf or fabsl.
4622 Return NULL_RTX if a normal call should be emitted rather than expanding
4623 the function inline. If convenient, the result should be placed
4624 in TARGET. SUBTARGET may be used as the target for computing
4628 expand_builtin_fabs (tree exp
, rtx target
, rtx subtarget
)
4630 enum machine_mode mode
;
4634 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
4637 arg
= CALL_EXPR_ARG (exp
, 0);
4638 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
4639 mode
= TYPE_MODE (TREE_TYPE (arg
));
4640 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
4641 return expand_abs (mode
, op0
, target
, 0, safe_from_p (target
, arg
, 1));
4644 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4645 Return NULL is a normal call should be emitted rather than expanding the
4646 function inline. If convenient, the result should be placed in TARGET.
4647 SUBTARGET may be used as the target for computing the operand. */
4650 expand_builtin_copysign (tree exp
, rtx target
, rtx subtarget
)
4655 if (!validate_arglist (exp
, REAL_TYPE
, REAL_TYPE
, VOID_TYPE
))
4658 arg
= CALL_EXPR_ARG (exp
, 0);
4659 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
4661 arg
= CALL_EXPR_ARG (exp
, 1);
4662 op1
= expand_normal (arg
);
4664 return expand_copysign (op0
, op1
, target
);
4667 /* Create a new constant string literal and return a char* pointer to it.
4668 The STRING_CST value is the LEN characters at STR. */
4670 build_string_literal (int len
, const char *str
)
4672 tree t
, elem
, index
, type
;
4674 t
= build_string (len
, str
);
4675 elem
= build_type_variant (char_type_node
, 1, 0);
4676 index
= build_index_type (size_int (len
- 1));
4677 type
= build_array_type (elem
, index
);
4678 TREE_TYPE (t
) = type
;
4679 TREE_CONSTANT (t
) = 1;
4680 TREE_READONLY (t
) = 1;
4681 TREE_STATIC (t
) = 1;
4683 type
= build_pointer_type (elem
);
4684 t
= build1 (ADDR_EXPR
, type
,
4685 build4 (ARRAY_REF
, elem
,
4686 t
, integer_zero_node
, NULL_TREE
, NULL_TREE
));
4690 /* Expand a call to __builtin___clear_cache. */
4693 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED
)
4695 #ifndef HAVE_clear_cache
4696 #ifdef CLEAR_INSN_CACHE
4697 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4698 does something. Just do the default expansion to a call to
4702 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4703 does nothing. There is no need to call it. Do nothing. */
4705 #endif /* CLEAR_INSN_CACHE */
4707 /* We have a "clear_cache" insn, and it will handle everything. */
4709 rtx begin_rtx
, end_rtx
;
4711 /* We must not expand to a library call. If we did, any
4712 fallback library function in libgcc that might contain a call to
4713 __builtin___clear_cache() would recurse infinitely. */
4714 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
4716 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4720 if (HAVE_clear_cache
)
4722 struct expand_operand ops
[2];
4724 begin
= CALL_EXPR_ARG (exp
, 0);
4725 begin_rtx
= expand_expr (begin
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
4727 end
= CALL_EXPR_ARG (exp
, 1);
4728 end_rtx
= expand_expr (end
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
4730 create_address_operand (&ops
[0], begin_rtx
);
4731 create_address_operand (&ops
[1], end_rtx
);
4732 if (maybe_expand_insn (CODE_FOR_clear_cache
, 2, ops
))
4736 #endif /* HAVE_clear_cache */
4739 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4742 round_trampoline_addr (rtx tramp
)
4744 rtx temp
, addend
, mask
;
4746 /* If we don't need too much alignment, we'll have been guaranteed
4747 proper alignment by get_trampoline_type. */
4748 if (TRAMPOLINE_ALIGNMENT
<= STACK_BOUNDARY
)
4751 /* Round address up to desired boundary. */
4752 temp
= gen_reg_rtx (Pmode
);
4753 addend
= gen_int_mode (TRAMPOLINE_ALIGNMENT
/ BITS_PER_UNIT
- 1, Pmode
);
4754 mask
= gen_int_mode (-TRAMPOLINE_ALIGNMENT
/ BITS_PER_UNIT
, Pmode
);
4756 temp
= expand_simple_binop (Pmode
, PLUS
, tramp
, addend
,
4757 temp
, 0, OPTAB_LIB_WIDEN
);
4758 tramp
= expand_simple_binop (Pmode
, AND
, temp
, mask
,
4759 temp
, 0, OPTAB_LIB_WIDEN
);
4765 expand_builtin_init_trampoline (tree exp
, bool onstack
)
4767 tree t_tramp
, t_func
, t_chain
;
4768 rtx m_tramp
, r_tramp
, r_chain
, tmp
;
4770 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
,
4771 POINTER_TYPE
, VOID_TYPE
))
4774 t_tramp
= CALL_EXPR_ARG (exp
, 0);
4775 t_func
= CALL_EXPR_ARG (exp
, 1);
4776 t_chain
= CALL_EXPR_ARG (exp
, 2);
4778 r_tramp
= expand_normal (t_tramp
);
4779 m_tramp
= gen_rtx_MEM (BLKmode
, r_tramp
);
4780 MEM_NOTRAP_P (m_tramp
) = 1;
4782 /* If ONSTACK, the TRAMP argument should be the address of a field
4783 within the local function's FRAME decl. Either way, let's see if
4784 we can fill in the MEM_ATTRs for this memory. */
4785 if (TREE_CODE (t_tramp
) == ADDR_EXPR
)
4786 set_mem_attributes (m_tramp
, TREE_OPERAND (t_tramp
, 0), true);
4788 /* Creator of a heap trampoline is responsible for making sure the
4789 address is aligned to at least STACK_BOUNDARY. Normally malloc
4790 will ensure this anyhow. */
4791 tmp
= round_trampoline_addr (r_tramp
);
4794 m_tramp
= change_address (m_tramp
, BLKmode
, tmp
);
4795 set_mem_align (m_tramp
, TRAMPOLINE_ALIGNMENT
);
4796 set_mem_size (m_tramp
, TRAMPOLINE_SIZE
);
4799 /* The FUNC argument should be the address of the nested function.
4800 Extract the actual function decl to pass to the hook. */
4801 gcc_assert (TREE_CODE (t_func
) == ADDR_EXPR
);
4802 t_func
= TREE_OPERAND (t_func
, 0);
4803 gcc_assert (TREE_CODE (t_func
) == FUNCTION_DECL
);
4805 r_chain
= expand_normal (t_chain
);
4807 /* Generate insns to initialize the trampoline. */
4808 targetm
.calls
.trampoline_init (m_tramp
, t_func
, r_chain
);
4812 trampolines_created
= 1;
4814 warning_at (DECL_SOURCE_LOCATION (t_func
), OPT_Wtrampolines
,
4815 "trampoline generated for nested function %qD", t_func
);
4822 expand_builtin_adjust_trampoline (tree exp
)
4826 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
4829 tramp
= expand_normal (CALL_EXPR_ARG (exp
, 0));
4830 tramp
= round_trampoline_addr (tramp
);
4831 if (targetm
.calls
.trampoline_adjust_address
)
4832 tramp
= targetm
.calls
.trampoline_adjust_address (tramp
);
4837 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4838 function. The function first checks whether the back end provides
4839 an insn to implement signbit for the respective mode. If not, it
4840 checks whether the floating point format of the value is such that
4841 the sign bit can be extracted. If that is not the case, the
4842 function returns NULL_RTX to indicate that a normal call should be
4843 emitted rather than expanding the function in-line. EXP is the
4844 expression that is a call to the builtin function; if convenient,
4845 the result should be placed in TARGET. */
4847 expand_builtin_signbit (tree exp
, rtx target
)
4849 const struct real_format
*fmt
;
4850 enum machine_mode fmode
, imode
, rmode
;
4853 enum insn_code icode
;
4855 location_t loc
= EXPR_LOCATION (exp
);
4857 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
4860 arg
= CALL_EXPR_ARG (exp
, 0);
4861 fmode
= TYPE_MODE (TREE_TYPE (arg
));
4862 rmode
= TYPE_MODE (TREE_TYPE (exp
));
4863 fmt
= REAL_MODE_FORMAT (fmode
);
4865 arg
= builtin_save_expr (arg
);
4867 /* Expand the argument yielding a RTX expression. */
4868 temp
= expand_normal (arg
);
4870 /* Check if the back end provides an insn that handles signbit for the
4872 icode
= optab_handler (signbit_optab
, fmode
);
4873 if (icode
!= CODE_FOR_nothing
)
4875 rtx last
= get_last_insn ();
4876 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
4877 if (maybe_emit_unop_insn (icode
, target
, temp
, UNKNOWN
))
4879 delete_insns_since (last
);
4882 /* For floating point formats without a sign bit, implement signbit
4884 bitpos
= fmt
->signbit_ro
;
4887 /* But we can't do this if the format supports signed zero. */
4888 if (fmt
->has_signed_zero
&& HONOR_SIGNED_ZEROS (fmode
))
4891 arg
= fold_build2_loc (loc
, LT_EXPR
, TREE_TYPE (exp
), arg
,
4892 build_real (TREE_TYPE (arg
), dconst0
));
4893 return expand_expr (arg
, target
, VOIDmode
, EXPAND_NORMAL
);
4896 if (GET_MODE_SIZE (fmode
) <= UNITS_PER_WORD
)
4898 imode
= int_mode_for_mode (fmode
);
4899 if (imode
== BLKmode
)
4901 temp
= gen_lowpart (imode
, temp
);
4906 /* Handle targets with different FP word orders. */
4907 if (FLOAT_WORDS_BIG_ENDIAN
)
4908 word
= (GET_MODE_BITSIZE (fmode
) - bitpos
) / BITS_PER_WORD
;
4910 word
= bitpos
/ BITS_PER_WORD
;
4911 temp
= operand_subword_force (temp
, word
, fmode
);
4912 bitpos
= bitpos
% BITS_PER_WORD
;
4915 /* Force the intermediate word_mode (or narrower) result into a
4916 register. This avoids attempting to create paradoxical SUBREGs
4917 of floating point modes below. */
4918 temp
= force_reg (imode
, temp
);
4920 /* If the bitpos is within the "result mode" lowpart, the operation
4921 can be implement with a single bitwise AND. Otherwise, we need
4922 a right shift and an AND. */
4924 if (bitpos
< GET_MODE_BITSIZE (rmode
))
4926 double_int mask
= double_int_zero
.set_bit (bitpos
);
4928 if (GET_MODE_SIZE (imode
) > GET_MODE_SIZE (rmode
))
4929 temp
= gen_lowpart (rmode
, temp
);
4930 temp
= expand_binop (rmode
, and_optab
, temp
,
4931 immed_double_int_const (mask
, rmode
),
4932 NULL_RTX
, 1, OPTAB_LIB_WIDEN
);
4936 /* Perform a logical right shift to place the signbit in the least
4937 significant bit, then truncate the result to the desired mode
4938 and mask just this bit. */
4939 temp
= expand_shift (RSHIFT_EXPR
, imode
, temp
, bitpos
, NULL_RTX
, 1);
4940 temp
= gen_lowpart (rmode
, temp
);
4941 temp
= expand_binop (rmode
, and_optab
, temp
, const1_rtx
,
4942 NULL_RTX
, 1, OPTAB_LIB_WIDEN
);
4948 /* Expand fork or exec calls. TARGET is the desired target of the
4949 call. EXP is the call. FN is the
4950 identificator of the actual function. IGNORE is nonzero if the
4951 value is to be ignored. */
4954 expand_builtin_fork_or_exec (tree fn
, tree exp
, rtx target
, int ignore
)
4959 /* If we are not profiling, just call the function. */
4960 if (!profile_arc_flag
)
4963 /* Otherwise call the wrapper. This should be equivalent for the rest of
4964 compiler, so the code does not diverge, and the wrapper may run the
4965 code necessary for keeping the profiling sane. */
4967 switch (DECL_FUNCTION_CODE (fn
))
4970 id
= get_identifier ("__gcov_fork");
4973 case BUILT_IN_EXECL
:
4974 id
= get_identifier ("__gcov_execl");
4977 case BUILT_IN_EXECV
:
4978 id
= get_identifier ("__gcov_execv");
4981 case BUILT_IN_EXECLP
:
4982 id
= get_identifier ("__gcov_execlp");
4985 case BUILT_IN_EXECLE
:
4986 id
= get_identifier ("__gcov_execle");
4989 case BUILT_IN_EXECVP
:
4990 id
= get_identifier ("__gcov_execvp");
4993 case BUILT_IN_EXECVE
:
4994 id
= get_identifier ("__gcov_execve");
5001 decl
= build_decl (DECL_SOURCE_LOCATION (fn
),
5002 FUNCTION_DECL
, id
, TREE_TYPE (fn
));
5003 DECL_EXTERNAL (decl
) = 1;
5004 TREE_PUBLIC (decl
) = 1;
5005 DECL_ARTIFICIAL (decl
) = 1;
5006 TREE_NOTHROW (decl
) = 1;
5007 DECL_VISIBILITY (decl
) = VISIBILITY_DEFAULT
;
5008 DECL_VISIBILITY_SPECIFIED (decl
) = 1;
5009 call
= rewrite_call_expr (EXPR_LOCATION (exp
), exp
, 0, decl
, 0);
5010 return expand_call (call
, target
, ignore
);
5015 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5016 the pointer in these functions is void*, the tree optimizers may remove
5017 casts. The mode computed in expand_builtin isn't reliable either, due
5018 to __sync_bool_compare_and_swap.
5020 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5021 group of builtins. This gives us log2 of the mode size. */
5023 static inline enum machine_mode
5024 get_builtin_sync_mode (int fcode_diff
)
5026 /* The size is not negotiable, so ask not to get BLKmode in return
5027 if the target indicates that a smaller size would be better. */
5028 return mode_for_size (BITS_PER_UNIT
<< fcode_diff
, MODE_INT
, 0);
5031 /* Expand the memory expression LOC and return the appropriate memory operand
5032 for the builtin_sync operations. */
5035 get_builtin_sync_mem (tree loc
, enum machine_mode mode
)
5039 addr
= expand_expr (loc
, NULL_RTX
, ptr_mode
, EXPAND_SUM
);
5040 addr
= convert_memory_address (Pmode
, addr
);
5042 /* Note that we explicitly do not want any alias information for this
5043 memory, so that we kill all other live memories. Otherwise we don't
5044 satisfy the full barrier semantics of the intrinsic. */
5045 mem
= validize_mem (gen_rtx_MEM (mode
, addr
));
5047 /* The alignment needs to be at least according to that of the mode. */
5048 set_mem_align (mem
, MAX (GET_MODE_ALIGNMENT (mode
),
5049 get_pointer_alignment (loc
)));
5050 set_mem_alias_set (mem
, ALIAS_SET_MEMORY_BARRIER
);
5051 MEM_VOLATILE_P (mem
) = 1;
5056 /* Make sure an argument is in the right mode.
5057 EXP is the tree argument.
5058 MODE is the mode it should be in. */
5061 expand_expr_force_mode (tree exp
, enum machine_mode mode
)
5064 enum machine_mode old_mode
;
5066 val
= expand_expr (exp
, NULL_RTX
, mode
, EXPAND_NORMAL
);
5067 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5068 of CONST_INTs, where we know the old_mode only from the call argument. */
5070 old_mode
= GET_MODE (val
);
5071 if (old_mode
== VOIDmode
)
5072 old_mode
= TYPE_MODE (TREE_TYPE (exp
));
5073 val
= convert_modes (mode
, old_mode
, val
, 1);
5078 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5079 EXP is the CALL_EXPR. CODE is the rtx code
5080 that corresponds to the arithmetic or logical operation from the name;
5081 an exception here is that NOT actually means NAND. TARGET is an optional
5082 place for us to store the results; AFTER is true if this is the
5083 fetch_and_xxx form. */
5086 expand_builtin_sync_operation (enum machine_mode mode
, tree exp
,
5087 enum rtx_code code
, bool after
,
5091 location_t loc
= EXPR_LOCATION (exp
);
5093 if (code
== NOT
&& warn_sync_nand
)
5095 tree fndecl
= get_callee_fndecl (exp
);
5096 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
5098 static bool warned_f_a_n
, warned_n_a_f
;
5102 case BUILT_IN_SYNC_FETCH_AND_NAND_1
:
5103 case BUILT_IN_SYNC_FETCH_AND_NAND_2
:
5104 case BUILT_IN_SYNC_FETCH_AND_NAND_4
:
5105 case BUILT_IN_SYNC_FETCH_AND_NAND_8
:
5106 case BUILT_IN_SYNC_FETCH_AND_NAND_16
:
5110 fndecl
= builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N
);
5111 inform (loc
, "%qD changed semantics in GCC 4.4", fndecl
);
5112 warned_f_a_n
= true;
5115 case BUILT_IN_SYNC_NAND_AND_FETCH_1
:
5116 case BUILT_IN_SYNC_NAND_AND_FETCH_2
:
5117 case BUILT_IN_SYNC_NAND_AND_FETCH_4
:
5118 case BUILT_IN_SYNC_NAND_AND_FETCH_8
:
5119 case BUILT_IN_SYNC_NAND_AND_FETCH_16
:
5123 fndecl
= builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N
);
5124 inform (loc
, "%qD changed semantics in GCC 4.4", fndecl
);
5125 warned_n_a_f
= true;
5133 /* Expand the operands. */
5134 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5135 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
5137 return expand_atomic_fetch_op (target
, mem
, val
, code
, MEMMODEL_SEQ_CST
,
5141 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5142 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5143 true if this is the boolean form. TARGET is a place for us to store the
5144 results; this is NOT optional if IS_BOOL is true. */
5147 expand_builtin_compare_and_swap (enum machine_mode mode
, tree exp
,
5148 bool is_bool
, rtx target
)
5150 rtx old_val
, new_val
, mem
;
5153 /* Expand the operands. */
5154 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5155 old_val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
5156 new_val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 2), mode
);
5158 pbool
= poval
= NULL
;
5159 if (target
!= const0_rtx
)
5166 if (!expand_atomic_compare_and_swap (pbool
, poval
, mem
, old_val
, new_val
,
5167 false, MEMMODEL_SEQ_CST
,
5174 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5175 general form is actually an atomic exchange, and some targets only
5176 support a reduced form with the second argument being a constant 1.
5177 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5181 expand_builtin_sync_lock_test_and_set (enum machine_mode mode
, tree exp
,
5186 /* Expand the operands. */
5187 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5188 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
5190 return expand_sync_lock_test_and_set (target
, mem
, val
);
5193 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5196 expand_builtin_sync_lock_release (enum machine_mode mode
, tree exp
)
5200 /* Expand the operands. */
5201 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5203 expand_atomic_store (mem
, const0_rtx
, MEMMODEL_RELEASE
, true);
5206 /* Given an integer representing an ``enum memmodel'', verify its
5207 correctness and return the memory model enum. */
5209 static enum memmodel
5210 get_memmodel (tree exp
)
5213 unsigned HOST_WIDE_INT val
;
5215 /* If the parameter is not a constant, it's a run time value so we'll just
5216 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5217 if (TREE_CODE (exp
) != INTEGER_CST
)
5218 return MEMMODEL_SEQ_CST
;
5220 op
= expand_normal (exp
);
5223 if (targetm
.memmodel_check
)
5224 val
= targetm
.memmodel_check (val
);
5225 else if (val
& ~MEMMODEL_MASK
)
5227 warning (OPT_Winvalid_memory_model
,
5228 "Unknown architecture specifier in memory model to builtin.");
5229 return MEMMODEL_SEQ_CST
;
5232 if ((INTVAL (op
) & MEMMODEL_MASK
) >= MEMMODEL_LAST
)
5234 warning (OPT_Winvalid_memory_model
,
5235 "invalid memory model argument to builtin");
5236 return MEMMODEL_SEQ_CST
;
5239 return (enum memmodel
) val
;
5242 /* Expand the __atomic_exchange intrinsic:
5243 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5244 EXP is the CALL_EXPR.
5245 TARGET is an optional place for us to store the results. */
5248 expand_builtin_atomic_exchange (enum machine_mode mode
, tree exp
, rtx target
)
5251 enum memmodel model
;
5253 model
= get_memmodel (CALL_EXPR_ARG (exp
, 2));
5254 if ((model
& MEMMODEL_MASK
) == MEMMODEL_CONSUME
)
5256 error ("invalid memory model for %<__atomic_exchange%>");
5260 if (!flag_inline_atomics
)
5263 /* Expand the operands. */
5264 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5265 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
5267 return expand_atomic_exchange (target
, mem
, val
, model
);
5270 /* Expand the __atomic_compare_exchange intrinsic:
5271 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5272 TYPE desired, BOOL weak,
5273 enum memmodel success,
5274 enum memmodel failure)
5275 EXP is the CALL_EXPR.
5276 TARGET is an optional place for us to store the results. */
5279 expand_builtin_atomic_compare_exchange (enum machine_mode mode
, tree exp
,
5282 rtx expect
, desired
, mem
, oldval
;
5283 enum memmodel success
, failure
;
5287 success
= get_memmodel (CALL_EXPR_ARG (exp
, 4));
5288 failure
= get_memmodel (CALL_EXPR_ARG (exp
, 5));
5290 if ((failure
& MEMMODEL_MASK
) == MEMMODEL_RELEASE
5291 || (failure
& MEMMODEL_MASK
) == MEMMODEL_ACQ_REL
)
5293 error ("invalid failure memory model for %<__atomic_compare_exchange%>");
5297 if (failure
> success
)
5299 error ("failure memory model cannot be stronger than success "
5300 "memory model for %<__atomic_compare_exchange%>");
5304 if (!flag_inline_atomics
)
5307 /* Expand the operands. */
5308 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5310 expect
= expand_normal (CALL_EXPR_ARG (exp
, 1));
5311 expect
= convert_memory_address (Pmode
, expect
);
5312 expect
= gen_rtx_MEM (mode
, expect
);
5313 desired
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 2), mode
);
5315 weak
= CALL_EXPR_ARG (exp
, 3);
5317 if (host_integerp (weak
, 0) && tree_low_cst (weak
, 0) != 0)
5321 if (!expand_atomic_compare_and_swap ((target
== const0_rtx
? NULL
: &target
),
5322 &oldval
, mem
, oldval
, desired
,
5323 is_weak
, success
, failure
))
5326 if (oldval
!= expect
)
5327 emit_move_insn (expect
, oldval
);
5332 /* Expand the __atomic_load intrinsic:
5333 TYPE __atomic_load (TYPE *object, enum memmodel)
5334 EXP is the CALL_EXPR.
5335 TARGET is an optional place for us to store the results. */
5338 expand_builtin_atomic_load (enum machine_mode mode
, tree exp
, rtx target
)
5341 enum memmodel model
;
5343 model
= get_memmodel (CALL_EXPR_ARG (exp
, 1));
5344 if ((model
& MEMMODEL_MASK
) == MEMMODEL_RELEASE
5345 || (model
& MEMMODEL_MASK
) == MEMMODEL_ACQ_REL
)
5347 error ("invalid memory model for %<__atomic_load%>");
5351 if (!flag_inline_atomics
)
5354 /* Expand the operand. */
5355 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5357 return expand_atomic_load (target
, mem
, model
);
5361 /* Expand the __atomic_store intrinsic:
5362 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5363 EXP is the CALL_EXPR.
5364 TARGET is an optional place for us to store the results. */
5367 expand_builtin_atomic_store (enum machine_mode mode
, tree exp
)
5370 enum memmodel model
;
5372 model
= get_memmodel (CALL_EXPR_ARG (exp
, 2));
5373 if ((model
& MEMMODEL_MASK
) != MEMMODEL_RELAXED
5374 && (model
& MEMMODEL_MASK
) != MEMMODEL_SEQ_CST
5375 && (model
& MEMMODEL_MASK
) != MEMMODEL_RELEASE
)
5377 error ("invalid memory model for %<__atomic_store%>");
5381 if (!flag_inline_atomics
)
5384 /* Expand the operands. */
5385 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5386 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
5388 return expand_atomic_store (mem
, val
, model
, false);
5391 /* Expand the __atomic_fetch_XXX intrinsic:
5392 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5393 EXP is the CALL_EXPR.
5394 TARGET is an optional place for us to store the results.
5395 CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
5396 FETCH_AFTER is true if returning the result of the operation.
5397 FETCH_AFTER is false if returning the value before the operation.
5398 IGNORE is true if the result is not used.
5399 EXT_CALL is the correct builtin for an external call if this cannot be
5400 resolved to an instruction sequence. */
5403 expand_builtin_atomic_fetch_op (enum machine_mode mode
, tree exp
, rtx target
,
5404 enum rtx_code code
, bool fetch_after
,
5405 bool ignore
, enum built_in_function ext_call
)
5408 enum memmodel model
;
5412 model
= get_memmodel (CALL_EXPR_ARG (exp
, 2));
5414 /* Expand the operands. */
5415 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5416 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
5418 /* Only try generating instructions if inlining is turned on. */
5419 if (flag_inline_atomics
)
5421 ret
= expand_atomic_fetch_op (target
, mem
, val
, code
, model
, fetch_after
);
5426 /* Return if a different routine isn't needed for the library call. */
5427 if (ext_call
== BUILT_IN_NONE
)
5430 /* Change the call to the specified function. */
5431 fndecl
= get_callee_fndecl (exp
);
5432 addr
= CALL_EXPR_FN (exp
);
5435 gcc_assert (TREE_OPERAND (addr
, 0) == fndecl
);
5436 TREE_OPERAND (addr
, 0) = builtin_decl_explicit (ext_call
);
5438 /* Expand the call here so we can emit trailing code. */
5439 ret
= expand_call (exp
, target
, ignore
);
5441 /* Replace the original function just in case it matters. */
5442 TREE_OPERAND (addr
, 0) = fndecl
;
5444 /* Then issue the arithmetic correction to return the right result. */
5449 ret
= expand_simple_binop (mode
, AND
, ret
, val
, NULL_RTX
, true,
5451 ret
= expand_simple_unop (mode
, NOT
, ret
, target
, true);
5454 ret
= expand_simple_binop (mode
, code
, ret
, val
, target
, true,
/* Fallbacks when the target provides no atomic_clear pattern; the
   gen_atomic_clear stub is never reached because HAVE_atomic_clear is 0.  */
#ifndef HAVE_atomic_clear
# define HAVE_atomic_clear 0
# define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
#endif
5466 /* Expand an atomic clear operation.
5467 void _atomic_clear (BOOL *obj, enum memmodel)
5468 EXP is the call expression. */
5471 expand_builtin_atomic_clear (tree exp
)
5473 enum machine_mode mode
;
5475 enum memmodel model
;
5477 mode
= mode_for_size (BOOL_TYPE_SIZE
, MODE_INT
, 0);
5478 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5479 model
= get_memmodel (CALL_EXPR_ARG (exp
, 1));
5481 if ((model
& MEMMODEL_MASK
) == MEMMODEL_ACQUIRE
5482 || (model
& MEMMODEL_MASK
) == MEMMODEL_ACQ_REL
)
5484 error ("invalid memory model for %<__atomic_store%>");
5488 if (HAVE_atomic_clear
)
5490 emit_insn (gen_atomic_clear (mem
, model
));
5494 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
5495 Failing that, a store is issued by __atomic_store. The only way this can
5496 fail is if the bool type is larger than a word size. Unlikely, but
5497 handle it anyway for completeness. Assume a single threaded model since
5498 there is no atomic support in this case, and no barriers are required. */
5499 ret
= expand_atomic_store (mem
, const0_rtx
, model
, true);
5501 emit_move_insn (mem
, const0_rtx
);
5505 /* Expand an atomic test_and_set operation.
5506 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
5507 EXP is the call expression. */
5510 expand_builtin_atomic_test_and_set (tree exp
, rtx target
)
5513 enum memmodel model
;
5514 enum machine_mode mode
;
5516 mode
= mode_for_size (BOOL_TYPE_SIZE
, MODE_INT
, 0);
5517 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5518 model
= get_memmodel (CALL_EXPR_ARG (exp
, 1));
5520 return expand_atomic_test_and_set (target
, mem
, model
);
5524 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5525 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5528 fold_builtin_atomic_always_lock_free (tree arg0
, tree arg1
)
5531 enum machine_mode mode
;
5532 unsigned int mode_align
, type_align
;
5534 if (TREE_CODE (arg0
) != INTEGER_CST
)
5537 size
= INTVAL (expand_normal (arg0
)) * BITS_PER_UNIT
;
5538 mode
= mode_for_size (size
, MODE_INT
, 0);
5539 mode_align
= GET_MODE_ALIGNMENT (mode
);
5541 if (TREE_CODE (arg1
) == INTEGER_CST
&& INTVAL (expand_normal (arg1
)) == 0)
5542 type_align
= mode_align
;
5545 tree ttype
= TREE_TYPE (arg1
);
5547 /* This function is usually invoked and folded immediately by the front
5548 end before anything else has a chance to look at it. The pointer
5549 parameter at this point is usually cast to a void *, so check for that
5550 and look past the cast. */
5551 if (TREE_CODE (arg1
) == NOP_EXPR
&& POINTER_TYPE_P (ttype
)
5552 && VOID_TYPE_P (TREE_TYPE (ttype
)))
5553 arg1
= TREE_OPERAND (arg1
, 0);
5555 ttype
= TREE_TYPE (arg1
);
5556 gcc_assert (POINTER_TYPE_P (ttype
));
5558 /* Get the underlying type of the object. */
5559 ttype
= TREE_TYPE (ttype
);
5560 type_align
= TYPE_ALIGN (ttype
);
5563 /* If the object has smaller alignment, the the lock free routines cannot
5565 if (type_align
< mode_align
)
5566 return boolean_false_node
;
5568 /* Check if a compare_and_swap pattern exists for the mode which represents
5569 the required size. The pattern is not allowed to fail, so the existence
5570 of the pattern indicates support is present. */
5571 if (can_compare_and_swap_p (mode
, true))
5572 return boolean_true_node
;
5574 return boolean_false_node
;
5577 /* Return true if the parameters to call EXP represent an object which will
5578 always generate lock free instructions. The first argument represents the
5579 size of the object, and the second parameter is a pointer to the object
5580 itself. If NULL is passed for the object, then the result is based on
5581 typical alignment for an object of the specified size. Otherwise return
5585 expand_builtin_atomic_always_lock_free (tree exp
)
5588 tree arg0
= CALL_EXPR_ARG (exp
, 0);
5589 tree arg1
= CALL_EXPR_ARG (exp
, 1);
5591 if (TREE_CODE (arg0
) != INTEGER_CST
)
5593 error ("non-constant argument 1 to __atomic_always_lock_free");
5597 size
= fold_builtin_atomic_always_lock_free (arg0
, arg1
);
5598 if (size
== boolean_true_node
)
5603 /* Return a one or zero if it can be determined that object ARG1 of size ARG
5604 is lock free on this architecture. */
5607 fold_builtin_atomic_is_lock_free (tree arg0
, tree arg1
)
5609 if (!flag_inline_atomics
)
5612 /* If it isn't always lock free, don't generate a result. */
5613 if (fold_builtin_atomic_always_lock_free (arg0
, arg1
) == boolean_true_node
)
5614 return boolean_true_node
;
5619 /* Return true if the parameters to call EXP represent an object which will
5620 always generate lock free instructions. The first argument represents the
5621 size of the object, and the second parameter is a pointer to the object
5622 itself. If NULL is passed for the object, then the result is based on
5623 typical alignment for an object of the specified size. Otherwise return
5627 expand_builtin_atomic_is_lock_free (tree exp
)
5630 tree arg0
= CALL_EXPR_ARG (exp
, 0);
5631 tree arg1
= CALL_EXPR_ARG (exp
, 1);
5633 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0
)))
5635 error ("non-integer argument 1 to __atomic_is_lock_free");
5639 if (!flag_inline_atomics
)
5642 /* If the value is known at compile time, return the RTX for it. */
5643 size
= fold_builtin_atomic_is_lock_free (arg0
, arg1
);
5644 if (size
== boolean_true_node
)
5650 /* Expand the __atomic_thread_fence intrinsic:
5651 void __atomic_thread_fence (enum memmodel)
5652 EXP is the CALL_EXPR. */
5655 expand_builtin_atomic_thread_fence (tree exp
)
5657 enum memmodel model
= get_memmodel (CALL_EXPR_ARG (exp
, 0));
5658 expand_mem_thread_fence (model
);
5661 /* Expand the __atomic_signal_fence intrinsic:
5662 void __atomic_signal_fence (enum memmodel)
5663 EXP is the CALL_EXPR. */
5666 expand_builtin_atomic_signal_fence (tree exp
)
5668 enum memmodel model
= get_memmodel (CALL_EXPR_ARG (exp
, 0));
5669 expand_mem_signal_fence (model
);
5672 /* Expand the __sync_synchronize intrinsic. */
5675 expand_builtin_sync_synchronize (void)
5677 expand_mem_thread_fence (MEMMODEL_SEQ_CST
);
5681 expand_builtin_thread_pointer (tree exp
, rtx target
)
5683 enum insn_code icode
;
5684 if (!validate_arglist (exp
, VOID_TYPE
))
5686 icode
= direct_optab_handler (get_thread_pointer_optab
, Pmode
);
5687 if (icode
!= CODE_FOR_nothing
)
5689 struct expand_operand op
;
5690 if (!REG_P (target
) || GET_MODE (target
) != Pmode
)
5691 target
= gen_reg_rtx (Pmode
);
5692 create_output_operand (&op
, target
, Pmode
);
5693 expand_insn (icode
, 1, &op
);
5696 error ("__builtin_thread_pointer is not supported on this target");
5701 expand_builtin_set_thread_pointer (tree exp
)
5703 enum insn_code icode
;
5704 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
5706 icode
= direct_optab_handler (set_thread_pointer_optab
, Pmode
);
5707 if (icode
!= CODE_FOR_nothing
)
5709 struct expand_operand op
;
5710 rtx val
= expand_expr (CALL_EXPR_ARG (exp
, 0), NULL_RTX
,
5711 Pmode
, EXPAND_NORMAL
);
5712 create_input_operand (&op
, val
, Pmode
);
5713 expand_insn (icode
, 1, &op
);
5716 error ("__builtin_set_thread_pointer is not supported on this target");
5720 /* Expand an expression EXP that calls a built-in function,
5721 with result going to TARGET if that's convenient
5722 (and in mode MODE if that's convenient).
5723 SUBTARGET may be used as the target for computing one of EXP's operands.
5724 IGNORE is nonzero if the value is to be ignored. */
5727 expand_builtin (tree exp
, rtx target
, rtx subtarget
, enum machine_mode mode
,
5730 tree fndecl
= get_callee_fndecl (exp
);
5731 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
5732 enum machine_mode target_mode
= TYPE_MODE (TREE_TYPE (exp
));
5735 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
5736 return targetm
.expand_builtin (exp
, target
, subtarget
, mode
, ignore
);
5738 /* When not optimizing, generate calls to library functions for a certain
5741 && !called_as_built_in (fndecl
)
5742 && fcode
!= BUILT_IN_FORK
5743 && fcode
!= BUILT_IN_EXECL
5744 && fcode
!= BUILT_IN_EXECV
5745 && fcode
!= BUILT_IN_EXECLP
5746 && fcode
!= BUILT_IN_EXECLE
5747 && fcode
!= BUILT_IN_EXECVP
5748 && fcode
!= BUILT_IN_EXECVE
5749 && fcode
!= BUILT_IN_ALLOCA
5750 && fcode
!= BUILT_IN_ALLOCA_WITH_ALIGN
5751 && fcode
!= BUILT_IN_FREE
)
5752 return expand_call (exp
, target
, ignore
);
5754 /* The built-in function expanders test for target == const0_rtx
5755 to determine whether the function's result will be ignored. */
5757 target
= const0_rtx
;
5759 /* If the result of a pure or const built-in function is ignored, and
5760 none of its arguments are volatile, we can avoid expanding the
5761 built-in call and just evaluate the arguments for side-effects. */
5762 if (target
== const0_rtx
5763 && ((flags
= flags_from_decl_or_type (fndecl
)) & (ECF_CONST
| ECF_PURE
))
5764 && !(flags
& ECF_LOOPING_CONST_OR_PURE
))
5766 bool volatilep
= false;
5768 call_expr_arg_iterator iter
;
5770 FOR_EACH_CALL_EXPR_ARG (arg
, iter
, exp
)
5771 if (TREE_THIS_VOLATILE (arg
))
5779 FOR_EACH_CALL_EXPR_ARG (arg
, iter
, exp
)
5780 expand_expr (arg
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
5787 CASE_FLT_FN (BUILT_IN_FABS
):
5788 case BUILT_IN_FABSD32
:
5789 case BUILT_IN_FABSD64
:
5790 case BUILT_IN_FABSD128
:
5791 target
= expand_builtin_fabs (exp
, target
, subtarget
);
5796 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
5797 target
= expand_builtin_copysign (exp
, target
, subtarget
);
5802 /* Just do a normal library call if we were unable to fold
5804 CASE_FLT_FN (BUILT_IN_CABS
):
5807 CASE_FLT_FN (BUILT_IN_EXP
):
5808 CASE_FLT_FN (BUILT_IN_EXP10
):
5809 CASE_FLT_FN (BUILT_IN_POW10
):
5810 CASE_FLT_FN (BUILT_IN_EXP2
):
5811 CASE_FLT_FN (BUILT_IN_EXPM1
):
5812 CASE_FLT_FN (BUILT_IN_LOGB
):
5813 CASE_FLT_FN (BUILT_IN_LOG
):
5814 CASE_FLT_FN (BUILT_IN_LOG10
):
5815 CASE_FLT_FN (BUILT_IN_LOG2
):
5816 CASE_FLT_FN (BUILT_IN_LOG1P
):
5817 CASE_FLT_FN (BUILT_IN_TAN
):
5818 CASE_FLT_FN (BUILT_IN_ASIN
):
5819 CASE_FLT_FN (BUILT_IN_ACOS
):
5820 CASE_FLT_FN (BUILT_IN_ATAN
):
5821 CASE_FLT_FN (BUILT_IN_SIGNIFICAND
):
5822 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5823 because of possible accuracy problems. */
5824 if (! flag_unsafe_math_optimizations
)
5826 CASE_FLT_FN (BUILT_IN_SQRT
):
5827 CASE_FLT_FN (BUILT_IN_FLOOR
):
5828 CASE_FLT_FN (BUILT_IN_CEIL
):
5829 CASE_FLT_FN (BUILT_IN_TRUNC
):
5830 CASE_FLT_FN (BUILT_IN_ROUND
):
5831 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
5832 CASE_FLT_FN (BUILT_IN_RINT
):
5833 target
= expand_builtin_mathfn (exp
, target
, subtarget
);
5838 CASE_FLT_FN (BUILT_IN_FMA
):
5839 target
= expand_builtin_mathfn_ternary (exp
, target
, subtarget
);
5844 CASE_FLT_FN (BUILT_IN_ILOGB
):
5845 if (! flag_unsafe_math_optimizations
)
5847 CASE_FLT_FN (BUILT_IN_ISINF
):
5848 CASE_FLT_FN (BUILT_IN_FINITE
):
5849 case BUILT_IN_ISFINITE
:
5850 case BUILT_IN_ISNORMAL
:
5851 target
= expand_builtin_interclass_mathfn (exp
, target
);
5856 CASE_FLT_FN (BUILT_IN_ICEIL
):
5857 CASE_FLT_FN (BUILT_IN_LCEIL
):
5858 CASE_FLT_FN (BUILT_IN_LLCEIL
):
5859 CASE_FLT_FN (BUILT_IN_LFLOOR
):
5860 CASE_FLT_FN (BUILT_IN_IFLOOR
):
5861 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
5862 target
= expand_builtin_int_roundingfn (exp
, target
);
5867 CASE_FLT_FN (BUILT_IN_IRINT
):
5868 CASE_FLT_FN (BUILT_IN_LRINT
):
5869 CASE_FLT_FN (BUILT_IN_LLRINT
):
5870 CASE_FLT_FN (BUILT_IN_IROUND
):
5871 CASE_FLT_FN (BUILT_IN_LROUND
):
5872 CASE_FLT_FN (BUILT_IN_LLROUND
):
5873 target
= expand_builtin_int_roundingfn_2 (exp
, target
);
5878 CASE_FLT_FN (BUILT_IN_POWI
):
5879 target
= expand_builtin_powi (exp
, target
);
5884 CASE_FLT_FN (BUILT_IN_ATAN2
):
5885 CASE_FLT_FN (BUILT_IN_LDEXP
):
5886 CASE_FLT_FN (BUILT_IN_SCALB
):
5887 CASE_FLT_FN (BUILT_IN_SCALBN
):
5888 CASE_FLT_FN (BUILT_IN_SCALBLN
):
5889 if (! flag_unsafe_math_optimizations
)
5892 CASE_FLT_FN (BUILT_IN_FMOD
):
5893 CASE_FLT_FN (BUILT_IN_REMAINDER
):
5894 CASE_FLT_FN (BUILT_IN_DREM
):
5895 CASE_FLT_FN (BUILT_IN_POW
):
5896 target
= expand_builtin_mathfn_2 (exp
, target
, subtarget
);
5901 CASE_FLT_FN (BUILT_IN_CEXPI
):
5902 target
= expand_builtin_cexpi (exp
, target
);
5903 gcc_assert (target
);
5906 CASE_FLT_FN (BUILT_IN_SIN
):
5907 CASE_FLT_FN (BUILT_IN_COS
):
5908 if (! flag_unsafe_math_optimizations
)
5910 target
= expand_builtin_mathfn_3 (exp
, target
, subtarget
);
5915 CASE_FLT_FN (BUILT_IN_SINCOS
):
5916 if (! flag_unsafe_math_optimizations
)
5918 target
= expand_builtin_sincos (exp
);
5923 case BUILT_IN_APPLY_ARGS
:
5924 return expand_builtin_apply_args ();
5926 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5927 FUNCTION with a copy of the parameters described by
5928 ARGUMENTS, and ARGSIZE. It returns a block of memory
5929 allocated on the stack into which is stored all the registers
5930 that might possibly be used for returning the result of a
5931 function. ARGUMENTS is the value returned by
5932 __builtin_apply_args. ARGSIZE is the number of bytes of
5933 arguments that must be copied. ??? How should this value be
5934 computed? We'll also need a safe worst case value for varargs
5936 case BUILT_IN_APPLY
:
5937 if (!validate_arglist (exp
, POINTER_TYPE
,
5938 POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
)
5939 && !validate_arglist (exp
, REFERENCE_TYPE
,
5940 POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
5946 ops
[0] = expand_normal (CALL_EXPR_ARG (exp
, 0));
5947 ops
[1] = expand_normal (CALL_EXPR_ARG (exp
, 1));
5948 ops
[2] = expand_normal (CALL_EXPR_ARG (exp
, 2));
5950 return expand_builtin_apply (ops
[0], ops
[1], ops
[2]);
5953 /* __builtin_return (RESULT) causes the function to return the
5954 value described by RESULT. RESULT is address of the block of
5955 memory returned by __builtin_apply. */
5956 case BUILT_IN_RETURN
:
5957 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
5958 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp
, 0)));
5961 case BUILT_IN_SAVEREGS
:
5962 return expand_builtin_saveregs ();
5964 case BUILT_IN_VA_ARG_PACK
:
5965 /* All valid uses of __builtin_va_arg_pack () are removed during
5967 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp
);
5970 case BUILT_IN_VA_ARG_PACK_LEN
:
5971 /* All valid uses of __builtin_va_arg_pack_len () are removed during
5973 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp
);
5976 /* Return the address of the first anonymous stack arg. */
5977 case BUILT_IN_NEXT_ARG
:
5978 if (fold_builtin_next_arg (exp
, false))
5980 return expand_builtin_next_arg ();
5982 case BUILT_IN_CLEAR_CACHE
:
5983 target
= expand_builtin___clear_cache (exp
);
5988 case BUILT_IN_CLASSIFY_TYPE
:
5989 return expand_builtin_classify_type (exp
);
5991 case BUILT_IN_CONSTANT_P
:
5994 case BUILT_IN_FRAME_ADDRESS
:
5995 case BUILT_IN_RETURN_ADDRESS
:
5996 return expand_builtin_frame_address (fndecl
, exp
);
5998 /* Returns the address of the area where the structure is returned.
6000 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS
:
6001 if (call_expr_nargs (exp
) != 0
6002 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl
)))
6003 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl
))))
6006 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl
)), 0);
6008 case BUILT_IN_ALLOCA
:
6009 case BUILT_IN_ALLOCA_WITH_ALIGN
:
6010 /* If the allocation stems from the declaration of a variable-sized
6011 object, it cannot accumulate. */
6012 target
= expand_builtin_alloca (exp
, CALL_ALLOCA_FOR_VAR_P (exp
));
6017 case BUILT_IN_STACK_SAVE
:
6018 return expand_stack_save ();
6020 case BUILT_IN_STACK_RESTORE
:
6021 expand_stack_restore (CALL_EXPR_ARG (exp
, 0));
6024 case BUILT_IN_BSWAP16
:
6025 case BUILT_IN_BSWAP32
:
6026 case BUILT_IN_BSWAP64
:
6027 target
= expand_builtin_bswap (target_mode
, exp
, target
, subtarget
);
6032 CASE_INT_FN (BUILT_IN_FFS
):
6033 target
= expand_builtin_unop (target_mode
, exp
, target
,
6034 subtarget
, ffs_optab
);
6039 CASE_INT_FN (BUILT_IN_CLZ
):
6040 target
= expand_builtin_unop (target_mode
, exp
, target
,
6041 subtarget
, clz_optab
);
6046 CASE_INT_FN (BUILT_IN_CTZ
):
6047 target
= expand_builtin_unop (target_mode
, exp
, target
,
6048 subtarget
, ctz_optab
);
6053 CASE_INT_FN (BUILT_IN_CLRSB
):
6054 target
= expand_builtin_unop (target_mode
, exp
, target
,
6055 subtarget
, clrsb_optab
);
6060 CASE_INT_FN (BUILT_IN_POPCOUNT
):
6061 target
= expand_builtin_unop (target_mode
, exp
, target
,
6062 subtarget
, popcount_optab
);
6067 CASE_INT_FN (BUILT_IN_PARITY
):
6068 target
= expand_builtin_unop (target_mode
, exp
, target
,
6069 subtarget
, parity_optab
);
6074 case BUILT_IN_STRLEN
:
6075 target
= expand_builtin_strlen (exp
, target
, target_mode
);
6080 case BUILT_IN_STRCPY
:
6081 target
= expand_builtin_strcpy (exp
, target
);
6086 case BUILT_IN_STRNCPY
:
6087 target
= expand_builtin_strncpy (exp
, target
);
6092 case BUILT_IN_STPCPY
:
6093 target
= expand_builtin_stpcpy (exp
, target
, mode
);
6098 case BUILT_IN_MEMCPY
:
6099 target
= expand_builtin_memcpy (exp
, target
);
6104 case BUILT_IN_MEMPCPY
:
6105 target
= expand_builtin_mempcpy (exp
, target
, mode
);
6110 case BUILT_IN_MEMSET
:
6111 target
= expand_builtin_memset (exp
, target
, mode
);
6116 case BUILT_IN_BZERO
:
6117 target
= expand_builtin_bzero (exp
);
6122 case BUILT_IN_STRCMP
:
6123 target
= expand_builtin_strcmp (exp
, target
);
6128 case BUILT_IN_STRNCMP
:
6129 target
= expand_builtin_strncmp (exp
, target
, mode
);
6135 case BUILT_IN_MEMCMP
:
6136 target
= expand_builtin_memcmp (exp
, target
, mode
);
6141 case BUILT_IN_SETJMP
:
6142 /* This should have been lowered to the builtins below. */
6145 case BUILT_IN_SETJMP_SETUP
:
6146 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6147 and the receiver label. */
6148 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
6150 rtx buf_addr
= expand_expr (CALL_EXPR_ARG (exp
, 0), subtarget
,
6151 VOIDmode
, EXPAND_NORMAL
);
6152 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 1), 0);
6153 rtx label_r
= label_rtx (label
);
6155 /* This is copied from the handling of non-local gotos. */
6156 expand_builtin_setjmp_setup (buf_addr
, label_r
);
6157 nonlocal_goto_handler_labels
6158 = gen_rtx_EXPR_LIST (VOIDmode
, label_r
,
6159 nonlocal_goto_handler_labels
);
6160 /* ??? Do not let expand_label treat us as such since we would
6161 not want to be both on the list of non-local labels and on
6162 the list of forced labels. */
6163 FORCED_LABEL (label
) = 0;
6168 case BUILT_IN_SETJMP_DISPATCHER
:
6169 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6170 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6172 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 0), 0);
6173 rtx label_r
= label_rtx (label
);
6175 /* Remove the dispatcher label from the list of non-local labels
6176 since the receiver labels have been added to it above. */
6177 remove_node_from_expr_list (label_r
, &nonlocal_goto_handler_labels
);
6182 case BUILT_IN_SETJMP_RECEIVER
:
6183 /* __builtin_setjmp_receiver is passed the receiver label. */
6184 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6186 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 0), 0);
6187 rtx label_r
= label_rtx (label
);
6189 expand_builtin_setjmp_receiver (label_r
);
6194 /* __builtin_longjmp is passed a pointer to an array of five words.
6195 It's similar to the C library longjmp function but works with
6196 __builtin_setjmp above. */
6197 case BUILT_IN_LONGJMP
:
6198 if (validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
6200 rtx buf_addr
= expand_expr (CALL_EXPR_ARG (exp
, 0), subtarget
,
6201 VOIDmode
, EXPAND_NORMAL
);
6202 rtx value
= expand_normal (CALL_EXPR_ARG (exp
, 1));
6204 if (value
!= const1_rtx
)
6206 error ("%<__builtin_longjmp%> second argument must be 1");
6210 expand_builtin_longjmp (buf_addr
, value
);
6215 case BUILT_IN_NONLOCAL_GOTO
:
6216 target
= expand_builtin_nonlocal_goto (exp
);
6221 /* This updates the setjmp buffer that is its argument with the value
6222 of the current stack pointer. */
6223 case BUILT_IN_UPDATE_SETJMP_BUF
:
6224 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6227 = expand_normal (CALL_EXPR_ARG (exp
, 0));
6229 expand_builtin_update_setjmp_buf (buf_addr
);
6235 expand_builtin_trap ();
6238 case BUILT_IN_UNREACHABLE
:
6239 expand_builtin_unreachable ();
6242 CASE_FLT_FN (BUILT_IN_SIGNBIT
):
6243 case BUILT_IN_SIGNBITD32
:
6244 case BUILT_IN_SIGNBITD64
:
6245 case BUILT_IN_SIGNBITD128
:
6246 target
= expand_builtin_signbit (exp
, target
);
6251 /* Various hooks for the DWARF 2 __throw routine. */
6252 case BUILT_IN_UNWIND_INIT
:
6253 expand_builtin_unwind_init ();
6255 case BUILT_IN_DWARF_CFA
:
6256 return virtual_cfa_rtx
;
6257 #ifdef DWARF2_UNWIND_INFO
6258 case BUILT_IN_DWARF_SP_COLUMN
:
6259 return expand_builtin_dwarf_sp_column ();
6260 case BUILT_IN_INIT_DWARF_REG_SIZES
:
6261 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp
, 0));
6264 case BUILT_IN_FROB_RETURN_ADDR
:
6265 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp
, 0));
6266 case BUILT_IN_EXTRACT_RETURN_ADDR
:
6267 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp
, 0));
6268 case BUILT_IN_EH_RETURN
:
6269 expand_builtin_eh_return (CALL_EXPR_ARG (exp
, 0),
6270 CALL_EXPR_ARG (exp
, 1));
6272 #ifdef EH_RETURN_DATA_REGNO
6273 case BUILT_IN_EH_RETURN_DATA_REGNO
:
6274 return expand_builtin_eh_return_data_regno (exp
);
6276 case BUILT_IN_EXTEND_POINTER
:
6277 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp
, 0));
6278 case BUILT_IN_EH_POINTER
:
6279 return expand_builtin_eh_pointer (exp
);
6280 case BUILT_IN_EH_FILTER
:
6281 return expand_builtin_eh_filter (exp
);
6282 case BUILT_IN_EH_COPY_VALUES
:
6283 return expand_builtin_eh_copy_values (exp
);
6285 case BUILT_IN_VA_START
:
6286 return expand_builtin_va_start (exp
);
6287 case BUILT_IN_VA_END
:
6288 return expand_builtin_va_end (exp
);
6289 case BUILT_IN_VA_COPY
:
6290 return expand_builtin_va_copy (exp
);
6291 case BUILT_IN_EXPECT
:
6292 return expand_builtin_expect (exp
, target
);
6293 case BUILT_IN_ASSUME_ALIGNED
:
6294 return expand_builtin_assume_aligned (exp
, target
);
6295 case BUILT_IN_PREFETCH
:
6296 expand_builtin_prefetch (exp
);
6299 case BUILT_IN_INIT_TRAMPOLINE
:
6300 return expand_builtin_init_trampoline (exp
, true);
6301 case BUILT_IN_INIT_HEAP_TRAMPOLINE
:
6302 return expand_builtin_init_trampoline (exp
, false);
6303 case BUILT_IN_ADJUST_TRAMPOLINE
:
6304 return expand_builtin_adjust_trampoline (exp
);
6307 case BUILT_IN_EXECL
:
6308 case BUILT_IN_EXECV
:
6309 case BUILT_IN_EXECLP
:
6310 case BUILT_IN_EXECLE
:
6311 case BUILT_IN_EXECVP
:
6312 case BUILT_IN_EXECVE
:
6313 target
= expand_builtin_fork_or_exec (fndecl
, exp
, target
, ignore
);
6318 case BUILT_IN_SYNC_FETCH_AND_ADD_1
:
6319 case BUILT_IN_SYNC_FETCH_AND_ADD_2
:
6320 case BUILT_IN_SYNC_FETCH_AND_ADD_4
:
6321 case BUILT_IN_SYNC_FETCH_AND_ADD_8
:
6322 case BUILT_IN_SYNC_FETCH_AND_ADD_16
:
6323 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_ADD_1
);
6324 target
= expand_builtin_sync_operation (mode
, exp
, PLUS
, false, target
);
6329 case BUILT_IN_SYNC_FETCH_AND_SUB_1
:
6330 case BUILT_IN_SYNC_FETCH_AND_SUB_2
:
6331 case BUILT_IN_SYNC_FETCH_AND_SUB_4
:
6332 case BUILT_IN_SYNC_FETCH_AND_SUB_8
:
6333 case BUILT_IN_SYNC_FETCH_AND_SUB_16
:
6334 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_SUB_1
);
6335 target
= expand_builtin_sync_operation (mode
, exp
, MINUS
, false, target
);
6340 case BUILT_IN_SYNC_FETCH_AND_OR_1
:
6341 case BUILT_IN_SYNC_FETCH_AND_OR_2
:
6342 case BUILT_IN_SYNC_FETCH_AND_OR_4
:
6343 case BUILT_IN_SYNC_FETCH_AND_OR_8
:
6344 case BUILT_IN_SYNC_FETCH_AND_OR_16
:
6345 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_OR_1
);
6346 target
= expand_builtin_sync_operation (mode
, exp
, IOR
, false, target
);
6351 case BUILT_IN_SYNC_FETCH_AND_AND_1
:
6352 case BUILT_IN_SYNC_FETCH_AND_AND_2
:
6353 case BUILT_IN_SYNC_FETCH_AND_AND_4
:
6354 case BUILT_IN_SYNC_FETCH_AND_AND_8
:
6355 case BUILT_IN_SYNC_FETCH_AND_AND_16
:
6356 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_AND_1
);
6357 target
= expand_builtin_sync_operation (mode
, exp
, AND
, false, target
);
6362 case BUILT_IN_SYNC_FETCH_AND_XOR_1
:
6363 case BUILT_IN_SYNC_FETCH_AND_XOR_2
:
6364 case BUILT_IN_SYNC_FETCH_AND_XOR_4
:
6365 case BUILT_IN_SYNC_FETCH_AND_XOR_8
:
6366 case BUILT_IN_SYNC_FETCH_AND_XOR_16
:
6367 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_XOR_1
);
6368 target
= expand_builtin_sync_operation (mode
, exp
, XOR
, false, target
);
6373 case BUILT_IN_SYNC_FETCH_AND_NAND_1
:
6374 case BUILT_IN_SYNC_FETCH_AND_NAND_2
:
6375 case BUILT_IN_SYNC_FETCH_AND_NAND_4
:
6376 case BUILT_IN_SYNC_FETCH_AND_NAND_8
:
6377 case BUILT_IN_SYNC_FETCH_AND_NAND_16
:
6378 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_NAND_1
);
6379 target
= expand_builtin_sync_operation (mode
, exp
, NOT
, false, target
);
6384 case BUILT_IN_SYNC_ADD_AND_FETCH_1
:
6385 case BUILT_IN_SYNC_ADD_AND_FETCH_2
:
6386 case BUILT_IN_SYNC_ADD_AND_FETCH_4
:
6387 case BUILT_IN_SYNC_ADD_AND_FETCH_8
:
6388 case BUILT_IN_SYNC_ADD_AND_FETCH_16
:
6389 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_ADD_AND_FETCH_1
);
6390 target
= expand_builtin_sync_operation (mode
, exp
, PLUS
, true, target
);
6395 case BUILT_IN_SYNC_SUB_AND_FETCH_1
:
6396 case BUILT_IN_SYNC_SUB_AND_FETCH_2
:
6397 case BUILT_IN_SYNC_SUB_AND_FETCH_4
:
6398 case BUILT_IN_SYNC_SUB_AND_FETCH_8
:
6399 case BUILT_IN_SYNC_SUB_AND_FETCH_16
:
6400 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_SUB_AND_FETCH_1
);
6401 target
= expand_builtin_sync_operation (mode
, exp
, MINUS
, true, target
);
6406 case BUILT_IN_SYNC_OR_AND_FETCH_1
:
6407 case BUILT_IN_SYNC_OR_AND_FETCH_2
:
6408 case BUILT_IN_SYNC_OR_AND_FETCH_4
:
6409 case BUILT_IN_SYNC_OR_AND_FETCH_8
:
6410 case BUILT_IN_SYNC_OR_AND_FETCH_16
:
6411 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_OR_AND_FETCH_1
);
6412 target
= expand_builtin_sync_operation (mode
, exp
, IOR
, true, target
);
6417 case BUILT_IN_SYNC_AND_AND_FETCH_1
:
6418 case BUILT_IN_SYNC_AND_AND_FETCH_2
:
6419 case BUILT_IN_SYNC_AND_AND_FETCH_4
:
6420 case BUILT_IN_SYNC_AND_AND_FETCH_8
:
6421 case BUILT_IN_SYNC_AND_AND_FETCH_16
:
6422 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_AND_AND_FETCH_1
);
6423 target
= expand_builtin_sync_operation (mode
, exp
, AND
, true, target
);
6428 case BUILT_IN_SYNC_XOR_AND_FETCH_1
:
6429 case BUILT_IN_SYNC_XOR_AND_FETCH_2
:
6430 case BUILT_IN_SYNC_XOR_AND_FETCH_4
:
6431 case BUILT_IN_SYNC_XOR_AND_FETCH_8
:
6432 case BUILT_IN_SYNC_XOR_AND_FETCH_16
:
6433 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_XOR_AND_FETCH_1
);
6434 target
= expand_builtin_sync_operation (mode
, exp
, XOR
, true, target
);
6439 case BUILT_IN_SYNC_NAND_AND_FETCH_1
:
6440 case BUILT_IN_SYNC_NAND_AND_FETCH_2
:
6441 case BUILT_IN_SYNC_NAND_AND_FETCH_4
:
6442 case BUILT_IN_SYNC_NAND_AND_FETCH_8
:
6443 case BUILT_IN_SYNC_NAND_AND_FETCH_16
:
6444 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_NAND_AND_FETCH_1
);
6445 target
= expand_builtin_sync_operation (mode
, exp
, NOT
, true, target
);
6450 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1
:
6451 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2
:
6452 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4
:
6453 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8
:
6454 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16
:
6455 if (mode
== VOIDmode
)
6456 mode
= TYPE_MODE (boolean_type_node
);
6457 if (!target
|| !register_operand (target
, mode
))
6458 target
= gen_reg_rtx (mode
);
6460 mode
= get_builtin_sync_mode
6461 (fcode
- BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1
);
6462 target
= expand_builtin_compare_and_swap (mode
, exp
, true, target
);
6467 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1
:
6468 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2
:
6469 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4
:
6470 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8
:
6471 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16
:
6472 mode
= get_builtin_sync_mode
6473 (fcode
- BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1
);
6474 target
= expand_builtin_compare_and_swap (mode
, exp
, false, target
);
6479 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1
:
6480 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2
:
6481 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4
:
6482 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8
:
6483 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16
:
6484 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_LOCK_TEST_AND_SET_1
);
6485 target
= expand_builtin_sync_lock_test_and_set (mode
, exp
, target
);
6490 case BUILT_IN_SYNC_LOCK_RELEASE_1
:
6491 case BUILT_IN_SYNC_LOCK_RELEASE_2
:
6492 case BUILT_IN_SYNC_LOCK_RELEASE_4
:
6493 case BUILT_IN_SYNC_LOCK_RELEASE_8
:
6494 case BUILT_IN_SYNC_LOCK_RELEASE_16
:
6495 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_LOCK_RELEASE_1
);
6496 expand_builtin_sync_lock_release (mode
, exp
);
6499 case BUILT_IN_SYNC_SYNCHRONIZE
:
6500 expand_builtin_sync_synchronize ();
6503 case BUILT_IN_ATOMIC_EXCHANGE_1
:
6504 case BUILT_IN_ATOMIC_EXCHANGE_2
:
6505 case BUILT_IN_ATOMIC_EXCHANGE_4
:
6506 case BUILT_IN_ATOMIC_EXCHANGE_8
:
6507 case BUILT_IN_ATOMIC_EXCHANGE_16
:
6508 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_EXCHANGE_1
);
6509 target
= expand_builtin_atomic_exchange (mode
, exp
, target
);
6514 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
:
6515 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2
:
6516 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4
:
6517 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8
:
6518 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16
:
6520 unsigned int nargs
, z
;
6521 vec
<tree
, va_gc
> *vec
;
6524 get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
);
6525 target
= expand_builtin_atomic_compare_exchange (mode
, exp
, target
);
6529 /* If this is turned into an external library call, the weak parameter
6530 must be dropped to match the expected parameter list. */
6531 nargs
= call_expr_nargs (exp
);
6532 vec_alloc (vec
, nargs
- 1);
6533 for (z
= 0; z
< 3; z
++)
6534 vec
->quick_push (CALL_EXPR_ARG (exp
, z
));
6535 /* Skip the boolean weak parameter. */
6536 for (z
= 4; z
< 6; z
++)
6537 vec
->quick_push (CALL_EXPR_ARG (exp
, z
));
6538 exp
= build_call_vec (TREE_TYPE (exp
), CALL_EXPR_FN (exp
), vec
);
6542 case BUILT_IN_ATOMIC_LOAD_1
:
6543 case BUILT_IN_ATOMIC_LOAD_2
:
6544 case BUILT_IN_ATOMIC_LOAD_4
:
6545 case BUILT_IN_ATOMIC_LOAD_8
:
6546 case BUILT_IN_ATOMIC_LOAD_16
:
6547 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_LOAD_1
);
6548 target
= expand_builtin_atomic_load (mode
, exp
, target
);
6553 case BUILT_IN_ATOMIC_STORE_1
:
6554 case BUILT_IN_ATOMIC_STORE_2
:
6555 case BUILT_IN_ATOMIC_STORE_4
:
6556 case BUILT_IN_ATOMIC_STORE_8
:
6557 case BUILT_IN_ATOMIC_STORE_16
:
6558 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_STORE_1
);
6559 target
= expand_builtin_atomic_store (mode
, exp
);
6564 case BUILT_IN_ATOMIC_ADD_FETCH_1
:
6565 case BUILT_IN_ATOMIC_ADD_FETCH_2
:
6566 case BUILT_IN_ATOMIC_ADD_FETCH_4
:
6567 case BUILT_IN_ATOMIC_ADD_FETCH_8
:
6568 case BUILT_IN_ATOMIC_ADD_FETCH_16
:
6570 enum built_in_function lib
;
6571 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_ADD_FETCH_1
);
6572 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_ADD_1
+
6573 (fcode
- BUILT_IN_ATOMIC_ADD_FETCH_1
));
6574 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, PLUS
, true,
6580 case BUILT_IN_ATOMIC_SUB_FETCH_1
:
6581 case BUILT_IN_ATOMIC_SUB_FETCH_2
:
6582 case BUILT_IN_ATOMIC_SUB_FETCH_4
:
6583 case BUILT_IN_ATOMIC_SUB_FETCH_8
:
6584 case BUILT_IN_ATOMIC_SUB_FETCH_16
:
6586 enum built_in_function lib
;
6587 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_SUB_FETCH_1
);
6588 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_SUB_1
+
6589 (fcode
- BUILT_IN_ATOMIC_SUB_FETCH_1
));
6590 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, MINUS
, true,
6596 case BUILT_IN_ATOMIC_AND_FETCH_1
:
6597 case BUILT_IN_ATOMIC_AND_FETCH_2
:
6598 case BUILT_IN_ATOMIC_AND_FETCH_4
:
6599 case BUILT_IN_ATOMIC_AND_FETCH_8
:
6600 case BUILT_IN_ATOMIC_AND_FETCH_16
:
6602 enum built_in_function lib
;
6603 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_AND_FETCH_1
);
6604 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_AND_1
+
6605 (fcode
- BUILT_IN_ATOMIC_AND_FETCH_1
));
6606 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, AND
, true,
6612 case BUILT_IN_ATOMIC_NAND_FETCH_1
:
6613 case BUILT_IN_ATOMIC_NAND_FETCH_2
:
6614 case BUILT_IN_ATOMIC_NAND_FETCH_4
:
6615 case BUILT_IN_ATOMIC_NAND_FETCH_8
:
6616 case BUILT_IN_ATOMIC_NAND_FETCH_16
:
6618 enum built_in_function lib
;
6619 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_NAND_FETCH_1
);
6620 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_NAND_1
+
6621 (fcode
- BUILT_IN_ATOMIC_NAND_FETCH_1
));
6622 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, NOT
, true,
6628 case BUILT_IN_ATOMIC_XOR_FETCH_1
:
6629 case BUILT_IN_ATOMIC_XOR_FETCH_2
:
6630 case BUILT_IN_ATOMIC_XOR_FETCH_4
:
6631 case BUILT_IN_ATOMIC_XOR_FETCH_8
:
6632 case BUILT_IN_ATOMIC_XOR_FETCH_16
:
6634 enum built_in_function lib
;
6635 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_XOR_FETCH_1
);
6636 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_XOR_1
+
6637 (fcode
- BUILT_IN_ATOMIC_XOR_FETCH_1
));
6638 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, XOR
, true,
6644 case BUILT_IN_ATOMIC_OR_FETCH_1
:
6645 case BUILT_IN_ATOMIC_OR_FETCH_2
:
6646 case BUILT_IN_ATOMIC_OR_FETCH_4
:
6647 case BUILT_IN_ATOMIC_OR_FETCH_8
:
6648 case BUILT_IN_ATOMIC_OR_FETCH_16
:
6650 enum built_in_function lib
;
6651 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_OR_FETCH_1
);
6652 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_OR_1
+
6653 (fcode
- BUILT_IN_ATOMIC_OR_FETCH_1
));
6654 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, IOR
, true,
6660 case BUILT_IN_ATOMIC_FETCH_ADD_1
:
6661 case BUILT_IN_ATOMIC_FETCH_ADD_2
:
6662 case BUILT_IN_ATOMIC_FETCH_ADD_4
:
6663 case BUILT_IN_ATOMIC_FETCH_ADD_8
:
6664 case BUILT_IN_ATOMIC_FETCH_ADD_16
:
6665 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_ADD_1
);
6666 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, PLUS
, false,
6667 ignore
, BUILT_IN_NONE
);
6672 case BUILT_IN_ATOMIC_FETCH_SUB_1
:
6673 case BUILT_IN_ATOMIC_FETCH_SUB_2
:
6674 case BUILT_IN_ATOMIC_FETCH_SUB_4
:
6675 case BUILT_IN_ATOMIC_FETCH_SUB_8
:
6676 case BUILT_IN_ATOMIC_FETCH_SUB_16
:
6677 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_SUB_1
);
6678 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, MINUS
, false,
6679 ignore
, BUILT_IN_NONE
);
6684 case BUILT_IN_ATOMIC_FETCH_AND_1
:
6685 case BUILT_IN_ATOMIC_FETCH_AND_2
:
6686 case BUILT_IN_ATOMIC_FETCH_AND_4
:
6687 case BUILT_IN_ATOMIC_FETCH_AND_8
:
6688 case BUILT_IN_ATOMIC_FETCH_AND_16
:
6689 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_AND_1
);
6690 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, AND
, false,
6691 ignore
, BUILT_IN_NONE
);
6696 case BUILT_IN_ATOMIC_FETCH_NAND_1
:
6697 case BUILT_IN_ATOMIC_FETCH_NAND_2
:
6698 case BUILT_IN_ATOMIC_FETCH_NAND_4
:
6699 case BUILT_IN_ATOMIC_FETCH_NAND_8
:
6700 case BUILT_IN_ATOMIC_FETCH_NAND_16
:
6701 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_NAND_1
);
6702 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, NOT
, false,
6703 ignore
, BUILT_IN_NONE
);
6708 case BUILT_IN_ATOMIC_FETCH_XOR_1
:
6709 case BUILT_IN_ATOMIC_FETCH_XOR_2
:
6710 case BUILT_IN_ATOMIC_FETCH_XOR_4
:
6711 case BUILT_IN_ATOMIC_FETCH_XOR_8
:
6712 case BUILT_IN_ATOMIC_FETCH_XOR_16
:
6713 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_XOR_1
);
6714 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, XOR
, false,
6715 ignore
, BUILT_IN_NONE
);
6720 case BUILT_IN_ATOMIC_FETCH_OR_1
:
6721 case BUILT_IN_ATOMIC_FETCH_OR_2
:
6722 case BUILT_IN_ATOMIC_FETCH_OR_4
:
6723 case BUILT_IN_ATOMIC_FETCH_OR_8
:
6724 case BUILT_IN_ATOMIC_FETCH_OR_16
:
6725 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_OR_1
);
6726 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, IOR
, false,
6727 ignore
, BUILT_IN_NONE
);
6732 case BUILT_IN_ATOMIC_TEST_AND_SET
:
6733 return expand_builtin_atomic_test_and_set (exp
, target
);
6735 case BUILT_IN_ATOMIC_CLEAR
:
6736 return expand_builtin_atomic_clear (exp
);
6738 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE
:
6739 return expand_builtin_atomic_always_lock_free (exp
);
6741 case BUILT_IN_ATOMIC_IS_LOCK_FREE
:
6742 target
= expand_builtin_atomic_is_lock_free (exp
);
6747 case BUILT_IN_ATOMIC_THREAD_FENCE
:
6748 expand_builtin_atomic_thread_fence (exp
);
6751 case BUILT_IN_ATOMIC_SIGNAL_FENCE
:
6752 expand_builtin_atomic_signal_fence (exp
);
6755 case BUILT_IN_OBJECT_SIZE
:
6756 return expand_builtin_object_size (exp
);
6758 case BUILT_IN_MEMCPY_CHK
:
6759 case BUILT_IN_MEMPCPY_CHK
:
6760 case BUILT_IN_MEMMOVE_CHK
:
6761 case BUILT_IN_MEMSET_CHK
:
6762 target
= expand_builtin_memory_chk (exp
, target
, mode
, fcode
);
6767 case BUILT_IN_STRCPY_CHK
:
6768 case BUILT_IN_STPCPY_CHK
:
6769 case BUILT_IN_STRNCPY_CHK
:
6770 case BUILT_IN_STPNCPY_CHK
:
6771 case BUILT_IN_STRCAT_CHK
:
6772 case BUILT_IN_STRNCAT_CHK
:
6773 case BUILT_IN_SNPRINTF_CHK
:
6774 case BUILT_IN_VSNPRINTF_CHK
:
6775 maybe_emit_chk_warning (exp
, fcode
);
6778 case BUILT_IN_SPRINTF_CHK
:
6779 case BUILT_IN_VSPRINTF_CHK
:
6780 maybe_emit_sprintf_chk_warning (exp
, fcode
);
6784 if (warn_free_nonheap_object
)
6785 maybe_emit_free_warning (exp
);
6788 case BUILT_IN_THREAD_POINTER
:
6789 return expand_builtin_thread_pointer (exp
, target
);
6791 case BUILT_IN_SET_THREAD_POINTER
:
6792 expand_builtin_set_thread_pointer (exp
);
6795 default: /* just do library call, if unknown builtin */
6799 /* The switch statement above can drop through to cause the function
6800 to be called normally. */
6801 return expand_call (exp
, target
, ignore
);
/* Determine whether a tree node represents a call to a built-in
   function.  If the tree T is a call to a built-in function with
   the right number of arguments of the appropriate types, return
   the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
   Otherwise the return value is END_BUILTINS.  */

enum built_in_function
builtin_mathfn_code (const_tree t)
{
  const_tree fndecl, arg, parmlist;
  const_tree argtype, parmtype;
  const_call_expr_arg_iterator iter;

  /* Only direct calls (CALL_EXPR whose callee is an ADDR_EXPR of a
     FUNCTION_DECL) can be identified; indirect calls are rejected.  */
  if (TREE_CODE (t) != CALL_EXPR
      || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
    return END_BUILTINS;

  /* Machine-dependent (BUILT_IN_MD) builtins are excluded: their
     function codes live in a target-specific namespace.  */
  fndecl = get_callee_fndecl (t);
  if (fndecl == NULL_TREE
      || TREE_CODE (fndecl) != FUNCTION_DECL
      || ! DECL_BUILT_IN (fndecl)
      || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return END_BUILTINS;

  /* Walk the declared parameter list and the actual arguments in
     lock-step, requiring each argument's type class to match the
     corresponding parameter's type class.  */
  parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  init_const_call_expr_arg_iterator (t, &iter);
  for (; parmlist; parmlist = TREE_CHAIN (parmlist))
    {
      /* If a function doesn't take a variable number of arguments,
	 the last element in the list will have type `void'.  */
      parmtype = TREE_VALUE (parmlist);
      if (VOID_TYPE_P (parmtype))
	{
	  /* End of the declared parameters: any leftover actual
	     arguments mean the call does not match the prototype.  */
	  if (more_const_call_expr_args_p (&iter))
	    return END_BUILTINS;
	  return DECL_FUNCTION_CODE (fndecl);
	}

      /* Fewer actual arguments than declared parameters.  */
      if (! more_const_call_expr_args_p (&iter))
	return END_BUILTINS;

      arg = next_const_call_expr_arg (&iter);
      argtype = TREE_TYPE (arg);

      /* Match by broad type class only (float/complex/pointer/
	 integral), not by exact type, so e.g. an int argument to a
	 long parameter still matches.  */
      if (SCALAR_FLOAT_TYPE_P (parmtype))
	{
	  if (! SCALAR_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (COMPLEX_FLOAT_TYPE_P (parmtype))
	{
	  if (! COMPLEX_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (POINTER_TYPE_P (parmtype))
	{
	  if (! POINTER_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (INTEGRAL_TYPE_P (parmtype))
	{
	  if (! INTEGRAL_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else
	return END_BUILTINS;
    }

  /* Variable-length argument list.  */
  return DECL_FUNCTION_CODE (fndecl);
}
/* Fold a call to __builtin_constant_p, if we know its argument ARG will
   evaluate to a constant.  Returns integer_one_node / integer_zero_node
   when the answer is known, or NULL_TREE to defer the decision until
   later optimization passes.  */

static tree
fold_builtin_constant_p (tree arg)
{
  /* We return 1 for a numeric type that's known to be a constant
     value at compile-time or for an aggregate type that's a
     literal constant.  */
  STRIP_NOPS (arg);

  /* If we know this is a constant, emit the constant of one.  */
  if (CONSTANT_CLASS_P (arg)
      || (TREE_CODE (arg) == CONSTRUCTOR
	  && TREE_CONSTANT (arg)))
    return integer_one_node;
  /* The address of a string literal, or of element 0 of a string
     literal, is also considered constant.  */
  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (arg, 0);
      if (TREE_CODE (op) == STRING_CST
	  || (TREE_CODE (op) == ARRAY_REF
	      && integer_zerop (TREE_OPERAND (op, 1))
	      && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
	return integer_one_node;
    }

  /* If this expression has side effects, show we don't know it to be a
     constant.  Likewise if it's a pointer or aggregate type since in
     those case we only want literals, since those are only optimized
     when generating RTL, not later.
     And finally, if we are compiling an initializer, not code, we
     need to return a definite result now; there's not going to be any
     more optimization done.  */
  /* NOTE(review): the `cfun == 0' disjunct was garbled in extraction;
     restored from surrounding context — confirm against upstream.  */
  if (TREE_SIDE_EFFECTS (arg)
      || AGGREGATE_TYPE_P (TREE_TYPE (arg))
      || POINTER_TYPE_P (TREE_TYPE (arg))
      || cfun == 0
      || folding_initializer
      || force_folding_builtin_constant_p)
    return integer_zero_node;

  /* Defer: later passes may still prove ARG constant.  */
  return NULL_TREE;
}
/* Create builtin_expect with PRED and EXPECTED as its arguments and
   return it as a truthvalue.  LOC is the source location to use for
   the emitted trees.  The result has the form
   (__builtin_expect ((long) PRED, EXPECTED) != 0).  */

static tree
build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
{
  tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;

  /* Pull the parameter and return types from the __builtin_expect
     declaration itself so the conversions below are always exact.  */
  fn = builtin_decl_explicit (BUILT_IN_EXPECT);
  arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
  ret_type = TREE_TYPE (TREE_TYPE (fn));
  pred_type = TREE_VALUE (arg_types);
  expected_type = TREE_VALUE (TREE_CHAIN (arg_types));

  pred = fold_convert_loc (loc, pred_type, pred);
  expected = fold_convert_loc (loc, expected_type, expected);
  call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);

  /* Compare against zero to turn the call back into a truthvalue.  */
  return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
		 build_int_cst (ret_type, 0));
}
/* Fold a call to builtin_expect with arguments ARG0 and ARG1.  Return
   NULL_TREE if no simplification is possible.  ARG0 is the predicate,
   ARG1 the expected value.  */

static tree
fold_builtin_expect (location_t loc, tree arg0, tree arg1)
{
  tree inner, fndecl, inner_arg0;
  enum tree_code code;

  /* Distribute the expected value over short-circuiting operators.
     See through the cast from truthvalue_type_node to long.  */
  /* NOTE(review): the initial `inner_arg0 = arg0;' assignment was
     garbled in extraction; restored from context.  */
  inner_arg0 = arg0;
  while (TREE_CODE (inner_arg0) == NOP_EXPR
	 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
    inner_arg0 = TREE_OPERAND (inner_arg0, 0);

  /* If this is a builtin_expect within a builtin_expect keep the
     inner one.  See through a comparison against a constant.  It
     might have been added to create a thruthvalue.  */
  inner = inner_arg0;

  if (COMPARISON_CLASS_P (inner)
      && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
    inner = TREE_OPERAND (inner, 0);

  if (TREE_CODE (inner) == CALL_EXPR
      && (fndecl = get_callee_fndecl (inner))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
    return arg0;

  /* For a && b or a || b, push the expectation into both operands so
     each branch can be predicted individually.  */
  inner = inner_arg0;
  code = TREE_CODE (inner);
  if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
    {
      tree op0 = TREE_OPERAND (inner, 0);
      tree op1 = TREE_OPERAND (inner, 1);

      op0 = build_builtin_expect_predicate (loc, op0, arg1);
      op1 = build_builtin_expect_predicate (loc, op1, arg1);
      inner = build2 (code, TREE_TYPE (inner), op0, op1);

      return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
    }

  /* If the argument isn't invariant then there's nothing else we can do.  */
  if (!TREE_CONSTANT (inner_arg0))
    return NULL_TREE;

  /* If we expect that a comparison against the argument will fold to
     a constant return the constant.  In practice, this means a true
     constant or the address of a non-weak symbol.  */
  inner = inner_arg0;
  STRIP_NOPS (inner);
  if (TREE_CODE (inner) == ADDR_EXPR)
    {
      /* Strip component/array references to reach the underlying decl;
	 a weak symbol's address is not a compile-time constant.  */
      do
	{
	  inner = TREE_OPERAND (inner, 0);
	}
      while (TREE_CODE (inner) == COMPONENT_REF
	     || TREE_CODE (inner) == ARRAY_REF);
      if ((TREE_CODE (inner) == VAR_DECL
	   || TREE_CODE (inner) == FUNCTION_DECL)
	  && DECL_WEAK (inner))
	return NULL_TREE;
    }

  /* Otherwise, ARG0 already has the proper type for the return value.  */
  return arg0;
}
/* Fold a call to __builtin_classify_type with argument ARG.  Always
   folds: with no argument the result is no_type_class, otherwise the
   type class of ARG's type as computed by type_to_class.  */

static tree
fold_builtin_classify_type (tree arg)
{
  if (arg == 0)
    return build_int_cst (integer_type_node, no_type_class);

  return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
}
/* Fold a call to __builtin_strlen with argument ARG.  TYPE is the
   expected result type.  Returns the (converted) compile-time length
   when c_strlen can determine it, otherwise NULL_TREE.  */

static tree
fold_builtin_strlen (location_t loc, tree type, tree arg)
{
  if (!validate_arg (arg, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree len = c_strlen (arg, 0);

      if (len)
	return fold_convert_loc (loc, type, len);

      return NULL_TREE;
    }
}
/* Fold a call to __builtin_inf or __builtin_huge_val.  TYPE is the
   result type; WARN is nonzero for the inf variants (which must
   diagnose when the target has no infinities) and zero for huge_val.  */

static tree
fold_builtin_inf (location_t loc, tree type, int warn)
{
  REAL_VALUE_TYPE real;

  /* __builtin_inff is intended to be usable to define INFINITY on all
     targets.  If an infinity is not available, INFINITY expands "to a
     positive constant of type float that overflows at translation
     time", footnote "In this case, using INFINITY will violate the
     constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
     Thus we pedwarn to ensure this constraint violation is
     diagnosed.  */
  if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
    pedwarn (loc, 0, "target format does not support infinity");

  real_inf (&real);
  return build_real (type, real);
}
/* Fold a call to __builtin_nan or __builtin_nans with argument ARG.
   TYPE is the result type; QUIET is nonzero for the quiet-NaN variant.
   Returns NULL_TREE when ARG is not a constant string or does not
   parse as an n-char-sequence for the target mode.  */

static tree
fold_builtin_nan (tree arg, tree type, int quiet)
{
  REAL_VALUE_TYPE real;
  const char *str;

  if (!validate_arg (arg, POINTER_TYPE))
    return NULL_TREE;
  str = c_getstr (arg);
  if (!str)
    return NULL_TREE;

  if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
    return NULL_TREE;

  return build_real (type, real);
}
/* Return true if the floating point expression T has an integer value.
   We also allow +Inf, -Inf and NaN to be considered integer values.
   NOTE(review): the case labels below were garbled in extraction and
   restored from the recursion structure — confirm against upstream.  */

static bool
integer_valued_real_p (tree t)
{
  switch (TREE_CODE (t))
    {
    case FLOAT_EXPR:
      /* A conversion from integer to float is integer-valued.  */
      return true;

    case ABS_EXPR:
    case SAVE_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 0));

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      /* Only the value operand matters.  */
      return integer_valued_real_p (TREE_OPERAND (t, 1));

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
      /* Closed over integers: both operands must be integer valued.  */
      return integer_valued_real_p (TREE_OPERAND (t, 0))
	     && integer_valued_real_p (TREE_OPERAND (t, 1));

    case COND_EXPR:
      /* Both arms must be integer valued.  */
      return integer_valued_real_p (TREE_OPERAND (t, 1))
	     && integer_valued_real_p (TREE_OPERAND (t, 2));

    case REAL_CST:
      return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));

    case NOP_EXPR:
      {
	tree type = TREE_TYPE (TREE_OPERAND (t, 0));
	if (TREE_CODE (type) == INTEGER_TYPE)
	  return true;
	if (TREE_CODE (type) == REAL_TYPE)
	  return integer_valued_real_p (TREE_OPERAND (t, 0));
	break;
      }

    case CALL_EXPR:
      switch (builtin_mathfn_code (t))
	{
	/* Rounding builtins produce integer values by definition.  */
	CASE_FLT_FN (BUILT_IN_CEIL):
	CASE_FLT_FN (BUILT_IN_FLOOR):
	CASE_FLT_FN (BUILT_IN_NEARBYINT):
	CASE_FLT_FN (BUILT_IN_RINT):
	CASE_FLT_FN (BUILT_IN_ROUND):
	CASE_FLT_FN (BUILT_IN_TRUNC):
	  return true;

	CASE_FLT_FN (BUILT_IN_FMIN):
	CASE_FLT_FN (BUILT_IN_FMAX):
	  return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
	    && integer_valued_real_p (CALL_EXPR_ARG (t, 1));

	default:
	  break;
	}
      break;

    default:
      break;
    }
  return false;
}
/* FNDECL is assumed to be a builtin where truncation can be propagated
   across (for instance floor((double)f) == (double)floorf (f).
   Do the transformation for a call with argument ARG.  Returns the
   folded tree or NULL_TREE.  */

static tree
fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Integer rounding functions are idempotent.  */
  if (fcode == builtin_mathfn_code (arg))
    return arg;

  /* If argument is already integer valued, and we don't need to worry
     about setting errno, there's no need to perform rounding.  */
  if (! flag_errno_math && integer_valued_real_p (arg))
    return arg;

  if (optimize)
    {
      /* Narrow e.g. floor ((double) f) to (double) floorf (f) when a
	 narrower variant of the builtin exists.  */
      tree arg0 = strip_float_extensions (arg);
      tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
      tree newtype = TREE_TYPE (arg0);
      tree decl;

      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
	  && (decl = mathfn_built_in (newtype, fcode)))
	return fold_convert_loc (loc, ftype,
				 build_call_expr_loc (loc, decl, 1,
						      fold_convert_loc (loc,
									newtype,
									arg0)));
    }
  return NULL_TREE;
}
/* FNDECL is assumed to be builtin which can narrow the FP type of
   the argument, for instance lround((double)f) -> lroundf (f).
   Do the transformation for a call with argument ARG.  Returns the
   folded tree or NULL_TREE.  */

static tree
fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* If argument is already integer valued, and we don't need to worry
     about setting errno, there's no need to perform rounding.  */
  if (! flag_errno_math && integer_valued_real_p (arg))
    return fold_build1_loc (loc, FIX_TRUNC_EXPR,
			TREE_TYPE (TREE_TYPE (fndecl)), arg);

  if (optimize)
    {
      /* Narrow the FP argument type when a narrower builtin exists.  */
      tree ftype = TREE_TYPE (arg);
      tree arg0 = strip_float_extensions (arg);
      tree newtype = TREE_TYPE (arg0);
      tree decl;

      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
	  && (decl = mathfn_built_in (newtype, fcode)))
	return build_call_expr_loc (loc, decl, 1,
				fold_convert_loc (loc, newtype, arg0));
    }

  /* Canonicalize iround (x) to lround (x) on ILP32 targets where
     sizeof (int) == sizeof (long).  */
  if (TYPE_PRECISION (integer_type_node)
      == TYPE_PRECISION (long_integer_type_node))
    {
      tree newfn = NULL_TREE;
      switch (fcode)
	{
	CASE_FLT_FN (BUILT_IN_ICEIL):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
	  break;

	CASE_FLT_FN (BUILT_IN_IFLOOR):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
	  break;

	CASE_FLT_FN (BUILT_IN_IROUND):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
	  break;

	CASE_FLT_FN (BUILT_IN_IRINT):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
	  break;

	default:
	  break;
	}

      if (newfn)
	{
	  tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
	  /* Convert back since the i* and l* return types differ
	     nominally even though the precision matches.  */
	  return fold_convert_loc (loc,
				   TREE_TYPE (TREE_TYPE (fndecl)), newcall);
	}
    }

  /* Canonicalize llround (x) to lround (x) on LP64 targets where
     sizeof (long long) == sizeof (long).  */
  if (TYPE_PRECISION (long_long_integer_type_node)
      == TYPE_PRECISION (long_integer_type_node))
    {
      tree newfn = NULL_TREE;
      switch (fcode)
	{
	CASE_FLT_FN (BUILT_IN_LLCEIL):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
	  break;

	CASE_FLT_FN (BUILT_IN_LLFLOOR):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
	  break;

	CASE_FLT_FN (BUILT_IN_LLROUND):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
	  break;

	CASE_FLT_FN (BUILT_IN_LLRINT):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
	  break;

	default:
	  break;
	}

      if (newfn)
	{
	  tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
	  return fold_convert_loc (loc,
				   TREE_TYPE (TREE_TYPE (fndecl)), newcall);
	}
    }

  return NULL_TREE;
}
/* Fold call to builtin cabs, cabsf or cabsl with argument ARG.  TYPE is the
   return type.  Return NULL_TREE if no simplification can be made.
   FNDECL is the cabs declaration itself, used to rebuild the call when
   only the argument is simplified.  */

static tree
fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
{
  tree res;

  if (!validate_arg (arg, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.
     cabs(a+bi) == hypot(a,b), evaluated via MPFR.  */
  if (TREE_CODE (arg) == COMPLEX_CST
      && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
			      type, mpfr_hypot)))
    return res;

  if (TREE_CODE (arg) == COMPLEX_EXPR)
    {
      tree real = TREE_OPERAND (arg, 0);
      tree imag = TREE_OPERAND (arg, 1);

      /* If either part is zero, cabs is fabs of the other.  */
      if (real_zerop (real))
	return fold_build1_loc (loc, ABS_EXPR, type, imag);
      if (real_zerop (imag))
	return fold_build1_loc (loc, ABS_EXPR, type, real);

      /* cabs(x+xi) -> fabs(x)*sqrt(2).  */
      if (flag_unsafe_math_optimizations
	  && operand_equal_p (real, imag, OEP_PURE_SAME))
	{
	  const REAL_VALUE_TYPE sqrt2_trunc
	    = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());

	  return fold_build2_loc (loc, MULT_EXPR, type,
			      fold_build1_loc (loc, ABS_EXPR, type, real),
			      build_real (type, sqrt2_trunc));
	}
    }

  /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z).  */
  if (TREE_CODE (arg) == NEGATE_EXPR
      || TREE_CODE (arg) == CONJ_EXPR)
    return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));

  /* Don't do this when optimizing for size.  */
  if (flag_unsafe_math_optimizations
      && optimize && optimize_function_for_speed_p (cfun))
    {
      tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

      if (sqrtfn != NULL_TREE)
	{
	  tree rpart, ipart, result;

	  /* Save ARG so side effects are evaluated exactly once even
	     though both parts are extracted from it.  */
	  arg = builtin_save_expr (arg);

	  rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
	  ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);

	  rpart = builtin_save_expr (rpart);
	  ipart = builtin_save_expr (ipart);

	  /* cabs(z) -> sqrt (re*re + im*im), only valid under
	     -funsafe-math-optimizations (no overflow protection).  */
	  result = fold_build2_loc (loc, PLUS_EXPR, type,
				fold_build2_loc (loc, MULT_EXPR, type,
					     rpart, rpart),
				fold_build2_loc (loc, MULT_EXPR, type,
					     ipart, ipart));

	  return build_call_expr_loc (loc, sqrtfn, 1, result);
	}
    }

  return NULL_TREE;
}
/* Build a complex (inf +- 0i) for the result of cproj.  TYPE is the
   complex tree type of the result.  If NEG is true, the imaginary
   zero is negative.  */

static tree
build_complex_cproj (tree type, bool neg)
{
  REAL_VALUE_TYPE rinf, rzero = dconst0;

  real_inf (&rinf);
  /* Carry the sign of the imaginary part onto the zero.  */
  rzero.sign = neg;
  return build_complex (type, build_real (TREE_TYPE (type), rinf),
			build_real (TREE_TYPE (type), rzero));
}
/* Fold call to builtin cproj, cprojf or cprojl with argument ARG.  TYPE is the
   return type.  Return NULL_TREE if no simplification can be made.
   cproj maps any complex value with an infinite part to (inf +- 0i)
   and is the identity otherwise (C99 7.3.9.4).  */

static tree
fold_builtin_cproj (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
    return NULL_TREE;

  /* If there are no infinities, return arg.  */
  if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
    return non_lvalue_loc (loc, arg);

  /* Calculate the result when the argument is a constant.  */
  if (TREE_CODE (arg) == COMPLEX_CST)
    {
      const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      if (real_isinf (real) || real_isinf (imag))
	/* The sign of the projected zero follows the imaginary part.  */
	return build_complex_cproj (type, imag->sign);
      else
	return arg;
    }
  else if (TREE_CODE (arg) == COMPLEX_EXPR)
    {
      tree real = TREE_OPERAND (arg, 0);
      tree imag = TREE_OPERAND (arg, 1);

      STRIP_NOPS (real);
      STRIP_NOPS (imag);

      /* If the real part is inf and the imag part is known to be
	 nonnegative, return (inf + 0i).  Remember side-effects are
	 possible in the imag part.  */
      if (TREE_CODE (real) == REAL_CST
	  && real_isinf (TREE_REAL_CST_PTR (real))
	  && tree_expr_nonnegative_p (imag))
	return omit_one_operand_loc (loc, type,
				     build_complex_cproj (type, false),
				     arg);

      /* If the imag part is inf, return (inf+I*copysign(0,imag)).
	 Remember side-effects are possible in the real part.  */
      if (TREE_CODE (imag) == REAL_CST
	  && real_isinf (TREE_REAL_CST_PTR (imag)))
	return
	  omit_one_operand_loc (loc, type,
				build_complex_cproj (type, TREE_REAL_CST_PTR
						     (imag)->sign), arg);
    }

  return NULL_TREE;
}
/* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
   Return NULL_TREE if no simplification can be made.  TYPE is the
   result type.  All algebraic rewrites below require
   -funsafe-math-optimizations.  */

static tree
fold_builtin_sqrt (location_t loc, tree arg, tree type)
{
  tree res;
  enum built_in_function fcode;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.
     &dconst0/true: sqrt's domain is [0, +inf).  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
    return res;

  /* Optimize sqrt(expN(x)) = expN(x*0.5).  */
  fcode = builtin_mathfn_code (arg);
  if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
    {
      tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      arg = fold_build2_loc (loc, MULT_EXPR, type,
			 CALL_EXPR_ARG (arg, 0),
			 build_real (type, dconsthalf));
      return build_call_expr_loc (loc, expfn, 1, arg);
    }

  /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)).  */
  if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
    {
      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

      if (powfn)
	{
	  tree arg0 = CALL_EXPR_ARG (arg, 0);
	  tree tree_root;
	  /* The inner root was either sqrt or cbrt.  */
	  /* This was a conditional expression but it triggered a bug
	     in Sun C 5.5.  */
	  REAL_VALUE_TYPE dconstroot;
	  if (BUILTIN_SQRT_P (fcode))
	    dconstroot = dconsthalf;
	  else
	    dconstroot = dconst_third ();

	  /* Adjust for the outer root: halving the exponent of 1/2 or
	     1/3 gives 1/4 resp. 1/6.  */
	  SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
	  dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
	  tree_root = build_real (type, dconstroot);
	  return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
	}
    }

  /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5).  */
  if (flag_unsafe_math_optimizations
      && (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL))
    {
      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      tree arg0 = CALL_EXPR_ARG (arg, 0);
      tree arg1 = CALL_EXPR_ARG (arg, 1);
      tree narg1;
      /* |x| keeps the transformed pow within its domain when x may
	 be negative.  */
      if (!tree_expr_nonnegative_p (arg0))
	arg0 = build1 (ABS_EXPR, type, arg0);
      narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
			   build_real (type, dconsthalf));
      return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
    }

  return NULL_TREE;
}
/* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
   Return NULL_TREE if no simplification can be made.  TYPE is the
   result type.  Algebraic rewrites require
   -funsafe-math-optimizations.  */

static tree
fold_builtin_cbrt (location_t loc, tree arg, tree type)
{
  const enum built_in_function fcode = builtin_mathfn_code (arg);
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
    return res;

  if (flag_unsafe_math_optimizations)
    {
      /* Optimize cbrt(expN(x)) -> expN(x/3).  */
      if (BUILTIN_EXPONENT_P (fcode))
	{
	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
	  const REAL_VALUE_TYPE third_trunc =
	    real_value_truncate (TYPE_MODE (type), dconst_third ());
	  arg = fold_build2_loc (loc, MULT_EXPR, type,
			     CALL_EXPR_ARG (arg, 0),
			     build_real (type, third_trunc));
	  return build_call_expr_loc (loc, expfn, 1, arg);
	}

      /* Optimize cbrt(sqrt(x)) -> pow(x,1/6).  */
      if (BUILTIN_SQRT_P (fcode))
	{
	  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

	  if (powfn)
	    {
	      tree arg0 = CALL_EXPR_ARG (arg, 0);
	      tree tree_root;
	      REAL_VALUE_TYPE dconstroot = dconst_third ();

	      /* Halve 1/3 to get the combined exponent 1/6.  */
	      SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
	      dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
	      tree_root = build_real (type, dconstroot);
	      return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
	    }
	}

      /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
	{
	  tree arg0 = CALL_EXPR_ARG (arg, 0);
	  if (tree_expr_nonnegative_p (arg0))
	    {
	      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

	      if (powfn)
		{
		  tree tree_root;
		  REAL_VALUE_TYPE dconstroot;

		  /* 1/9 == (1/3) * (1/3).  */
		  real_arithmetic (&dconstroot, MULT_EXPR,
				   dconst_third_ptr (), dconst_third_ptr ());
		  dconstroot = real_value_truncate (TYPE_MODE (type),
						    dconstroot);
		  tree_root = build_real (type, dconstroot);
		  return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
		}
	    }
	}

      /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL)
	{
	  tree arg00 = CALL_EXPR_ARG (arg, 0);
	  tree arg01 = CALL_EXPR_ARG (arg, 1);
	  if (tree_expr_nonnegative_p (arg00))
	    {
	      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
	      const REAL_VALUE_TYPE dconstroot
		= real_value_truncate (TYPE_MODE (type), dconst_third ());
	      tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
					 build_real (type, dconstroot));
	      return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
	    }
	}
    }

  return NULL_TREE;
}
/* Fold function call to builtin cos, cosf, or cosl with argument ARG.
   TYPE is the type of the return value.  Return NULL_TREE if no
   simplification can be made.  FNDECL is used to rebuild the call
   when only the argument is simplified.  */

static tree
fold_builtin_cos (location_t loc,
		  tree arg, tree type, tree fndecl)
{
  tree res, narg;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
    return res;

  /* Optimize cos(-x) into cos (x): cosine is an even function.  */
  if ((narg = fold_strip_sign_ops (arg)))
    return build_call_expr_loc (loc, fndecl, 1, narg);

  return NULL_TREE;
}
/* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
   Return NULL_TREE if no simplification can be made.  TYPE is the
   return type; FNDECL rebuilds the call on argument simplification.  */

static tree
fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree res, narg;

      /* Calculate the result when the argument is a constant.  */
      if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
	return res;

      /* Optimize cosh(-x) into cosh (x): cosh is an even function.  */
      if ((narg = fold_strip_sign_ops (arg)))
	return build_call_expr_loc (loc, fndecl, 1, narg);
    }

  return NULL_TREE;
}
7662 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7663 argument ARG. TYPE is the type of the return value. Return
7664 NULL_TREE if no simplification can be made. */
7667 fold_builtin_ccos (location_t loc
, tree arg
, tree type
, tree fndecl
,
7670 if (validate_arg (arg
, COMPLEX_TYPE
)
7671 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) == REAL_TYPE
)
7675 /* Calculate the result when the argument is a constant. */
7676 if ((tmp
= do_mpc_arg1 (arg
, type
, (hyper
? mpc_cosh
: mpc_cos
))))
7679 /* Optimize fn(-x) into fn(x). */
7680 if ((tmp
= fold_strip_sign_ops (arg
)))
7681 return build_call_expr_loc (loc
, fndecl
, 1, tmp
);
7687 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7688 Return NULL_TREE if no simplification can be made. */
7691 fold_builtin_tan (tree arg
, tree type
)
7693 enum built_in_function fcode
;
7696 if (!validate_arg (arg
, REAL_TYPE
))
7699 /* Calculate the result when the argument is a constant. */
7700 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_tan
, NULL
, NULL
, 0)))
7703 /* Optimize tan(atan(x)) = x. */
7704 fcode
= builtin_mathfn_code (arg
);
7705 if (flag_unsafe_math_optimizations
7706 && (fcode
== BUILT_IN_ATAN
7707 || fcode
== BUILT_IN_ATANF
7708 || fcode
== BUILT_IN_ATANL
))
7709 return CALL_EXPR_ARG (arg
, 0);
7714 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7715 NULL_TREE if no simplification can be made. */
7718 fold_builtin_sincos (location_t loc
,
7719 tree arg0
, tree arg1
, tree arg2
)
7724 if (!validate_arg (arg0
, REAL_TYPE
)
7725 || !validate_arg (arg1
, POINTER_TYPE
)
7726 || !validate_arg (arg2
, POINTER_TYPE
))
7729 type
= TREE_TYPE (arg0
);
7731 /* Calculate the result when the argument is a constant. */
7732 if ((res
= do_mpfr_sincos (arg0
, arg1
, arg2
)))
7735 /* Canonicalize sincos to cexpi. */
7736 if (!targetm
.libc_has_function (function_c99_math_complex
))
7738 fn
= mathfn_built_in (type
, BUILT_IN_CEXPI
);
7742 call
= build_call_expr_loc (loc
, fn
, 1, arg0
);
7743 call
= builtin_save_expr (call
);
7745 return build2 (COMPOUND_EXPR
, void_type_node
,
7746 build2 (MODIFY_EXPR
, void_type_node
,
7747 build_fold_indirect_ref_loc (loc
, arg1
),
7748 build1 (IMAGPART_EXPR
, type
, call
)),
7749 build2 (MODIFY_EXPR
, void_type_node
,
7750 build_fold_indirect_ref_loc (loc
, arg2
),
7751 build1 (REALPART_EXPR
, type
, call
)));
7754 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7755 NULL_TREE if no simplification can be made. */
7758 fold_builtin_cexp (location_t loc
, tree arg0
, tree type
)
7761 tree realp
, imagp
, ifn
;
7764 if (!validate_arg (arg0
, COMPLEX_TYPE
)
7765 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) != REAL_TYPE
)
7768 /* Calculate the result when the argument is a constant. */
7769 if ((res
= do_mpc_arg1 (arg0
, type
, mpc_exp
)))
7772 rtype
= TREE_TYPE (TREE_TYPE (arg0
));
7774 /* In case we can figure out the real part of arg0 and it is constant zero
7776 if (!targetm
.libc_has_function (function_c99_math_complex
))
7778 ifn
= mathfn_built_in (rtype
, BUILT_IN_CEXPI
);
7782 if ((realp
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg0
))
7783 && real_zerop (realp
))
7785 tree narg
= fold_build1_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
7786 return build_call_expr_loc (loc
, ifn
, 1, narg
);
7789 /* In case we can easily decompose real and imaginary parts split cexp
7790 to exp (r) * cexpi (i). */
7791 if (flag_unsafe_math_optimizations
7794 tree rfn
, rcall
, icall
;
7796 rfn
= mathfn_built_in (rtype
, BUILT_IN_EXP
);
7800 imagp
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
7804 icall
= build_call_expr_loc (loc
, ifn
, 1, imagp
);
7805 icall
= builtin_save_expr (icall
);
7806 rcall
= build_call_expr_loc (loc
, rfn
, 1, realp
);
7807 rcall
= builtin_save_expr (rcall
);
7808 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
7809 fold_build2_loc (loc
, MULT_EXPR
, rtype
,
7811 fold_build1_loc (loc
, REALPART_EXPR
,
7813 fold_build2_loc (loc
, MULT_EXPR
, rtype
,
7815 fold_build1_loc (loc
, IMAGPART_EXPR
,
7822 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7823 Return NULL_TREE if no simplification can be made. */
7826 fold_builtin_trunc (location_t loc
, tree fndecl
, tree arg
)
7828 if (!validate_arg (arg
, REAL_TYPE
))
7831 /* Optimize trunc of constant value. */
7832 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7834 REAL_VALUE_TYPE r
, x
;
7835 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7837 x
= TREE_REAL_CST (arg
);
7838 real_trunc (&r
, TYPE_MODE (type
), &x
);
7839 return build_real (type
, r
);
7842 return fold_trunc_transparent_mathfn (loc
, fndecl
, arg
);
7845 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7846 Return NULL_TREE if no simplification can be made. */
7849 fold_builtin_floor (location_t loc
, tree fndecl
, tree arg
)
7851 if (!validate_arg (arg
, REAL_TYPE
))
7854 /* Optimize floor of constant value. */
7855 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7859 x
= TREE_REAL_CST (arg
);
7860 if (! REAL_VALUE_ISNAN (x
) || ! flag_errno_math
)
7862 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7865 real_floor (&r
, TYPE_MODE (type
), &x
);
7866 return build_real (type
, r
);
7870 /* Fold floor (x) where x is nonnegative to trunc (x). */
7871 if (tree_expr_nonnegative_p (arg
))
7873 tree truncfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_TRUNC
);
7875 return build_call_expr_loc (loc
, truncfn
, 1, arg
);
7878 return fold_trunc_transparent_mathfn (loc
, fndecl
, arg
);
7881 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7882 Return NULL_TREE if no simplification can be made. */
7885 fold_builtin_ceil (location_t loc
, tree fndecl
, tree arg
)
7887 if (!validate_arg (arg
, REAL_TYPE
))
7890 /* Optimize ceil of constant value. */
7891 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7895 x
= TREE_REAL_CST (arg
);
7896 if (! REAL_VALUE_ISNAN (x
) || ! flag_errno_math
)
7898 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7901 real_ceil (&r
, TYPE_MODE (type
), &x
);
7902 return build_real (type
, r
);
7906 return fold_trunc_transparent_mathfn (loc
, fndecl
, arg
);
7909 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7910 Return NULL_TREE if no simplification can be made. */
7913 fold_builtin_round (location_t loc
, tree fndecl
, tree arg
)
7915 if (!validate_arg (arg
, REAL_TYPE
))
7918 /* Optimize round of constant value. */
7919 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7923 x
= TREE_REAL_CST (arg
);
7924 if (! REAL_VALUE_ISNAN (x
) || ! flag_errno_math
)
7926 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7929 real_round (&r
, TYPE_MODE (type
), &x
);
7930 return build_real (type
, r
);
7934 return fold_trunc_transparent_mathfn (loc
, fndecl
, arg
);
7937 /* Fold function call to builtin lround, lroundf or lroundl (or the
7938 corresponding long long versions) and other rounding functions. ARG
7939 is the argument to the call. Return NULL_TREE if no simplification
7943 fold_builtin_int_roundingfn (location_t loc
, tree fndecl
, tree arg
)
7945 if (!validate_arg (arg
, REAL_TYPE
))
7948 /* Optimize lround of constant value. */
7949 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7951 const REAL_VALUE_TYPE x
= TREE_REAL_CST (arg
);
7953 if (real_isfinite (&x
))
7955 tree itype
= TREE_TYPE (TREE_TYPE (fndecl
));
7956 tree ftype
= TREE_TYPE (arg
);
7960 switch (DECL_FUNCTION_CODE (fndecl
))
7962 CASE_FLT_FN (BUILT_IN_IFLOOR
):
7963 CASE_FLT_FN (BUILT_IN_LFLOOR
):
7964 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
7965 real_floor (&r
, TYPE_MODE (ftype
), &x
);
7968 CASE_FLT_FN (BUILT_IN_ICEIL
):
7969 CASE_FLT_FN (BUILT_IN_LCEIL
):
7970 CASE_FLT_FN (BUILT_IN_LLCEIL
):
7971 real_ceil (&r
, TYPE_MODE (ftype
), &x
);
7974 CASE_FLT_FN (BUILT_IN_IROUND
):
7975 CASE_FLT_FN (BUILT_IN_LROUND
):
7976 CASE_FLT_FN (BUILT_IN_LLROUND
):
7977 real_round (&r
, TYPE_MODE (ftype
), &x
);
7984 real_to_integer2 ((HOST_WIDE_INT
*)&val
.low
, &val
.high
, &r
);
7985 if (double_int_fits_to_tree_p (itype
, val
))
7986 return double_int_to_tree (itype
, val
);
7990 switch (DECL_FUNCTION_CODE (fndecl
))
7992 CASE_FLT_FN (BUILT_IN_LFLOOR
):
7993 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
7994 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
7995 if (tree_expr_nonnegative_p (arg
))
7996 return fold_build1_loc (loc
, FIX_TRUNC_EXPR
,
7997 TREE_TYPE (TREE_TYPE (fndecl
)), arg
);
8002 return fold_fixed_mathfn (loc
, fndecl
, arg
);
8005 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8006 and their long and long long variants (i.e. ffsl and ffsll). ARG is
8007 the argument to the call. Return NULL_TREE if no simplification can
8011 fold_builtin_bitop (tree fndecl
, tree arg
)
8013 if (!validate_arg (arg
, INTEGER_TYPE
))
8016 /* Optimize for constant argument. */
8017 if (TREE_CODE (arg
) == INTEGER_CST
&& !TREE_OVERFLOW (arg
))
8019 HOST_WIDE_INT hi
, width
, result
;
8020 unsigned HOST_WIDE_INT lo
;
8023 type
= TREE_TYPE (arg
);
8024 width
= TYPE_PRECISION (type
);
8025 lo
= TREE_INT_CST_LOW (arg
);
8027 /* Clear all the bits that are beyond the type's precision. */
8028 if (width
> HOST_BITS_PER_WIDE_INT
)
8030 hi
= TREE_INT_CST_HIGH (arg
);
8031 if (width
< HOST_BITS_PER_DOUBLE_INT
)
8032 hi
&= ~(HOST_WIDE_INT_M1U
<< (width
- HOST_BITS_PER_WIDE_INT
));
8037 if (width
< HOST_BITS_PER_WIDE_INT
)
8038 lo
&= ~(HOST_WIDE_INT_M1U
<< width
);
8041 switch (DECL_FUNCTION_CODE (fndecl
))
8043 CASE_INT_FN (BUILT_IN_FFS
):
8045 result
= ffs_hwi (lo
);
8047 result
= HOST_BITS_PER_WIDE_INT
+ ffs_hwi (hi
);
8052 CASE_INT_FN (BUILT_IN_CLZ
):
8054 result
= width
- floor_log2 (hi
) - 1 - HOST_BITS_PER_WIDE_INT
;
8056 result
= width
- floor_log2 (lo
) - 1;
8057 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type
), result
))
8061 CASE_INT_FN (BUILT_IN_CTZ
):
8063 result
= ctz_hwi (lo
);
8065 result
= HOST_BITS_PER_WIDE_INT
+ ctz_hwi (hi
);
8066 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type
), result
))
8070 CASE_INT_FN (BUILT_IN_CLRSB
):
8071 if (width
> 2 * HOST_BITS_PER_WIDE_INT
)
8073 if (width
> HOST_BITS_PER_WIDE_INT
8074 && (hi
& ((unsigned HOST_WIDE_INT
) 1
8075 << (width
- HOST_BITS_PER_WIDE_INT
- 1))) != 0)
8077 hi
= ~hi
& ~(HOST_WIDE_INT_M1U
8078 << (width
- HOST_BITS_PER_WIDE_INT
- 1));
8081 else if (width
<= HOST_BITS_PER_WIDE_INT
8082 && (lo
& ((unsigned HOST_WIDE_INT
) 1 << (width
- 1))) != 0)
8083 lo
= ~lo
& ~(HOST_WIDE_INT_M1U
<< (width
- 1));
8085 result
= width
- floor_log2 (hi
) - 2 - HOST_BITS_PER_WIDE_INT
;
8087 result
= width
- floor_log2 (lo
) - 2;
8092 CASE_INT_FN (BUILT_IN_POPCOUNT
):
8095 result
++, lo
&= lo
- 1;
8097 result
++, hi
&= (unsigned HOST_WIDE_INT
) hi
- 1;
8100 CASE_INT_FN (BUILT_IN_PARITY
):
8103 result
++, lo
&= lo
- 1;
8105 result
++, hi
&= (unsigned HOST_WIDE_INT
) hi
- 1;
8113 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl
)), result
);
8119 /* Fold function call to builtin_bswap and the short, long and long long
8120 variants. Return NULL_TREE if no simplification can be made. */
8122 fold_builtin_bswap (tree fndecl
, tree arg
)
8124 if (! validate_arg (arg
, INTEGER_TYPE
))
8127 /* Optimize constant value. */
8128 if (TREE_CODE (arg
) == INTEGER_CST
&& !TREE_OVERFLOW (arg
))
8130 HOST_WIDE_INT hi
, width
, r_hi
= 0;
8131 unsigned HOST_WIDE_INT lo
, r_lo
= 0;
8132 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8134 width
= TYPE_PRECISION (type
);
8135 lo
= TREE_INT_CST_LOW (arg
);
8136 hi
= TREE_INT_CST_HIGH (arg
);
8138 switch (DECL_FUNCTION_CODE (fndecl
))
8140 case BUILT_IN_BSWAP16
:
8141 case BUILT_IN_BSWAP32
:
8142 case BUILT_IN_BSWAP64
:
8146 for (s
= 0; s
< width
; s
+= 8)
8148 int d
= width
- s
- 8;
8149 unsigned HOST_WIDE_INT byte
;
8151 if (s
< HOST_BITS_PER_WIDE_INT
)
8152 byte
= (lo
>> s
) & 0xff;
8154 byte
= (hi
>> (s
- HOST_BITS_PER_WIDE_INT
)) & 0xff;
8156 if (d
< HOST_BITS_PER_WIDE_INT
)
8159 r_hi
|= byte
<< (d
- HOST_BITS_PER_WIDE_INT
);
8169 if (width
< HOST_BITS_PER_WIDE_INT
)
8170 return build_int_cst (type
, r_lo
);
8172 return build_int_cst_wide (type
, r_lo
, r_hi
);
8178 /* A subroutine of fold_builtin to fold the various logarithmic
8179 functions. Return NULL_TREE if no simplification can me made.
8180 FUNC is the corresponding MPFR logarithm function. */
8183 fold_builtin_logarithm (location_t loc
, tree fndecl
, tree arg
,
8184 int (*func
)(mpfr_ptr
, mpfr_srcptr
, mp_rnd_t
))
8186 if (validate_arg (arg
, REAL_TYPE
))
8188 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8190 const enum built_in_function fcode
= builtin_mathfn_code (arg
);
8192 /* Calculate the result when the argument is a constant. */
8193 if ((res
= do_mpfr_arg1 (arg
, type
, func
, &dconst0
, NULL
, false)))
8196 /* Special case, optimize logN(expN(x)) = x. */
8197 if (flag_unsafe_math_optimizations
8198 && ((func
== mpfr_log
8199 && (fcode
== BUILT_IN_EXP
8200 || fcode
== BUILT_IN_EXPF
8201 || fcode
== BUILT_IN_EXPL
))
8202 || (func
== mpfr_log2
8203 && (fcode
== BUILT_IN_EXP2
8204 || fcode
== BUILT_IN_EXP2F
8205 || fcode
== BUILT_IN_EXP2L
))
8206 || (func
== mpfr_log10
&& (BUILTIN_EXP10_P (fcode
)))))
8207 return fold_convert_loc (loc
, type
, CALL_EXPR_ARG (arg
, 0));
8209 /* Optimize logN(func()) for various exponential functions. We
8210 want to determine the value "x" and the power "exponent" in
8211 order to transform logN(x**exponent) into exponent*logN(x). */
8212 if (flag_unsafe_math_optimizations
)
8214 tree exponent
= 0, x
= 0;
8218 CASE_FLT_FN (BUILT_IN_EXP
):
8219 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
8220 x
= build_real (type
, real_value_truncate (TYPE_MODE (type
),
8222 exponent
= CALL_EXPR_ARG (arg
, 0);
8224 CASE_FLT_FN (BUILT_IN_EXP2
):
8225 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
8226 x
= build_real (type
, dconst2
);
8227 exponent
= CALL_EXPR_ARG (arg
, 0);
8229 CASE_FLT_FN (BUILT_IN_EXP10
):
8230 CASE_FLT_FN (BUILT_IN_POW10
):
8231 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
8233 REAL_VALUE_TYPE dconst10
;
8234 real_from_integer (&dconst10
, VOIDmode
, 10, 0, 0);
8235 x
= build_real (type
, dconst10
);
8237 exponent
= CALL_EXPR_ARG (arg
, 0);
8239 CASE_FLT_FN (BUILT_IN_SQRT
):
8240 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
8241 x
= CALL_EXPR_ARG (arg
, 0);
8242 exponent
= build_real (type
, dconsthalf
);
8244 CASE_FLT_FN (BUILT_IN_CBRT
):
8245 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
8246 x
= CALL_EXPR_ARG (arg
, 0);
8247 exponent
= build_real (type
, real_value_truncate (TYPE_MODE (type
),
8250 CASE_FLT_FN (BUILT_IN_POW
):
8251 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
8252 x
= CALL_EXPR_ARG (arg
, 0);
8253 exponent
= CALL_EXPR_ARG (arg
, 1);
8259 /* Now perform the optimization. */
8262 tree logfn
= build_call_expr_loc (loc
, fndecl
, 1, x
);
8263 return fold_build2_loc (loc
, MULT_EXPR
, type
, exponent
, logfn
);
8271 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8272 NULL_TREE if no simplification can be made. */
8275 fold_builtin_hypot (location_t loc
, tree fndecl
,
8276 tree arg0
, tree arg1
, tree type
)
8278 tree res
, narg0
, narg1
;
8280 if (!validate_arg (arg0
, REAL_TYPE
)
8281 || !validate_arg (arg1
, REAL_TYPE
))
8284 /* Calculate the result when the argument is a constant. */
8285 if ((res
= do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_hypot
)))
8288 /* If either argument to hypot has a negate or abs, strip that off.
8289 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8290 narg0
= fold_strip_sign_ops (arg0
);
8291 narg1
= fold_strip_sign_ops (arg1
);
8294 return build_call_expr_loc (loc
, fndecl
, 2, narg0
? narg0
: arg0
,
8295 narg1
? narg1
: arg1
);
8298 /* If either argument is zero, hypot is fabs of the other. */
8299 if (real_zerop (arg0
))
8300 return fold_build1_loc (loc
, ABS_EXPR
, type
, arg1
);
8301 else if (real_zerop (arg1
))
8302 return fold_build1_loc (loc
, ABS_EXPR
, type
, arg0
);
8304 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8305 if (flag_unsafe_math_optimizations
8306 && operand_equal_p (arg0
, arg1
, OEP_PURE_SAME
))
8308 const REAL_VALUE_TYPE sqrt2_trunc
8309 = real_value_truncate (TYPE_MODE (type
), dconst_sqrt2 ());
8310 return fold_build2_loc (loc
, MULT_EXPR
, type
,
8311 fold_build1_loc (loc
, ABS_EXPR
, type
, arg0
),
8312 build_real (type
, sqrt2_trunc
));
8319 /* Fold a builtin function call to pow, powf, or powl. Return
8320 NULL_TREE if no simplification can be made. */
8322 fold_builtin_pow (location_t loc
, tree fndecl
, tree arg0
, tree arg1
, tree type
)
8326 if (!validate_arg (arg0
, REAL_TYPE
)
8327 || !validate_arg (arg1
, REAL_TYPE
))
8330 /* Calculate the result when the argument is a constant. */
8331 if ((res
= do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_pow
)))
8334 /* Optimize pow(1.0,y) = 1.0. */
8335 if (real_onep (arg0
))
8336 return omit_one_operand_loc (loc
, type
, build_real (type
, dconst1
), arg1
);
8338 if (TREE_CODE (arg1
) == REAL_CST
8339 && !TREE_OVERFLOW (arg1
))
8341 REAL_VALUE_TYPE cint
;
8345 c
= TREE_REAL_CST (arg1
);
8347 /* Optimize pow(x,0.0) = 1.0. */
8348 if (REAL_VALUES_EQUAL (c
, dconst0
))
8349 return omit_one_operand_loc (loc
, type
, build_real (type
, dconst1
),
8352 /* Optimize pow(x,1.0) = x. */
8353 if (REAL_VALUES_EQUAL (c
, dconst1
))
8356 /* Optimize pow(x,-1.0) = 1.0/x. */
8357 if (REAL_VALUES_EQUAL (c
, dconstm1
))
8358 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
8359 build_real (type
, dconst1
), arg0
);
8361 /* Optimize pow(x,0.5) = sqrt(x). */
8362 if (flag_unsafe_math_optimizations
8363 && REAL_VALUES_EQUAL (c
, dconsthalf
))
8365 tree sqrtfn
= mathfn_built_in (type
, BUILT_IN_SQRT
);
8367 if (sqrtfn
!= NULL_TREE
)
8368 return build_call_expr_loc (loc
, sqrtfn
, 1, arg0
);
8371 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8372 if (flag_unsafe_math_optimizations
)
8374 const REAL_VALUE_TYPE dconstroot
8375 = real_value_truncate (TYPE_MODE (type
), dconst_third ());
8377 if (REAL_VALUES_EQUAL (c
, dconstroot
))
8379 tree cbrtfn
= mathfn_built_in (type
, BUILT_IN_CBRT
);
8380 if (cbrtfn
!= NULL_TREE
)
8381 return build_call_expr_loc (loc
, cbrtfn
, 1, arg0
);
8385 /* Check for an integer exponent. */
8386 n
= real_to_integer (&c
);
8387 real_from_integer (&cint
, VOIDmode
, n
, n
< 0 ? -1 : 0, 0);
8388 if (real_identical (&c
, &cint
))
8390 /* Attempt to evaluate pow at compile-time, unless this should
8391 raise an exception. */
8392 if (TREE_CODE (arg0
) == REAL_CST
8393 && !TREE_OVERFLOW (arg0
)
8395 || (!flag_trapping_math
&& !flag_errno_math
)
8396 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0
), dconst0
)))
8401 x
= TREE_REAL_CST (arg0
);
8402 inexact
= real_powi (&x
, TYPE_MODE (type
), &x
, n
);
8403 if (flag_unsafe_math_optimizations
|| !inexact
)
8404 return build_real (type
, x
);
8407 /* Strip sign ops from even integer powers. */
8408 if ((n
& 1) == 0 && flag_unsafe_math_optimizations
)
8410 tree narg0
= fold_strip_sign_ops (arg0
);
8412 return build_call_expr_loc (loc
, fndecl
, 2, narg0
, arg1
);
8417 if (flag_unsafe_math_optimizations
)
8419 const enum built_in_function fcode
= builtin_mathfn_code (arg0
);
8421 /* Optimize pow(expN(x),y) = expN(x*y). */
8422 if (BUILTIN_EXPONENT_P (fcode
))
8424 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
8425 tree arg
= CALL_EXPR_ARG (arg0
, 0);
8426 arg
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg
, arg1
);
8427 return build_call_expr_loc (loc
, expfn
, 1, arg
);
8430 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8431 if (BUILTIN_SQRT_P (fcode
))
8433 tree narg0
= CALL_EXPR_ARG (arg0
, 0);
8434 tree narg1
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg1
,
8435 build_real (type
, dconsthalf
));
8436 return build_call_expr_loc (loc
, fndecl
, 2, narg0
, narg1
);
8439 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8440 if (BUILTIN_CBRT_P (fcode
))
8442 tree arg
= CALL_EXPR_ARG (arg0
, 0);
8443 if (tree_expr_nonnegative_p (arg
))
8445 const REAL_VALUE_TYPE dconstroot
8446 = real_value_truncate (TYPE_MODE (type
), dconst_third ());
8447 tree narg1
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg1
,
8448 build_real (type
, dconstroot
));
8449 return build_call_expr_loc (loc
, fndecl
, 2, arg
, narg1
);
8453 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8454 if (fcode
== BUILT_IN_POW
8455 || fcode
== BUILT_IN_POWF
8456 || fcode
== BUILT_IN_POWL
)
8458 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
8459 if (tree_expr_nonnegative_p (arg00
))
8461 tree arg01
= CALL_EXPR_ARG (arg0
, 1);
8462 tree narg1
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg01
, arg1
);
8463 return build_call_expr_loc (loc
, fndecl
, 2, arg00
, narg1
);
8471 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8472 Return NULL_TREE if no simplification can be made. */
8474 fold_builtin_powi (location_t loc
, tree fndecl ATTRIBUTE_UNUSED
,
8475 tree arg0
, tree arg1
, tree type
)
8477 if (!validate_arg (arg0
, REAL_TYPE
)
8478 || !validate_arg (arg1
, INTEGER_TYPE
))
8481 /* Optimize pow(1.0,y) = 1.0. */
8482 if (real_onep (arg0
))
8483 return omit_one_operand_loc (loc
, type
, build_real (type
, dconst1
), arg1
);
8485 if (host_integerp (arg1
, 0))
8487 HOST_WIDE_INT c
= TREE_INT_CST_LOW (arg1
);
8489 /* Evaluate powi at compile-time. */
8490 if (TREE_CODE (arg0
) == REAL_CST
8491 && !TREE_OVERFLOW (arg0
))
8494 x
= TREE_REAL_CST (arg0
);
8495 real_powi (&x
, TYPE_MODE (type
), &x
, c
);
8496 return build_real (type
, x
);
8499 /* Optimize pow(x,0) = 1.0. */
8501 return omit_one_operand_loc (loc
, type
, build_real (type
, dconst1
),
8504 /* Optimize pow(x,1) = x. */
8508 /* Optimize pow(x,-1) = 1.0/x. */
8510 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
8511 build_real (type
, dconst1
), arg0
);
8517 /* A subroutine of fold_builtin to fold the various exponent
8518 functions. Return NULL_TREE if no simplification can be made.
8519 FUNC is the corresponding MPFR exponent function. */
8522 fold_builtin_exponent (location_t loc
, tree fndecl
, tree arg
,
8523 int (*func
)(mpfr_ptr
, mpfr_srcptr
, mp_rnd_t
))
8525 if (validate_arg (arg
, REAL_TYPE
))
8527 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8530 /* Calculate the result when the argument is a constant. */
8531 if ((res
= do_mpfr_arg1 (arg
, type
, func
, NULL
, NULL
, 0)))
8534 /* Optimize expN(logN(x)) = x. */
8535 if (flag_unsafe_math_optimizations
)
8537 const enum built_in_function fcode
= builtin_mathfn_code (arg
);
8539 if ((func
== mpfr_exp
8540 && (fcode
== BUILT_IN_LOG
8541 || fcode
== BUILT_IN_LOGF
8542 || fcode
== BUILT_IN_LOGL
))
8543 || (func
== mpfr_exp2
8544 && (fcode
== BUILT_IN_LOG2
8545 || fcode
== BUILT_IN_LOG2F
8546 || fcode
== BUILT_IN_LOG2L
))
8547 || (func
== mpfr_exp10
8548 && (fcode
== BUILT_IN_LOG10
8549 || fcode
== BUILT_IN_LOG10F
8550 || fcode
== BUILT_IN_LOG10L
)))
8551 return fold_convert_loc (loc
, type
, CALL_EXPR_ARG (arg
, 0));
8558 /* Return true if VAR is a VAR_DECL or a component thereof. */
8561 var_decl_component_p (tree var
)
8564 while (handled_component_p (inner
))
8565 inner
= TREE_OPERAND (inner
, 0);
8566 return SSA_VAR_P (inner
);
8569 /* Fold function call to builtin memset. Return
8570 NULL_TREE if no simplification can be made. */
8573 fold_builtin_memset (location_t loc
, tree dest
, tree c
, tree len
,
8574 tree type
, bool ignore
)
8576 tree var
, ret
, etype
;
8577 unsigned HOST_WIDE_INT length
, cval
;
8579 if (! validate_arg (dest
, POINTER_TYPE
)
8580 || ! validate_arg (c
, INTEGER_TYPE
)
8581 || ! validate_arg (len
, INTEGER_TYPE
))
8584 if (! host_integerp (len
, 1))
8587 /* If the LEN parameter is zero, return DEST. */
8588 if (integer_zerop (len
))
8589 return omit_one_operand_loc (loc
, type
, dest
, c
);
8591 if (TREE_CODE (c
) != INTEGER_CST
|| TREE_SIDE_EFFECTS (dest
))
8596 if (TREE_CODE (var
) != ADDR_EXPR
)
8599 var
= TREE_OPERAND (var
, 0);
8600 if (TREE_THIS_VOLATILE (var
))
8603 etype
= TREE_TYPE (var
);
8604 if (TREE_CODE (etype
) == ARRAY_TYPE
)
8605 etype
= TREE_TYPE (etype
);
8607 if (!INTEGRAL_TYPE_P (etype
)
8608 && !POINTER_TYPE_P (etype
))
8611 if (! var_decl_component_p (var
))
8614 length
= tree_low_cst (len
, 1);
8615 if (GET_MODE_SIZE (TYPE_MODE (etype
)) != length
8616 || get_pointer_alignment (dest
) / BITS_PER_UNIT
< length
)
8619 if (length
> HOST_BITS_PER_WIDE_INT
/ BITS_PER_UNIT
)
8622 if (integer_zerop (c
))
8626 if (CHAR_BIT
!= 8 || BITS_PER_UNIT
!= 8 || HOST_BITS_PER_WIDE_INT
> 64)
8629 cval
= TREE_INT_CST_LOW (c
);
8633 cval
|= (cval
<< 31) << 1;
8636 ret
= build_int_cst_type (etype
, cval
);
8637 var
= build_fold_indirect_ref_loc (loc
,
8638 fold_convert_loc (loc
,
8639 build_pointer_type (etype
),
8641 ret
= build2 (MODIFY_EXPR
, etype
, var
, ret
);
8645 return omit_one_operand_loc (loc
, type
, dest
, ret
);
8648 /* Fold function call to builtin memset. Return
8649 NULL_TREE if no simplification can be made. */
8652 fold_builtin_bzero (location_t loc
, tree dest
, tree size
, bool ignore
)
8654 if (! validate_arg (dest
, POINTER_TYPE
)
8655 || ! validate_arg (size
, INTEGER_TYPE
))
8661 /* New argument list transforming bzero(ptr x, int y) to
8662 memset(ptr x, int 0, size_t y). This is done this way
8663 so that if it isn't expanded inline, we fallback to
8664 calling bzero instead of memset. */
8666 return fold_builtin_memset (loc
, dest
, integer_zero_node
,
8667 fold_convert_loc (loc
, size_type_node
, size
),
8668 void_type_node
, ignore
);
8671 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8672 NULL_TREE if no simplification can be made.
8673 If ENDP is 0, return DEST (like memcpy).
8674 If ENDP is 1, return DEST+LEN (like mempcpy).
8675 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8676 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8680 fold_builtin_memory_op (location_t loc
, tree dest
, tree src
,
8681 tree len
, tree type
, bool ignore
, int endp
)
8683 tree destvar
, srcvar
, expr
;
8685 if (! validate_arg (dest
, POINTER_TYPE
)
8686 || ! validate_arg (src
, POINTER_TYPE
)
8687 || ! validate_arg (len
, INTEGER_TYPE
))
8690 /* If the LEN parameter is zero, return DEST. */
8691 if (integer_zerop (len
))
8692 return omit_one_operand_loc (loc
, type
, dest
, src
);
8694 /* If SRC and DEST are the same (and not volatile), return
8695 DEST{,+LEN,+LEN-1}. */
8696 if (operand_equal_p (src
, dest
, 0))
8700 tree srctype
, desttype
;
8701 unsigned int src_align
, dest_align
;
8706 src_align
= get_pointer_alignment (src
);
8707 dest_align
= get_pointer_alignment (dest
);
8709 /* Both DEST and SRC must be pointer types.
8710 ??? This is what old code did. Is the testing for pointer types
8713 If either SRC is readonly or length is 1, we can use memcpy. */
8714 if (!dest_align
|| !src_align
)
8716 if (readonly_data_expr (src
)
8717 || (host_integerp (len
, 1)
8718 && (MIN (src_align
, dest_align
) / BITS_PER_UNIT
8719 >= (unsigned HOST_WIDE_INT
) tree_low_cst (len
, 1))))
8721 tree fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
8724 return build_call_expr_loc (loc
, fn
, 3, dest
, src
, len
);
8727 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8728 if (TREE_CODE (src
) == ADDR_EXPR
8729 && TREE_CODE (dest
) == ADDR_EXPR
)
8731 tree src_base
, dest_base
, fn
;
8732 HOST_WIDE_INT src_offset
= 0, dest_offset
= 0;
8733 HOST_WIDE_INT size
= -1;
8734 HOST_WIDE_INT maxsize
= -1;
8736 srcvar
= TREE_OPERAND (src
, 0);
8737 src_base
= get_ref_base_and_extent (srcvar
, &src_offset
,
8739 destvar
= TREE_OPERAND (dest
, 0);
8740 dest_base
= get_ref_base_and_extent (destvar
, &dest_offset
,
8742 if (host_integerp (len
, 1))
8743 maxsize
= tree_low_cst (len
, 1);
8746 src_offset
/= BITS_PER_UNIT
;
8747 dest_offset
/= BITS_PER_UNIT
;
8748 if (SSA_VAR_P (src_base
)
8749 && SSA_VAR_P (dest_base
))
8751 if (operand_equal_p (src_base
, dest_base
, 0)
8752 && ranges_overlap_p (src_offset
, maxsize
,
8753 dest_offset
, maxsize
))
8756 else if (TREE_CODE (src_base
) == MEM_REF
8757 && TREE_CODE (dest_base
) == MEM_REF
)
8760 if (! operand_equal_p (TREE_OPERAND (src_base
, 0),
8761 TREE_OPERAND (dest_base
, 0), 0))
8763 off
= mem_ref_offset (src_base
) +
8764 double_int::from_shwi (src_offset
);
8765 if (!off
.fits_shwi ())
8767 src_offset
= off
.low
;
8768 off
= mem_ref_offset (dest_base
) +
8769 double_int::from_shwi (dest_offset
);
8770 if (!off
.fits_shwi ())
8772 dest_offset
= off
.low
;
8773 if (ranges_overlap_p (src_offset
, maxsize
,
8774 dest_offset
, maxsize
))
8780 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
8783 return build_call_expr_loc (loc
, fn
, 3, dest
, src
, len
);
8786 /* If the destination and source do not alias optimize into
8788 if ((is_gimple_min_invariant (dest
)
8789 || TREE_CODE (dest
) == SSA_NAME
)
8790 && (is_gimple_min_invariant (src
)
8791 || TREE_CODE (src
) == SSA_NAME
))
8794 ao_ref_init_from_ptr_and_size (&destr
, dest
, len
);
8795 ao_ref_init_from_ptr_and_size (&srcr
, src
, len
);
8796 if (!refs_may_alias_p_1 (&destr
, &srcr
, false))
8799 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
8802 return build_call_expr_loc (loc
, fn
, 3, dest
, src
, len
);
8809 if (!host_integerp (len
, 0))
8812 This logic lose for arguments like (type *)malloc (sizeof (type)),
8813 since we strip the casts of up to VOID return value from malloc.
8814 Perhaps we ought to inherit type from non-VOID argument here? */
8817 if (!POINTER_TYPE_P (TREE_TYPE (src
))
8818 || !POINTER_TYPE_P (TREE_TYPE (dest
)))
8820 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
8821 if (TREE_CODE (src
) == POINTER_PLUS_EXPR
)
8823 tree tem
= TREE_OPERAND (src
, 0);
8825 if (tem
!= TREE_OPERAND (src
, 0))
8826 src
= build1 (NOP_EXPR
, TREE_TYPE (tem
), src
);
8828 if (TREE_CODE (dest
) == POINTER_PLUS_EXPR
)
8830 tree tem
= TREE_OPERAND (dest
, 0);
8832 if (tem
!= TREE_OPERAND (dest
, 0))
8833 dest
= build1 (NOP_EXPR
, TREE_TYPE (tem
), dest
);
8835 srctype
= TREE_TYPE (TREE_TYPE (src
));
8836 if (TREE_CODE (srctype
) == ARRAY_TYPE
8837 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype
), len
))
8839 srctype
= TREE_TYPE (srctype
);
8841 src
= build1 (NOP_EXPR
, build_pointer_type (srctype
), src
);
8843 desttype
= TREE_TYPE (TREE_TYPE (dest
));
8844 if (TREE_CODE (desttype
) == ARRAY_TYPE
8845 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype
), len
))
8847 desttype
= TREE_TYPE (desttype
);
8849 dest
= build1 (NOP_EXPR
, build_pointer_type (desttype
), dest
);
8851 if (TREE_ADDRESSABLE (srctype
)
8852 || TREE_ADDRESSABLE (desttype
))
8855 src_align
= get_pointer_alignment (src
);
8856 dest_align
= get_pointer_alignment (dest
);
8857 if (dest_align
< TYPE_ALIGN (desttype
)
8858 || src_align
< TYPE_ALIGN (srctype
))
8862 dest
= builtin_save_expr (dest
);
8864 /* Build accesses at offset zero with a ref-all character type. */
8865 off0
= build_int_cst (build_pointer_type_for_mode (char_type_node
,
8866 ptr_mode
, true), 0);
8869 STRIP_NOPS (destvar
);
8870 if (TREE_CODE (destvar
) == ADDR_EXPR
8871 && var_decl_component_p (TREE_OPERAND (destvar
, 0))
8872 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype
), len
))
8873 destvar
= fold_build2 (MEM_REF
, desttype
, destvar
, off0
);
8875 destvar
= NULL_TREE
;
8878 STRIP_NOPS (srcvar
);
8879 if (TREE_CODE (srcvar
) == ADDR_EXPR
8880 && var_decl_component_p (TREE_OPERAND (srcvar
, 0))
8881 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype
), len
))
8884 || src_align
>= TYPE_ALIGN (desttype
))
8885 srcvar
= fold_build2 (MEM_REF
, destvar
? desttype
: srctype
,
8887 else if (!STRICT_ALIGNMENT
)
8889 srctype
= build_aligned_type (TYPE_MAIN_VARIANT (desttype
),
8891 srcvar
= fold_build2 (MEM_REF
, srctype
, srcvar
, off0
);
8899 if (srcvar
== NULL_TREE
&& destvar
== NULL_TREE
)
8902 if (srcvar
== NULL_TREE
)
8905 if (src_align
>= TYPE_ALIGN (desttype
))
8906 srcvar
= fold_build2 (MEM_REF
, desttype
, src
, off0
);
8909 if (STRICT_ALIGNMENT
)
8911 srctype
= build_aligned_type (TYPE_MAIN_VARIANT (desttype
),
8913 srcvar
= fold_build2 (MEM_REF
, srctype
, src
, off0
);
8916 else if (destvar
== NULL_TREE
)
8919 if (dest_align
>= TYPE_ALIGN (srctype
))
8920 destvar
= fold_build2 (MEM_REF
, srctype
, dest
, off0
);
8923 if (STRICT_ALIGNMENT
)
8925 desttype
= build_aligned_type (TYPE_MAIN_VARIANT (srctype
),
8927 destvar
= fold_build2 (MEM_REF
, desttype
, dest
, off0
);
8931 expr
= build2 (MODIFY_EXPR
, TREE_TYPE (destvar
), destvar
, srcvar
);
8937 if (endp
== 0 || endp
== 3)
8938 return omit_one_operand_loc (loc
, type
, dest
, expr
);
8944 len
= fold_build2_loc (loc
, MINUS_EXPR
, TREE_TYPE (len
), len
,
8947 dest
= fold_build_pointer_plus_loc (loc
, dest
, len
);
8948 dest
= fold_convert_loc (loc
, type
, dest
);
8950 dest
= omit_one_operand_loc (loc
, type
, dest
, expr
);
8954 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8955 If LEN is not NULL, it represents the length of the string to be
8956 copied. Return NULL_TREE if no simplification can be made. */
8959 fold_builtin_strcpy (location_t loc
, tree fndecl
, tree dest
, tree src
, tree len
)
8963 if (!validate_arg (dest
, POINTER_TYPE
)
8964 || !validate_arg (src
, POINTER_TYPE
))
8967 /* If SRC and DEST are the same (and not volatile), return DEST. */
8968 if (operand_equal_p (src
, dest
, 0))
8969 return fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), dest
);
8971 if (optimize_function_for_size_p (cfun
))
8974 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
8980 len
= c_strlen (src
, 1);
8981 if (! len
|| TREE_SIDE_EFFECTS (len
))
8985 len
= fold_convert_loc (loc
, size_type_node
, len
);
8986 len
= size_binop_loc (loc
, PLUS_EXPR
, len
, build_int_cst (size_type_node
, 1));
8987 return fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)),
8988 build_call_expr_loc (loc
, fn
, 3, dest
, src
, len
));
8991 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
8992 Return NULL_TREE if no simplification can be made. */
8995 fold_builtin_stpcpy (location_t loc
, tree fndecl
, tree dest
, tree src
)
8997 tree fn
, len
, lenp1
, call
, type
;
8999 if (!validate_arg (dest
, POINTER_TYPE
)
9000 || !validate_arg (src
, POINTER_TYPE
))
9003 len
= c_strlen (src
, 1);
9005 || TREE_CODE (len
) != INTEGER_CST
)
9008 if (optimize_function_for_size_p (cfun
)
9009 /* If length is zero it's small enough. */
9010 && !integer_zerop (len
))
9013 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
9017 lenp1
= size_binop_loc (loc
, PLUS_EXPR
,
9018 fold_convert_loc (loc
, size_type_node
, len
),
9019 build_int_cst (size_type_node
, 1));
9020 /* We use dest twice in building our expression. Save it from
9021 multiple expansions. */
9022 dest
= builtin_save_expr (dest
);
9023 call
= build_call_expr_loc (loc
, fn
, 3, dest
, src
, lenp1
);
9025 type
= TREE_TYPE (TREE_TYPE (fndecl
));
9026 dest
= fold_build_pointer_plus_loc (loc
, dest
, len
);
9027 dest
= fold_convert_loc (loc
, type
, dest
);
9028 dest
= omit_one_operand_loc (loc
, type
, dest
, call
);
9032 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
9033 If SLEN is not NULL, it represents the length of the source string.
9034 Return NULL_TREE if no simplification can be made. */
9037 fold_builtin_strncpy (location_t loc
, tree fndecl
, tree dest
,
9038 tree src
, tree len
, tree slen
)
9042 if (!validate_arg (dest
, POINTER_TYPE
)
9043 || !validate_arg (src
, POINTER_TYPE
)
9044 || !validate_arg (len
, INTEGER_TYPE
))
9047 /* If the LEN parameter is zero, return DEST. */
9048 if (integer_zerop (len
))
9049 return omit_one_operand_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), dest
, src
);
9051 /* We can't compare slen with len as constants below if len is not a
9053 if (len
== 0 || TREE_CODE (len
) != INTEGER_CST
)
9057 slen
= c_strlen (src
, 1);
9059 /* Now, we must be passed a constant src ptr parameter. */
9060 if (slen
== 0 || TREE_CODE (slen
) != INTEGER_CST
)
9063 slen
= size_binop_loc (loc
, PLUS_EXPR
, slen
, ssize_int (1));
9065 /* We do not support simplification of this case, though we do
9066 support it when expanding trees into RTL. */
9067 /* FIXME: generate a call to __builtin_memset. */
9068 if (tree_int_cst_lt (slen
, len
))
9071 /* OK transform into builtin memcpy. */
9072 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
9076 len
= fold_convert_loc (loc
, size_type_node
, len
);
9077 return fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)),
9078 build_call_expr_loc (loc
, fn
, 3, dest
, src
, len
));
9081 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
9082 arguments to the call, and TYPE is its return type.
9083 Return NULL_TREE if no simplification can be made. */
9086 fold_builtin_memchr (location_t loc
, tree arg1
, tree arg2
, tree len
, tree type
)
9088 if (!validate_arg (arg1
, POINTER_TYPE
)
9089 || !validate_arg (arg2
, INTEGER_TYPE
)
9090 || !validate_arg (len
, INTEGER_TYPE
))
9096 if (TREE_CODE (arg2
) != INTEGER_CST
9097 || !host_integerp (len
, 1))
9100 p1
= c_getstr (arg1
);
9101 if (p1
&& compare_tree_int (len
, strlen (p1
) + 1) <= 0)
9107 if (target_char_cast (arg2
, &c
))
9110 r
= (const char *) memchr (p1
, c
, tree_low_cst (len
, 1));
9113 return build_int_cst (TREE_TYPE (arg1
), 0);
9115 tem
= fold_build_pointer_plus_hwi_loc (loc
, arg1
, r
- p1
);
9116 return fold_convert_loc (loc
, type
, tem
);
9122 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
9123 Return NULL_TREE if no simplification can be made. */
9126 fold_builtin_memcmp (location_t loc
, tree arg1
, tree arg2
, tree len
)
9128 const char *p1
, *p2
;
9130 if (!validate_arg (arg1
, POINTER_TYPE
)
9131 || !validate_arg (arg2
, POINTER_TYPE
)
9132 || !validate_arg (len
, INTEGER_TYPE
))
9135 /* If the LEN parameter is zero, return zero. */
9136 if (integer_zerop (len
))
9137 return omit_two_operands_loc (loc
, integer_type_node
, integer_zero_node
,
9140 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9141 if (operand_equal_p (arg1
, arg2
, 0))
9142 return omit_one_operand_loc (loc
, integer_type_node
, integer_zero_node
, len
);
9144 p1
= c_getstr (arg1
);
9145 p2
= c_getstr (arg2
);
9147 /* If all arguments are constant, and the value of len is not greater
9148 than the lengths of arg1 and arg2, evaluate at compile-time. */
9149 if (host_integerp (len
, 1) && p1
&& p2
9150 && compare_tree_int (len
, strlen (p1
) + 1) <= 0
9151 && compare_tree_int (len
, strlen (p2
) + 1) <= 0)
9153 const int r
= memcmp (p1
, p2
, tree_low_cst (len
, 1));
9156 return integer_one_node
;
9158 return integer_minus_one_node
;
9160 return integer_zero_node
;
9163 /* If len parameter is one, return an expression corresponding to
9164 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9165 if (host_integerp (len
, 1) && tree_low_cst (len
, 1) == 1)
9167 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
9168 tree cst_uchar_ptr_node
9169 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
9172 = fold_convert_loc (loc
, integer_type_node
,
9173 build1 (INDIRECT_REF
, cst_uchar_node
,
9174 fold_convert_loc (loc
,
9178 = fold_convert_loc (loc
, integer_type_node
,
9179 build1 (INDIRECT_REF
, cst_uchar_node
,
9180 fold_convert_loc (loc
,
9183 return fold_build2_loc (loc
, MINUS_EXPR
, integer_type_node
, ind1
, ind2
);
9189 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9190 Return NULL_TREE if no simplification can be made. */
9193 fold_builtin_strcmp (location_t loc
, tree arg1
, tree arg2
)
9195 const char *p1
, *p2
;
9197 if (!validate_arg (arg1
, POINTER_TYPE
)
9198 || !validate_arg (arg2
, POINTER_TYPE
))
9201 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9202 if (operand_equal_p (arg1
, arg2
, 0))
9203 return integer_zero_node
;
9205 p1
= c_getstr (arg1
);
9206 p2
= c_getstr (arg2
);
9210 const int i
= strcmp (p1
, p2
);
9212 return integer_minus_one_node
;
9214 return integer_one_node
;
9216 return integer_zero_node
;
9219 /* If the second arg is "", return *(const unsigned char*)arg1. */
9220 if (p2
&& *p2
== '\0')
9222 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
9223 tree cst_uchar_ptr_node
9224 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
9226 return fold_convert_loc (loc
, integer_type_node
,
9227 build1 (INDIRECT_REF
, cst_uchar_node
,
9228 fold_convert_loc (loc
,
9233 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9234 if (p1
&& *p1
== '\0')
9236 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
9237 tree cst_uchar_ptr_node
9238 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
9241 = fold_convert_loc (loc
, integer_type_node
,
9242 build1 (INDIRECT_REF
, cst_uchar_node
,
9243 fold_convert_loc (loc
,
9246 return fold_build1_loc (loc
, NEGATE_EXPR
, integer_type_node
, temp
);
9252 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9253 Return NULL_TREE if no simplification can be made. */
9256 fold_builtin_strncmp (location_t loc
, tree arg1
, tree arg2
, tree len
)
9258 const char *p1
, *p2
;
9260 if (!validate_arg (arg1
, POINTER_TYPE
)
9261 || !validate_arg (arg2
, POINTER_TYPE
)
9262 || !validate_arg (len
, INTEGER_TYPE
))
9265 /* If the LEN parameter is zero, return zero. */
9266 if (integer_zerop (len
))
9267 return omit_two_operands_loc (loc
, integer_type_node
, integer_zero_node
,
9270 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9271 if (operand_equal_p (arg1
, arg2
, 0))
9272 return omit_one_operand_loc (loc
, integer_type_node
, integer_zero_node
, len
);
9274 p1
= c_getstr (arg1
);
9275 p2
= c_getstr (arg2
);
9277 if (host_integerp (len
, 1) && p1
&& p2
)
9279 const int i
= strncmp (p1
, p2
, tree_low_cst (len
, 1));
9281 return integer_one_node
;
9283 return integer_minus_one_node
;
9285 return integer_zero_node
;
9288 /* If the second arg is "", and the length is greater than zero,
9289 return *(const unsigned char*)arg1. */
9290 if (p2
&& *p2
== '\0'
9291 && TREE_CODE (len
) == INTEGER_CST
9292 && tree_int_cst_sgn (len
) == 1)
9294 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
9295 tree cst_uchar_ptr_node
9296 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
9298 return fold_convert_loc (loc
, integer_type_node
,
9299 build1 (INDIRECT_REF
, cst_uchar_node
,
9300 fold_convert_loc (loc
,
9305 /* If the first arg is "", and the length is greater than zero,
9306 return -*(const unsigned char*)arg2. */
9307 if (p1
&& *p1
== '\0'
9308 && TREE_CODE (len
) == INTEGER_CST
9309 && tree_int_cst_sgn (len
) == 1)
9311 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
9312 tree cst_uchar_ptr_node
9313 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
9315 tree temp
= fold_convert_loc (loc
, integer_type_node
,
9316 build1 (INDIRECT_REF
, cst_uchar_node
,
9317 fold_convert_loc (loc
,
9320 return fold_build1_loc (loc
, NEGATE_EXPR
, integer_type_node
, temp
);
9323 /* If len parameter is one, return an expression corresponding to
9324 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9325 if (host_integerp (len
, 1) && tree_low_cst (len
, 1) == 1)
9327 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
9328 tree cst_uchar_ptr_node
9329 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
9331 tree ind1
= fold_convert_loc (loc
, integer_type_node
,
9332 build1 (INDIRECT_REF
, cst_uchar_node
,
9333 fold_convert_loc (loc
,
9336 tree ind2
= fold_convert_loc (loc
, integer_type_node
,
9337 build1 (INDIRECT_REF
, cst_uchar_node
,
9338 fold_convert_loc (loc
,
9341 return fold_build2_loc (loc
, MINUS_EXPR
, integer_type_node
, ind1
, ind2
);
9347 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9348 ARG. Return NULL_TREE if no simplification can be made. */
9351 fold_builtin_signbit (location_t loc
, tree arg
, tree type
)
9353 if (!validate_arg (arg
, REAL_TYPE
))
9356 /* If ARG is a compile-time constant, determine the result. */
9357 if (TREE_CODE (arg
) == REAL_CST
9358 && !TREE_OVERFLOW (arg
))
9362 c
= TREE_REAL_CST (arg
);
9363 return (REAL_VALUE_NEGATIVE (c
)
9364 ? build_one_cst (type
)
9365 : build_zero_cst (type
));
9368 /* If ARG is non-negative, the result is always zero. */
9369 if (tree_expr_nonnegative_p (arg
))
9370 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
9372 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9373 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg
))))
9374 return fold_convert (type
,
9375 fold_build2_loc (loc
, LT_EXPR
, boolean_type_node
, arg
,
9376 build_real (TREE_TYPE (arg
), dconst0
)));
9381 /* Fold function call to builtin copysign, copysignf or copysignl with
9382 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9386 fold_builtin_copysign (location_t loc
, tree fndecl
,
9387 tree arg1
, tree arg2
, tree type
)
9391 if (!validate_arg (arg1
, REAL_TYPE
)
9392 || !validate_arg (arg2
, REAL_TYPE
))
9395 /* copysign(X,X) is X. */
9396 if (operand_equal_p (arg1
, arg2
, 0))
9397 return fold_convert_loc (loc
, type
, arg1
);
9399 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9400 if (TREE_CODE (arg1
) == REAL_CST
9401 && TREE_CODE (arg2
) == REAL_CST
9402 && !TREE_OVERFLOW (arg1
)
9403 && !TREE_OVERFLOW (arg2
))
9405 REAL_VALUE_TYPE c1
, c2
;
9407 c1
= TREE_REAL_CST (arg1
);
9408 c2
= TREE_REAL_CST (arg2
);
9409 /* c1.sign := c2.sign. */
9410 real_copysign (&c1
, &c2
);
9411 return build_real (type
, c1
);
9414 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9415 Remember to evaluate Y for side-effects. */
9416 if (tree_expr_nonnegative_p (arg2
))
9417 return omit_one_operand_loc (loc
, type
,
9418 fold_build1_loc (loc
, ABS_EXPR
, type
, arg1
),
9421 /* Strip sign changing operations for the first argument. */
9422 tem
= fold_strip_sign_ops (arg1
);
9424 return build_call_expr_loc (loc
, fndecl
, 2, tem
, arg2
);
9429 /* Fold a call to builtin isascii with argument ARG. */
9432 fold_builtin_isascii (location_t loc
, tree arg
)
9434 if (!validate_arg (arg
, INTEGER_TYPE
))
9438 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9439 arg
= fold_build2 (BIT_AND_EXPR
, integer_type_node
, arg
,
9440 build_int_cst (integer_type_node
,
9441 ~ (unsigned HOST_WIDE_INT
) 0x7f));
9442 return fold_build2_loc (loc
, EQ_EXPR
, integer_type_node
,
9443 arg
, integer_zero_node
);
9447 /* Fold a call to builtin toascii with argument ARG. */
9450 fold_builtin_toascii (location_t loc
, tree arg
)
9452 if (!validate_arg (arg
, INTEGER_TYPE
))
9455 /* Transform toascii(c) -> (c & 0x7f). */
9456 return fold_build2_loc (loc
, BIT_AND_EXPR
, integer_type_node
, arg
,
9457 build_int_cst (integer_type_node
, 0x7f));
9460 /* Fold a call to builtin isdigit with argument ARG. */
9463 fold_builtin_isdigit (location_t loc
, tree arg
)
9465 if (!validate_arg (arg
, INTEGER_TYPE
))
9469 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9470 /* According to the C standard, isdigit is unaffected by locale.
9471 However, it definitely is affected by the target character set. */
9472 unsigned HOST_WIDE_INT target_digit0
9473 = lang_hooks
.to_target_charset ('0');
9475 if (target_digit0
== 0)
9478 arg
= fold_convert_loc (loc
, unsigned_type_node
, arg
);
9479 arg
= fold_build2 (MINUS_EXPR
, unsigned_type_node
, arg
,
9480 build_int_cst (unsigned_type_node
, target_digit0
));
9481 return fold_build2_loc (loc
, LE_EXPR
, integer_type_node
, arg
,
9482 build_int_cst (unsigned_type_node
, 9));
9486 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9489 fold_builtin_fabs (location_t loc
, tree arg
, tree type
)
9491 if (!validate_arg (arg
, REAL_TYPE
))
9494 arg
= fold_convert_loc (loc
, type
, arg
);
9495 if (TREE_CODE (arg
) == REAL_CST
)
9496 return fold_abs_const (arg
, type
);
9497 return fold_build1_loc (loc
, ABS_EXPR
, type
, arg
);
9500 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9503 fold_builtin_abs (location_t loc
, tree arg
, tree type
)
9505 if (!validate_arg (arg
, INTEGER_TYPE
))
9508 arg
= fold_convert_loc (loc
, type
, arg
);
9509 if (TREE_CODE (arg
) == INTEGER_CST
)
9510 return fold_abs_const (arg
, type
);
9511 return fold_build1_loc (loc
, ABS_EXPR
, type
, arg
);
9514 /* Fold a fma operation with arguments ARG[012]. */
9517 fold_fma (location_t loc ATTRIBUTE_UNUSED
,
9518 tree type
, tree arg0
, tree arg1
, tree arg2
)
9520 if (TREE_CODE (arg0
) == REAL_CST
9521 && TREE_CODE (arg1
) == REAL_CST
9522 && TREE_CODE (arg2
) == REAL_CST
)
9523 return do_mpfr_arg3 (arg0
, arg1
, arg2
, type
, mpfr_fma
);
9528 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
9531 fold_builtin_fma (location_t loc
, tree arg0
, tree arg1
, tree arg2
, tree type
)
9533 if (validate_arg (arg0
, REAL_TYPE
)
9534 && validate_arg (arg1
, REAL_TYPE
)
9535 && validate_arg (arg2
, REAL_TYPE
))
9537 tree tem
= fold_fma (loc
, type
, arg0
, arg1
, arg2
);
9541 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9542 if (optab_handler (fma_optab
, TYPE_MODE (type
)) != CODE_FOR_nothing
)
9543 return fold_build3_loc (loc
, FMA_EXPR
, type
, arg0
, arg1
, arg2
);
9548 /* Fold a call to builtin fmin or fmax. */
9551 fold_builtin_fmin_fmax (location_t loc
, tree arg0
, tree arg1
,
9552 tree type
, bool max
)
9554 if (validate_arg (arg0
, REAL_TYPE
) && validate_arg (arg1
, REAL_TYPE
))
9556 /* Calculate the result when the argument is a constant. */
9557 tree res
= do_mpfr_arg2 (arg0
, arg1
, type
, (max
? mpfr_max
: mpfr_min
));
9562 /* If either argument is NaN, return the other one. Avoid the
9563 transformation if we get (and honor) a signalling NaN. Using
9564 omit_one_operand() ensures we create a non-lvalue. */
9565 if (TREE_CODE (arg0
) == REAL_CST
9566 && real_isnan (&TREE_REAL_CST (arg0
))
9567 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
9568 || ! TREE_REAL_CST (arg0
).signalling
))
9569 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
9570 if (TREE_CODE (arg1
) == REAL_CST
9571 && real_isnan (&TREE_REAL_CST (arg1
))
9572 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1
)))
9573 || ! TREE_REAL_CST (arg1
).signalling
))
9574 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
9576 /* Transform fmin/fmax(x,x) -> x. */
9577 if (operand_equal_p (arg0
, arg1
, OEP_PURE_SAME
))
9578 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
9580 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9581 functions to return the numeric arg if the other one is NaN.
9582 These tree codes don't honor that, so only transform if
9583 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9584 handled, so we don't have to worry about it either. */
9585 if (flag_finite_math_only
)
9586 return fold_build2_loc (loc
, (max
? MAX_EXPR
: MIN_EXPR
), type
,
9587 fold_convert_loc (loc
, type
, arg0
),
9588 fold_convert_loc (loc
, type
, arg1
));
9593 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9596 fold_builtin_carg (location_t loc
, tree arg
, tree type
)
9598 if (validate_arg (arg
, COMPLEX_TYPE
)
9599 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) == REAL_TYPE
)
9601 tree atan2_fn
= mathfn_built_in (type
, BUILT_IN_ATAN2
);
9605 tree new_arg
= builtin_save_expr (arg
);
9606 tree r_arg
= fold_build1_loc (loc
, REALPART_EXPR
, type
, new_arg
);
9607 tree i_arg
= fold_build1_loc (loc
, IMAGPART_EXPR
, type
, new_arg
);
9608 return build_call_expr_loc (loc
, atan2_fn
, 2, i_arg
, r_arg
);
9615 /* Fold a call to builtin logb/ilogb. */
9618 fold_builtin_logb (location_t loc
, tree arg
, tree rettype
)
9620 if (! validate_arg (arg
, REAL_TYPE
))
9625 if (TREE_CODE (arg
) == REAL_CST
&& ! TREE_OVERFLOW (arg
))
9627 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg
);
9633 /* If arg is Inf or NaN and we're logb, return it. */
9634 if (TREE_CODE (rettype
) == REAL_TYPE
)
9636 /* For logb(-Inf) we have to return +Inf. */
9637 if (real_isinf (value
) && real_isneg (value
))
9639 REAL_VALUE_TYPE tem
;
9641 return build_real (rettype
, tem
);
9643 return fold_convert_loc (loc
, rettype
, arg
);
9645 /* Fall through... */
9647 /* Zero may set errno and/or raise an exception for logb, also
9648 for ilogb we don't know FP_ILOGB0. */
9651 /* For normal numbers, proceed iff radix == 2. In GCC,
9652 normalized significands are in the range [0.5, 1.0). We
9653 want the exponent as if they were [1.0, 2.0) so get the
9654 exponent and subtract 1. */
9655 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg
)))->b
== 2)
9656 return fold_convert_loc (loc
, rettype
,
9657 build_int_cst (integer_type_node
,
9658 REAL_EXP (value
)-1));
9666 /* Fold a call to builtin significand, if radix == 2. */
9669 fold_builtin_significand (location_t loc
, tree arg
, tree rettype
)
9671 if (! validate_arg (arg
, REAL_TYPE
))
9676 if (TREE_CODE (arg
) == REAL_CST
&& ! TREE_OVERFLOW (arg
))
9678 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg
);
9685 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9686 return fold_convert_loc (loc
, rettype
, arg
);
9688 /* For normal numbers, proceed iff radix == 2. */
9689 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg
)))->b
== 2)
9691 REAL_VALUE_TYPE result
= *value
;
9692 /* In GCC, normalized significands are in the range [0.5,
9693 1.0). We want them to be [1.0, 2.0) so set the
9695 SET_REAL_EXP (&result
, 1);
9696 return build_real (rettype
, result
);
9705 /* Fold a call to builtin frexp, we can assume the base is 2. */
9708 fold_builtin_frexp (location_t loc
, tree arg0
, tree arg1
, tree rettype
)
9710 if (! validate_arg (arg0
, REAL_TYPE
) || ! validate_arg (arg1
, POINTER_TYPE
))
9715 if (!(TREE_CODE (arg0
) == REAL_CST
&& ! TREE_OVERFLOW (arg0
)))
9718 arg1
= build_fold_indirect_ref_loc (loc
, arg1
);
9720 /* Proceed if a valid pointer type was passed in. */
9721 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1
)) == integer_type_node
)
9723 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg0
);
9729 /* For +-0, return (*exp = 0, +-0). */
9730 exp
= integer_zero_node
;
9735 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9736 return omit_one_operand_loc (loc
, rettype
, arg0
, arg1
);
9739 /* Since the frexp function always expects base 2, and in
9740 GCC normalized significands are already in the range
9741 [0.5, 1.0), we have exactly what frexp wants. */
9742 REAL_VALUE_TYPE frac_rvt
= *value
;
9743 SET_REAL_EXP (&frac_rvt
, 0);
9744 frac
= build_real (rettype
, frac_rvt
);
9745 exp
= build_int_cst (integer_type_node
, REAL_EXP (value
));
9752 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9753 arg1
= fold_build2_loc (loc
, MODIFY_EXPR
, rettype
, arg1
, exp
);
9754 TREE_SIDE_EFFECTS (arg1
) = 1;
9755 return fold_build2_loc (loc
, COMPOUND_EXPR
, rettype
, arg1
, frac
);
9761 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9762 then we can assume the base is two. If it's false, then we have to
9763 check the mode of the TYPE parameter in certain cases. */
9766 fold_builtin_load_exponent (location_t loc
, tree arg0
, tree arg1
,
9767 tree type
, bool ldexp
)
9769 if (validate_arg (arg0
, REAL_TYPE
) && validate_arg (arg1
, INTEGER_TYPE
))
9774 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9775 if (real_zerop (arg0
) || integer_zerop (arg1
)
9776 || (TREE_CODE (arg0
) == REAL_CST
9777 && !real_isfinite (&TREE_REAL_CST (arg0
))))
9778 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
9780 /* If both arguments are constant, then try to evaluate it. */
9781 if ((ldexp
|| REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2)
9782 && TREE_CODE (arg0
) == REAL_CST
&& !TREE_OVERFLOW (arg0
)
9783 && host_integerp (arg1
, 0))
9785 /* Bound the maximum adjustment to twice the range of the
9786 mode's valid exponents. Use abs to ensure the range is
9787 positive as a sanity check. */
9788 const long max_exp_adj
= 2 *
9789 labs (REAL_MODE_FORMAT (TYPE_MODE (type
))->emax
9790 - REAL_MODE_FORMAT (TYPE_MODE (type
))->emin
);
9792 /* Get the user-requested adjustment. */
9793 const HOST_WIDE_INT req_exp_adj
= tree_low_cst (arg1
, 0);
9795 /* The requested adjustment must be inside this range. This
9796 is a preliminary cap to avoid things like overflow, we
9797 may still fail to compute the result for other reasons. */
9798 if (-max_exp_adj
< req_exp_adj
&& req_exp_adj
< max_exp_adj
)
9800 REAL_VALUE_TYPE initial_result
;
9802 real_ldexp (&initial_result
, &TREE_REAL_CST (arg0
), req_exp_adj
);
9804 /* Ensure we didn't overflow. */
9805 if (! real_isinf (&initial_result
))
9807 const REAL_VALUE_TYPE trunc_result
9808 = real_value_truncate (TYPE_MODE (type
), initial_result
);
9810 /* Only proceed if the target mode can hold the
9812 if (REAL_VALUES_EQUAL (initial_result
, trunc_result
))
9813 return build_real (type
, trunc_result
);
9822 /* Fold a call to builtin modf. */
9825 fold_builtin_modf (location_t loc
, tree arg0
, tree arg1
, tree rettype
)
9827 if (! validate_arg (arg0
, REAL_TYPE
) || ! validate_arg (arg1
, POINTER_TYPE
))
9832 if (!(TREE_CODE (arg0
) == REAL_CST
&& ! TREE_OVERFLOW (arg0
)))
9835 arg1
= build_fold_indirect_ref_loc (loc
, arg1
);
9837 /* Proceed if a valid pointer type was passed in. */
9838 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1
)) == TYPE_MAIN_VARIANT (rettype
))
9840 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg0
);
9841 REAL_VALUE_TYPE trunc
, frac
;
9847 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9848 trunc
= frac
= *value
;
9851 /* For +-Inf, return (*arg1 = arg0, +-0). */
9853 frac
.sign
= value
->sign
;
9857 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9858 real_trunc (&trunc
, VOIDmode
, value
);
9859 real_arithmetic (&frac
, MINUS_EXPR
, value
, &trunc
);
9860 /* If the original number was negative and already
9861 integral, then the fractional part is -0.0. */
9862 if (value
->sign
&& frac
.cl
== rvc_zero
)
9863 frac
.sign
= value
->sign
;
9867 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9868 arg1
= fold_build2_loc (loc
, MODIFY_EXPR
, rettype
, arg1
,
9869 build_real (rettype
, trunc
));
9870 TREE_SIDE_EFFECTS (arg1
) = 1;
9871 return fold_build2_loc (loc
, COMPOUND_EXPR
, rettype
, arg1
,
9872 build_real (rettype
, frac
));
9878 /* Given a location LOC, an interclass builtin function decl FNDECL
9879 and its single argument ARG, return an folded expression computing
9880 the same, or NULL_TREE if we either couldn't or didn't want to fold
9881 (the latter happen if there's an RTL instruction available). */
9884 fold_builtin_interclass_mathfn (location_t loc
, tree fndecl
, tree arg
)
9886 enum machine_mode mode
;
9888 if (!validate_arg (arg
, REAL_TYPE
))
9891 if (interclass_mathfn_icode (arg
, fndecl
) != CODE_FOR_nothing
)
9894 mode
= TYPE_MODE (TREE_TYPE (arg
));
9896 /* If there is no optab, try generic code. */
9897 switch (DECL_FUNCTION_CODE (fndecl
))
9901 CASE_FLT_FN (BUILT_IN_ISINF
):
9903 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9904 tree
const isgr_fn
= builtin_decl_explicit (BUILT_IN_ISGREATER
);
9905 tree
const type
= TREE_TYPE (arg
);
9909 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
9910 real_from_string (&r
, buf
);
9911 result
= build_call_expr (isgr_fn
, 2,
9912 fold_build1_loc (loc
, ABS_EXPR
, type
, arg
),
9913 build_real (type
, r
));
9916 CASE_FLT_FN (BUILT_IN_FINITE
):
9917 case BUILT_IN_ISFINITE
:
9919 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9920 tree
const isle_fn
= builtin_decl_explicit (BUILT_IN_ISLESSEQUAL
);
9921 tree
const type
= TREE_TYPE (arg
);
9925 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
9926 real_from_string (&r
, buf
);
9927 result
= build_call_expr (isle_fn
, 2,
9928 fold_build1_loc (loc
, ABS_EXPR
, type
, arg
),
9929 build_real (type
, r
));
9930 /*result = fold_build2_loc (loc, UNGT_EXPR,
9931 TREE_TYPE (TREE_TYPE (fndecl)),
9932 fold_build1_loc (loc, ABS_EXPR, type, arg),
9933 build_real (type, r));
9934 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9935 TREE_TYPE (TREE_TYPE (fndecl)),
9939 case BUILT_IN_ISNORMAL
:
9941 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9942 islessequal(fabs(x),DBL_MAX). */
9943 tree
const isle_fn
= builtin_decl_explicit (BUILT_IN_ISLESSEQUAL
);
9944 tree
const isge_fn
= builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL
);
9945 tree
const type
= TREE_TYPE (arg
);
9946 REAL_VALUE_TYPE rmax
, rmin
;
9949 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
9950 real_from_string (&rmax
, buf
);
9951 sprintf (buf
, "0x1p%d", REAL_MODE_FORMAT (mode
)->emin
- 1);
9952 real_from_string (&rmin
, buf
);
9953 arg
= builtin_save_expr (fold_build1_loc (loc
, ABS_EXPR
, type
, arg
));
9954 result
= build_call_expr (isle_fn
, 2, arg
,
9955 build_real (type
, rmax
));
9956 result
= fold_build2 (BIT_AND_EXPR
, integer_type_node
, result
,
9957 build_call_expr (isge_fn
, 2, arg
,
9958 build_real (type
, rmin
)));
9968 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9969 ARG is the argument for the call. */
9972 fold_builtin_classify (location_t loc
, tree fndecl
, tree arg
, int builtin_index
)
9974 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9977 if (!validate_arg (arg
, REAL_TYPE
))
9980 switch (builtin_index
)
9982 case BUILT_IN_ISINF
:
9983 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg
))))
9984 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
9986 if (TREE_CODE (arg
) == REAL_CST
)
9988 r
= TREE_REAL_CST (arg
);
9989 if (real_isinf (&r
))
9990 return real_compare (GT_EXPR
, &r
, &dconst0
)
9991 ? integer_one_node
: integer_minus_one_node
;
9993 return integer_zero_node
;
9998 case BUILT_IN_ISINF_SIGN
:
10000 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
10001 /* In a boolean context, GCC will fold the inner COND_EXPR to
10002 1. So e.g. "if (isinf_sign(x))" would be folded to just
10003 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
10004 tree signbit_fn
= mathfn_built_in_1 (TREE_TYPE (arg
), BUILT_IN_SIGNBIT
, 0);
10005 tree isinf_fn
= builtin_decl_explicit (BUILT_IN_ISINF
);
10006 tree tmp
= NULL_TREE
;
10008 arg
= builtin_save_expr (arg
);
10010 if (signbit_fn
&& isinf_fn
)
10012 tree signbit_call
= build_call_expr_loc (loc
, signbit_fn
, 1, arg
);
10013 tree isinf_call
= build_call_expr_loc (loc
, isinf_fn
, 1, arg
);
10015 signbit_call
= fold_build2_loc (loc
, NE_EXPR
, integer_type_node
,
10016 signbit_call
, integer_zero_node
);
10017 isinf_call
= fold_build2_loc (loc
, NE_EXPR
, integer_type_node
,
10018 isinf_call
, integer_zero_node
);
10020 tmp
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, signbit_call
,
10021 integer_minus_one_node
, integer_one_node
);
10022 tmp
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
,
10024 integer_zero_node
);
10030 case BUILT_IN_ISFINITE
:
10031 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg
)))
10032 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg
))))
10033 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg
);
10035 if (TREE_CODE (arg
) == REAL_CST
)
10037 r
= TREE_REAL_CST (arg
);
10038 return real_isfinite (&r
) ? integer_one_node
: integer_zero_node
;
10043 case BUILT_IN_ISNAN
:
10044 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg
))))
10045 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
10047 if (TREE_CODE (arg
) == REAL_CST
)
10049 r
= TREE_REAL_CST (arg
);
10050 return real_isnan (&r
) ? integer_one_node
: integer_zero_node
;
10053 arg
= builtin_save_expr (arg
);
10054 return fold_build2_loc (loc
, UNORDERED_EXPR
, type
, arg
, arg
);
10057 gcc_unreachable ();
10061 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
10062 This builtin will generate code to return the appropriate floating
10063 point classification depending on the value of the floating point
10064 number passed in. The possible return values must be supplied as
10065 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
10066 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
10067 one floating point argument which is "type generic". */
10070 fold_builtin_fpclassify (location_t loc
, tree exp
)
10072 tree fp_nan
, fp_infinite
, fp_normal
, fp_subnormal
, fp_zero
,
10073 arg
, type
, res
, tmp
;
10074 enum machine_mode mode
;
10078 /* Verify the required arguments in the original call. */
10079 if (!validate_arglist (exp
, INTEGER_TYPE
, INTEGER_TYPE
,
10080 INTEGER_TYPE
, INTEGER_TYPE
,
10081 INTEGER_TYPE
, REAL_TYPE
, VOID_TYPE
))
10084 fp_nan
= CALL_EXPR_ARG (exp
, 0);
10085 fp_infinite
= CALL_EXPR_ARG (exp
, 1);
10086 fp_normal
= CALL_EXPR_ARG (exp
, 2);
10087 fp_subnormal
= CALL_EXPR_ARG (exp
, 3);
10088 fp_zero
= CALL_EXPR_ARG (exp
, 4);
10089 arg
= CALL_EXPR_ARG (exp
, 5);
10090 type
= TREE_TYPE (arg
);
10091 mode
= TYPE_MODE (type
);
10092 arg
= builtin_save_expr (fold_build1_loc (loc
, ABS_EXPR
, type
, arg
));
10094 /* fpclassify(x) ->
10095 isnan(x) ? FP_NAN :
10096 (fabs(x) == Inf ? FP_INFINITE :
10097 (fabs(x) >= DBL_MIN ? FP_NORMAL :
10098 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
10100 tmp
= fold_build2_loc (loc
, EQ_EXPR
, integer_type_node
, arg
,
10101 build_real (type
, dconst0
));
10102 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
,
10103 tmp
, fp_zero
, fp_subnormal
);
10105 sprintf (buf
, "0x1p%d", REAL_MODE_FORMAT (mode
)->emin
- 1);
10106 real_from_string (&r
, buf
);
10107 tmp
= fold_build2_loc (loc
, GE_EXPR
, integer_type_node
,
10108 arg
, build_real (type
, r
));
10109 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, tmp
, fp_normal
, res
);
10111 if (HONOR_INFINITIES (mode
))
10114 tmp
= fold_build2_loc (loc
, EQ_EXPR
, integer_type_node
, arg
,
10115 build_real (type
, r
));
10116 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, tmp
,
10120 if (HONOR_NANS (mode
))
10122 tmp
= fold_build2_loc (loc
, ORDERED_EXPR
, integer_type_node
, arg
, arg
);
10123 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, tmp
, res
, fp_nan
);
10129 /* Fold a call to an unordered comparison function such as
10130 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
10131 being called and ARG0 and ARG1 are the arguments for the call.
10132 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
10133 the opposite of the desired result. UNORDERED_CODE is used
10134 for modes that can hold NaNs and ORDERED_CODE is used for
10138 fold_builtin_unordered_cmp (location_t loc
, tree fndecl
, tree arg0
, tree arg1
,
10139 enum tree_code unordered_code
,
10140 enum tree_code ordered_code
)
10142 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
10143 enum tree_code code
;
10145 enum tree_code code0
, code1
;
10146 tree cmp_type
= NULL_TREE
;
10148 type0
= TREE_TYPE (arg0
);
10149 type1
= TREE_TYPE (arg1
);
10151 code0
= TREE_CODE (type0
);
10152 code1
= TREE_CODE (type1
);
10154 if (code0
== REAL_TYPE
&& code1
== REAL_TYPE
)
10155 /* Choose the wider of two real types. */
10156 cmp_type
= TYPE_PRECISION (type0
) >= TYPE_PRECISION (type1
)
10158 else if (code0
== REAL_TYPE
&& code1
== INTEGER_TYPE
)
10160 else if (code0
== INTEGER_TYPE
&& code1
== REAL_TYPE
)
10163 arg0
= fold_convert_loc (loc
, cmp_type
, arg0
);
10164 arg1
= fold_convert_loc (loc
, cmp_type
, arg1
);
10166 if (unordered_code
== UNORDERED_EXPR
)
10168 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))))
10169 return omit_two_operands_loc (loc
, type
, integer_zero_node
, arg0
, arg1
);
10170 return fold_build2_loc (loc
, UNORDERED_EXPR
, type
, arg0
, arg1
);
10173 code
= HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))) ? unordered_code
10175 return fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
,
10176 fold_build2_loc (loc
, code
, type
, arg0
, arg1
));
10179 /* Fold a call to built-in function FNDECL with 0 arguments.
10180 IGNORE is true if the result of the function call is ignored. This
10181 function returns NULL_TREE if no simplification was possible. */
10184 fold_builtin_0 (location_t loc
, tree fndecl
, bool ignore ATTRIBUTE_UNUSED
)
10186 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
10187 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10190 CASE_FLT_FN (BUILT_IN_INF
):
10191 case BUILT_IN_INFD32
:
10192 case BUILT_IN_INFD64
:
10193 case BUILT_IN_INFD128
:
10194 return fold_builtin_inf (loc
, type
, true);
10196 CASE_FLT_FN (BUILT_IN_HUGE_VAL
):
10197 return fold_builtin_inf (loc
, type
, false);
10199 case BUILT_IN_CLASSIFY_TYPE
:
10200 return fold_builtin_classify_type (NULL_TREE
);
10202 case BUILT_IN_UNREACHABLE
:
10203 if (flag_sanitize
& SANITIZE_UNREACHABLE
10204 && (current_function_decl
== NULL
10205 || !lookup_attribute ("no_sanitize_undefined",
10206 DECL_ATTRIBUTES (current_function_decl
))))
10207 return ubsan_instrument_unreachable (loc
);
10216 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10217 IGNORE is true if the result of the function call is ignored. This
10218 function returns NULL_TREE if no simplification was possible. */
10221 fold_builtin_1 (location_t loc
, tree fndecl
, tree arg0
, bool ignore
)
10223 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
10224 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10227 case BUILT_IN_CONSTANT_P
:
10229 tree val
= fold_builtin_constant_p (arg0
);
10231 /* Gimplification will pull the CALL_EXPR for the builtin out of
10232 an if condition. When not optimizing, we'll not CSE it back.
10233 To avoid link error types of regressions, return false now. */
10234 if (!val
&& !optimize
)
10235 val
= integer_zero_node
;
10240 case BUILT_IN_CLASSIFY_TYPE
:
10241 return fold_builtin_classify_type (arg0
);
10243 case BUILT_IN_STRLEN
:
10244 return fold_builtin_strlen (loc
, type
, arg0
);
10246 CASE_FLT_FN (BUILT_IN_FABS
):
10247 case BUILT_IN_FABSD32
:
10248 case BUILT_IN_FABSD64
:
10249 case BUILT_IN_FABSD128
:
10250 return fold_builtin_fabs (loc
, arg0
, type
);
10253 case BUILT_IN_LABS
:
10254 case BUILT_IN_LLABS
:
10255 case BUILT_IN_IMAXABS
:
10256 return fold_builtin_abs (loc
, arg0
, type
);
10258 CASE_FLT_FN (BUILT_IN_CONJ
):
10259 if (validate_arg (arg0
, COMPLEX_TYPE
)
10260 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10261 return fold_build1_loc (loc
, CONJ_EXPR
, type
, arg0
);
10264 CASE_FLT_FN (BUILT_IN_CREAL
):
10265 if (validate_arg (arg0
, COMPLEX_TYPE
)
10266 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10267 return non_lvalue_loc (loc
, fold_build1_loc (loc
, REALPART_EXPR
, type
, arg0
));;
10270 CASE_FLT_FN (BUILT_IN_CIMAG
):
10271 if (validate_arg (arg0
, COMPLEX_TYPE
)
10272 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10273 return non_lvalue_loc (loc
, fold_build1_loc (loc
, IMAGPART_EXPR
, type
, arg0
));
10276 CASE_FLT_FN (BUILT_IN_CCOS
):
10277 return fold_builtin_ccos (loc
, arg0
, type
, fndecl
, /*hyper=*/ false);
10279 CASE_FLT_FN (BUILT_IN_CCOSH
):
10280 return fold_builtin_ccos (loc
, arg0
, type
, fndecl
, /*hyper=*/ true);
10282 CASE_FLT_FN (BUILT_IN_CPROJ
):
10283 return fold_builtin_cproj (loc
, arg0
, type
);
10285 CASE_FLT_FN (BUILT_IN_CSIN
):
10286 if (validate_arg (arg0
, COMPLEX_TYPE
)
10287 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10288 return do_mpc_arg1 (arg0
, type
, mpc_sin
);
10291 CASE_FLT_FN (BUILT_IN_CSINH
):
10292 if (validate_arg (arg0
, COMPLEX_TYPE
)
10293 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10294 return do_mpc_arg1 (arg0
, type
, mpc_sinh
);
10297 CASE_FLT_FN (BUILT_IN_CTAN
):
10298 if (validate_arg (arg0
, COMPLEX_TYPE
)
10299 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10300 return do_mpc_arg1 (arg0
, type
, mpc_tan
);
10303 CASE_FLT_FN (BUILT_IN_CTANH
):
10304 if (validate_arg (arg0
, COMPLEX_TYPE
)
10305 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10306 return do_mpc_arg1 (arg0
, type
, mpc_tanh
);
10309 CASE_FLT_FN (BUILT_IN_CLOG
):
10310 if (validate_arg (arg0
, COMPLEX_TYPE
)
10311 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10312 return do_mpc_arg1 (arg0
, type
, mpc_log
);
10315 CASE_FLT_FN (BUILT_IN_CSQRT
):
10316 if (validate_arg (arg0
, COMPLEX_TYPE
)
10317 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10318 return do_mpc_arg1 (arg0
, type
, mpc_sqrt
);
10321 CASE_FLT_FN (BUILT_IN_CASIN
):
10322 if (validate_arg (arg0
, COMPLEX_TYPE
)
10323 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10324 return do_mpc_arg1 (arg0
, type
, mpc_asin
);
10327 CASE_FLT_FN (BUILT_IN_CACOS
):
10328 if (validate_arg (arg0
, COMPLEX_TYPE
)
10329 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10330 return do_mpc_arg1 (arg0
, type
, mpc_acos
);
10333 CASE_FLT_FN (BUILT_IN_CATAN
):
10334 if (validate_arg (arg0
, COMPLEX_TYPE
)
10335 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10336 return do_mpc_arg1 (arg0
, type
, mpc_atan
);
10339 CASE_FLT_FN (BUILT_IN_CASINH
):
10340 if (validate_arg (arg0
, COMPLEX_TYPE
)
10341 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10342 return do_mpc_arg1 (arg0
, type
, mpc_asinh
);
10345 CASE_FLT_FN (BUILT_IN_CACOSH
):
10346 if (validate_arg (arg0
, COMPLEX_TYPE
)
10347 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10348 return do_mpc_arg1 (arg0
, type
, mpc_acosh
);
10351 CASE_FLT_FN (BUILT_IN_CATANH
):
10352 if (validate_arg (arg0
, COMPLEX_TYPE
)
10353 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10354 return do_mpc_arg1 (arg0
, type
, mpc_atanh
);
10357 CASE_FLT_FN (BUILT_IN_CABS
):
10358 return fold_builtin_cabs (loc
, arg0
, type
, fndecl
);
10360 CASE_FLT_FN (BUILT_IN_CARG
):
10361 return fold_builtin_carg (loc
, arg0
, type
);
10363 CASE_FLT_FN (BUILT_IN_SQRT
):
10364 return fold_builtin_sqrt (loc
, arg0
, type
);
10366 CASE_FLT_FN (BUILT_IN_CBRT
):
10367 return fold_builtin_cbrt (loc
, arg0
, type
);
10369 CASE_FLT_FN (BUILT_IN_ASIN
):
10370 if (validate_arg (arg0
, REAL_TYPE
))
10371 return do_mpfr_arg1 (arg0
, type
, mpfr_asin
,
10372 &dconstm1
, &dconst1
, true);
10375 CASE_FLT_FN (BUILT_IN_ACOS
):
10376 if (validate_arg (arg0
, REAL_TYPE
))
10377 return do_mpfr_arg1 (arg0
, type
, mpfr_acos
,
10378 &dconstm1
, &dconst1
, true);
10381 CASE_FLT_FN (BUILT_IN_ATAN
):
10382 if (validate_arg (arg0
, REAL_TYPE
))
10383 return do_mpfr_arg1 (arg0
, type
, mpfr_atan
, NULL
, NULL
, 0);
10386 CASE_FLT_FN (BUILT_IN_ASINH
):
10387 if (validate_arg (arg0
, REAL_TYPE
))
10388 return do_mpfr_arg1 (arg0
, type
, mpfr_asinh
, NULL
, NULL
, 0);
10391 CASE_FLT_FN (BUILT_IN_ACOSH
):
10392 if (validate_arg (arg0
, REAL_TYPE
))
10393 return do_mpfr_arg1 (arg0
, type
, mpfr_acosh
,
10394 &dconst1
, NULL
, true);
10397 CASE_FLT_FN (BUILT_IN_ATANH
):
10398 if (validate_arg (arg0
, REAL_TYPE
))
10399 return do_mpfr_arg1 (arg0
, type
, mpfr_atanh
,
10400 &dconstm1
, &dconst1
, false);
10403 CASE_FLT_FN (BUILT_IN_SIN
):
10404 if (validate_arg (arg0
, REAL_TYPE
))
10405 return do_mpfr_arg1 (arg0
, type
, mpfr_sin
, NULL
, NULL
, 0);
10408 CASE_FLT_FN (BUILT_IN_COS
):
10409 return fold_builtin_cos (loc
, arg0
, type
, fndecl
);
10411 CASE_FLT_FN (BUILT_IN_TAN
):
10412 return fold_builtin_tan (arg0
, type
);
10414 CASE_FLT_FN (BUILT_IN_CEXP
):
10415 return fold_builtin_cexp (loc
, arg0
, type
);
10417 CASE_FLT_FN (BUILT_IN_CEXPI
):
10418 if (validate_arg (arg0
, REAL_TYPE
))
10419 return do_mpfr_sincos (arg0
, NULL_TREE
, NULL_TREE
);
10422 CASE_FLT_FN (BUILT_IN_SINH
):
10423 if (validate_arg (arg0
, REAL_TYPE
))
10424 return do_mpfr_arg1 (arg0
, type
, mpfr_sinh
, NULL
, NULL
, 0);
10427 CASE_FLT_FN (BUILT_IN_COSH
):
10428 return fold_builtin_cosh (loc
, arg0
, type
, fndecl
);
10430 CASE_FLT_FN (BUILT_IN_TANH
):
10431 if (validate_arg (arg0
, REAL_TYPE
))
10432 return do_mpfr_arg1 (arg0
, type
, mpfr_tanh
, NULL
, NULL
, 0);
10435 CASE_FLT_FN (BUILT_IN_ERF
):
10436 if (validate_arg (arg0
, REAL_TYPE
))
10437 return do_mpfr_arg1 (arg0
, type
, mpfr_erf
, NULL
, NULL
, 0);
10440 CASE_FLT_FN (BUILT_IN_ERFC
):
10441 if (validate_arg (arg0
, REAL_TYPE
))
10442 return do_mpfr_arg1 (arg0
, type
, mpfr_erfc
, NULL
, NULL
, 0);
10445 CASE_FLT_FN (BUILT_IN_TGAMMA
):
10446 if (validate_arg (arg0
, REAL_TYPE
))
10447 return do_mpfr_arg1 (arg0
, type
, mpfr_gamma
, NULL
, NULL
, 0);
10450 CASE_FLT_FN (BUILT_IN_EXP
):
10451 return fold_builtin_exponent (loc
, fndecl
, arg0
, mpfr_exp
);
10453 CASE_FLT_FN (BUILT_IN_EXP2
):
10454 return fold_builtin_exponent (loc
, fndecl
, arg0
, mpfr_exp2
);
10456 CASE_FLT_FN (BUILT_IN_EXP10
):
10457 CASE_FLT_FN (BUILT_IN_POW10
):
10458 return fold_builtin_exponent (loc
, fndecl
, arg0
, mpfr_exp10
);
10460 CASE_FLT_FN (BUILT_IN_EXPM1
):
10461 if (validate_arg (arg0
, REAL_TYPE
))
10462 return do_mpfr_arg1 (arg0
, type
, mpfr_expm1
, NULL
, NULL
, 0);
10465 CASE_FLT_FN (BUILT_IN_LOG
):
10466 return fold_builtin_logarithm (loc
, fndecl
, arg0
, mpfr_log
);
10468 CASE_FLT_FN (BUILT_IN_LOG2
):
10469 return fold_builtin_logarithm (loc
, fndecl
, arg0
, mpfr_log2
);
10471 CASE_FLT_FN (BUILT_IN_LOG10
):
10472 return fold_builtin_logarithm (loc
, fndecl
, arg0
, mpfr_log10
);
10474 CASE_FLT_FN (BUILT_IN_LOG1P
):
10475 if (validate_arg (arg0
, REAL_TYPE
))
10476 return do_mpfr_arg1 (arg0
, type
, mpfr_log1p
,
10477 &dconstm1
, NULL
, false);
10480 CASE_FLT_FN (BUILT_IN_J0
):
10481 if (validate_arg (arg0
, REAL_TYPE
))
10482 return do_mpfr_arg1 (arg0
, type
, mpfr_j0
,
10486 CASE_FLT_FN (BUILT_IN_J1
):
10487 if (validate_arg (arg0
, REAL_TYPE
))
10488 return do_mpfr_arg1 (arg0
, type
, mpfr_j1
,
10492 CASE_FLT_FN (BUILT_IN_Y0
):
10493 if (validate_arg (arg0
, REAL_TYPE
))
10494 return do_mpfr_arg1 (arg0
, type
, mpfr_y0
,
10495 &dconst0
, NULL
, false);
10498 CASE_FLT_FN (BUILT_IN_Y1
):
10499 if (validate_arg (arg0
, REAL_TYPE
))
10500 return do_mpfr_arg1 (arg0
, type
, mpfr_y1
,
10501 &dconst0
, NULL
, false);
10504 CASE_FLT_FN (BUILT_IN_NAN
):
10505 case BUILT_IN_NAND32
:
10506 case BUILT_IN_NAND64
:
10507 case BUILT_IN_NAND128
:
10508 return fold_builtin_nan (arg0
, type
, true);
10510 CASE_FLT_FN (BUILT_IN_NANS
):
10511 return fold_builtin_nan (arg0
, type
, false);
10513 CASE_FLT_FN (BUILT_IN_FLOOR
):
10514 return fold_builtin_floor (loc
, fndecl
, arg0
);
10516 CASE_FLT_FN (BUILT_IN_CEIL
):
10517 return fold_builtin_ceil (loc
, fndecl
, arg0
);
10519 CASE_FLT_FN (BUILT_IN_TRUNC
):
10520 return fold_builtin_trunc (loc
, fndecl
, arg0
);
10522 CASE_FLT_FN (BUILT_IN_ROUND
):
10523 return fold_builtin_round (loc
, fndecl
, arg0
);
10525 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
10526 CASE_FLT_FN (BUILT_IN_RINT
):
10527 return fold_trunc_transparent_mathfn (loc
, fndecl
, arg0
);
10529 CASE_FLT_FN (BUILT_IN_ICEIL
):
10530 CASE_FLT_FN (BUILT_IN_LCEIL
):
10531 CASE_FLT_FN (BUILT_IN_LLCEIL
):
10532 CASE_FLT_FN (BUILT_IN_LFLOOR
):
10533 CASE_FLT_FN (BUILT_IN_IFLOOR
):
10534 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
10535 CASE_FLT_FN (BUILT_IN_IROUND
):
10536 CASE_FLT_FN (BUILT_IN_LROUND
):
10537 CASE_FLT_FN (BUILT_IN_LLROUND
):
10538 return fold_builtin_int_roundingfn (loc
, fndecl
, arg0
);
10540 CASE_FLT_FN (BUILT_IN_IRINT
):
10541 CASE_FLT_FN (BUILT_IN_LRINT
):
10542 CASE_FLT_FN (BUILT_IN_LLRINT
):
10543 return fold_fixed_mathfn (loc
, fndecl
, arg0
);
10545 case BUILT_IN_BSWAP16
:
10546 case BUILT_IN_BSWAP32
:
10547 case BUILT_IN_BSWAP64
:
10548 return fold_builtin_bswap (fndecl
, arg0
);
10550 CASE_INT_FN (BUILT_IN_FFS
):
10551 CASE_INT_FN (BUILT_IN_CLZ
):
10552 CASE_INT_FN (BUILT_IN_CTZ
):
10553 CASE_INT_FN (BUILT_IN_CLRSB
):
10554 CASE_INT_FN (BUILT_IN_POPCOUNT
):
10555 CASE_INT_FN (BUILT_IN_PARITY
):
10556 return fold_builtin_bitop (fndecl
, arg0
);
10558 CASE_FLT_FN (BUILT_IN_SIGNBIT
):
10559 return fold_builtin_signbit (loc
, arg0
, type
);
10561 CASE_FLT_FN (BUILT_IN_SIGNIFICAND
):
10562 return fold_builtin_significand (loc
, arg0
, type
);
10564 CASE_FLT_FN (BUILT_IN_ILOGB
):
10565 CASE_FLT_FN (BUILT_IN_LOGB
):
10566 return fold_builtin_logb (loc
, arg0
, type
);
10568 case BUILT_IN_ISASCII
:
10569 return fold_builtin_isascii (loc
, arg0
);
10571 case BUILT_IN_TOASCII
:
10572 return fold_builtin_toascii (loc
, arg0
);
10574 case BUILT_IN_ISDIGIT
:
10575 return fold_builtin_isdigit (loc
, arg0
);
10577 CASE_FLT_FN (BUILT_IN_FINITE
):
10578 case BUILT_IN_FINITED32
:
10579 case BUILT_IN_FINITED64
:
10580 case BUILT_IN_FINITED128
:
10581 case BUILT_IN_ISFINITE
:
10583 tree ret
= fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISFINITE
);
10586 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
10589 CASE_FLT_FN (BUILT_IN_ISINF
):
10590 case BUILT_IN_ISINFD32
:
10591 case BUILT_IN_ISINFD64
:
10592 case BUILT_IN_ISINFD128
:
10594 tree ret
= fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISINF
);
10597 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
10600 case BUILT_IN_ISNORMAL
:
10601 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
10603 case BUILT_IN_ISINF_SIGN
:
10604 return fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISINF_SIGN
);
10606 CASE_FLT_FN (BUILT_IN_ISNAN
):
10607 case BUILT_IN_ISNAND32
:
10608 case BUILT_IN_ISNAND64
:
10609 case BUILT_IN_ISNAND128
:
10610 return fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISNAN
);
10612 case BUILT_IN_PRINTF
:
10613 case BUILT_IN_PRINTF_UNLOCKED
:
10614 case BUILT_IN_VPRINTF
:
10615 return fold_builtin_printf (loc
, fndecl
, arg0
, NULL_TREE
, ignore
, fcode
);
10617 case BUILT_IN_FREE
:
10618 if (integer_zerop (arg0
))
10619 return build_empty_stmt (loc
);
10630 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10631 IGNORE is true if the result of the function call is ignored. This
10632 function returns NULL_TREE if no simplification was possible. */
10635 fold_builtin_2 (location_t loc
, tree fndecl
, tree arg0
, tree arg1
, bool ignore
)
10637 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
10638 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10642 CASE_FLT_FN (BUILT_IN_JN
):
10643 if (validate_arg (arg0
, INTEGER_TYPE
)
10644 && validate_arg (arg1
, REAL_TYPE
))
10645 return do_mpfr_bessel_n (arg0
, arg1
, type
, mpfr_jn
, NULL
, 0);
10648 CASE_FLT_FN (BUILT_IN_YN
):
10649 if (validate_arg (arg0
, INTEGER_TYPE
)
10650 && validate_arg (arg1
, REAL_TYPE
))
10651 return do_mpfr_bessel_n (arg0
, arg1
, type
, mpfr_yn
,
10655 CASE_FLT_FN (BUILT_IN_DREM
):
10656 CASE_FLT_FN (BUILT_IN_REMAINDER
):
10657 if (validate_arg (arg0
, REAL_TYPE
)
10658 && validate_arg (arg1
, REAL_TYPE
))
10659 return do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_remainder
);
10662 CASE_FLT_FN_REENT (BUILT_IN_GAMMA
): /* GAMMA_R */
10663 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA
): /* LGAMMA_R */
10664 if (validate_arg (arg0
, REAL_TYPE
)
10665 && validate_arg (arg1
, POINTER_TYPE
))
10666 return do_mpfr_lgamma_r (arg0
, arg1
, type
);
10669 CASE_FLT_FN (BUILT_IN_ATAN2
):
10670 if (validate_arg (arg0
, REAL_TYPE
)
10671 && validate_arg (arg1
, REAL_TYPE
))
10672 return do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_atan2
);
10675 CASE_FLT_FN (BUILT_IN_FDIM
):
10676 if (validate_arg (arg0
, REAL_TYPE
)
10677 && validate_arg (arg1
, REAL_TYPE
))
10678 return do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_dim
);
10681 CASE_FLT_FN (BUILT_IN_HYPOT
):
10682 return fold_builtin_hypot (loc
, fndecl
, arg0
, arg1
, type
);
10684 CASE_FLT_FN (BUILT_IN_CPOW
):
10685 if (validate_arg (arg0
, COMPLEX_TYPE
)
10686 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
10687 && validate_arg (arg1
, COMPLEX_TYPE
)
10688 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1
))) == REAL_TYPE
)
10689 return do_mpc_arg2 (arg0
, arg1
, type
, /*do_nonfinite=*/ 0, mpc_pow
);
10692 CASE_FLT_FN (BUILT_IN_LDEXP
):
10693 return fold_builtin_load_exponent (loc
, arg0
, arg1
, type
, /*ldexp=*/true);
10694 CASE_FLT_FN (BUILT_IN_SCALBN
):
10695 CASE_FLT_FN (BUILT_IN_SCALBLN
):
10696 return fold_builtin_load_exponent (loc
, arg0
, arg1
,
10697 type
, /*ldexp=*/false);
10699 CASE_FLT_FN (BUILT_IN_FREXP
):
10700 return fold_builtin_frexp (loc
, arg0
, arg1
, type
);
10702 CASE_FLT_FN (BUILT_IN_MODF
):
10703 return fold_builtin_modf (loc
, arg0
, arg1
, type
);
10705 case BUILT_IN_BZERO
:
10706 return fold_builtin_bzero (loc
, arg0
, arg1
, ignore
);
10708 case BUILT_IN_FPUTS
:
10709 return fold_builtin_fputs (loc
, arg0
, arg1
, ignore
, false, NULL_TREE
);
10711 case BUILT_IN_FPUTS_UNLOCKED
:
10712 return fold_builtin_fputs (loc
, arg0
, arg1
, ignore
, true, NULL_TREE
);
10714 case BUILT_IN_STRSTR
:
10715 return fold_builtin_strstr (loc
, arg0
, arg1
, type
);
10717 case BUILT_IN_STRCAT
:
10718 return fold_builtin_strcat (loc
, arg0
, arg1
);
10720 case BUILT_IN_STRSPN
:
10721 return fold_builtin_strspn (loc
, arg0
, arg1
);
10723 case BUILT_IN_STRCSPN
:
10724 return fold_builtin_strcspn (loc
, arg0
, arg1
);
10726 case BUILT_IN_STRCHR
:
10727 case BUILT_IN_INDEX
:
10728 return fold_builtin_strchr (loc
, arg0
, arg1
, type
);
10730 case BUILT_IN_STRRCHR
:
10731 case BUILT_IN_RINDEX
:
10732 return fold_builtin_strrchr (loc
, arg0
, arg1
, type
);
10734 case BUILT_IN_STRCPY
:
10735 return fold_builtin_strcpy (loc
, fndecl
, arg0
, arg1
, NULL_TREE
);
10737 case BUILT_IN_STPCPY
:
10740 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
10744 return build_call_expr_loc (loc
, fn
, 2, arg0
, arg1
);
10747 return fold_builtin_stpcpy (loc
, fndecl
, arg0
, arg1
);
10750 case BUILT_IN_STRCMP
:
10751 return fold_builtin_strcmp (loc
, arg0
, arg1
);
10753 case BUILT_IN_STRPBRK
:
10754 return fold_builtin_strpbrk (loc
, arg0
, arg1
, type
);
10756 case BUILT_IN_EXPECT
:
10757 return fold_builtin_expect (loc
, arg0
, arg1
);
10759 CASE_FLT_FN (BUILT_IN_POW
):
10760 return fold_builtin_pow (loc
, fndecl
, arg0
, arg1
, type
);
10762 CASE_FLT_FN (BUILT_IN_POWI
):
10763 return fold_builtin_powi (loc
, fndecl
, arg0
, arg1
, type
);
10765 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
10766 return fold_builtin_copysign (loc
, fndecl
, arg0
, arg1
, type
);
10768 CASE_FLT_FN (BUILT_IN_FMIN
):
10769 return fold_builtin_fmin_fmax (loc
, arg0
, arg1
, type
, /*max=*/false);
10771 CASE_FLT_FN (BUILT_IN_FMAX
):
10772 return fold_builtin_fmin_fmax (loc
, arg0
, arg1
, type
, /*max=*/true);
10774 case BUILT_IN_ISGREATER
:
10775 return fold_builtin_unordered_cmp (loc
, fndecl
,
10776 arg0
, arg1
, UNLE_EXPR
, LE_EXPR
);
10777 case BUILT_IN_ISGREATEREQUAL
:
10778 return fold_builtin_unordered_cmp (loc
, fndecl
,
10779 arg0
, arg1
, UNLT_EXPR
, LT_EXPR
);
10780 case BUILT_IN_ISLESS
:
10781 return fold_builtin_unordered_cmp (loc
, fndecl
,
10782 arg0
, arg1
, UNGE_EXPR
, GE_EXPR
);
10783 case BUILT_IN_ISLESSEQUAL
:
10784 return fold_builtin_unordered_cmp (loc
, fndecl
,
10785 arg0
, arg1
, UNGT_EXPR
, GT_EXPR
);
10786 case BUILT_IN_ISLESSGREATER
:
10787 return fold_builtin_unordered_cmp (loc
, fndecl
,
10788 arg0
, arg1
, UNEQ_EXPR
, EQ_EXPR
);
10789 case BUILT_IN_ISUNORDERED
:
10790 return fold_builtin_unordered_cmp (loc
, fndecl
,
10791 arg0
, arg1
, UNORDERED_EXPR
,
10794 /* We do the folding for va_start in the expander. */
10795 case BUILT_IN_VA_START
:
10798 case BUILT_IN_SPRINTF
:
10799 return fold_builtin_sprintf (loc
, arg0
, arg1
, NULL_TREE
, ignore
);
10801 case BUILT_IN_OBJECT_SIZE
:
10802 return fold_builtin_object_size (arg0
, arg1
);
10804 case BUILT_IN_PRINTF
:
10805 case BUILT_IN_PRINTF_UNLOCKED
:
10806 case BUILT_IN_VPRINTF
:
10807 return fold_builtin_printf (loc
, fndecl
, arg0
, arg1
, ignore
, fcode
);
10809 case BUILT_IN_PRINTF_CHK
:
10810 case BUILT_IN_VPRINTF_CHK
:
10811 if (!validate_arg (arg0
, INTEGER_TYPE
)
10812 || TREE_SIDE_EFFECTS (arg0
))
10815 return fold_builtin_printf (loc
, fndecl
,
10816 arg1
, NULL_TREE
, ignore
, fcode
);
10819 case BUILT_IN_FPRINTF
:
10820 case BUILT_IN_FPRINTF_UNLOCKED
:
10821 case BUILT_IN_VFPRINTF
:
10822 return fold_builtin_fprintf (loc
, fndecl
, arg0
, arg1
, NULL_TREE
,
10825 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE
:
10826 return fold_builtin_atomic_always_lock_free (arg0
, arg1
);
10828 case BUILT_IN_ATOMIC_IS_LOCK_FREE
:
10829 return fold_builtin_atomic_is_lock_free (arg0
, arg1
);
10837 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10838 and ARG2. IGNORE is true if the result of the function call is ignored.
10839 This function returns NULL_TREE if no simplification was possible. */
10842 fold_builtin_3 (location_t loc
, tree fndecl
,
10843 tree arg0
, tree arg1
, tree arg2
, bool ignore
)
10845 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
10846 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10850 CASE_FLT_FN (BUILT_IN_SINCOS
):
10851 return fold_builtin_sincos (loc
, arg0
, arg1
, arg2
);
10853 CASE_FLT_FN (BUILT_IN_FMA
):
10854 return fold_builtin_fma (loc
, arg0
, arg1
, arg2
, type
);
10857 CASE_FLT_FN (BUILT_IN_REMQUO
):
10858 if (validate_arg (arg0
, REAL_TYPE
)
10859 && validate_arg (arg1
, REAL_TYPE
)
10860 && validate_arg (arg2
, POINTER_TYPE
))
10861 return do_mpfr_remquo (arg0
, arg1
, arg2
);
10864 case BUILT_IN_MEMSET
:
10865 return fold_builtin_memset (loc
, arg0
, arg1
, arg2
, type
, ignore
);
10867 case BUILT_IN_BCOPY
:
10868 return fold_builtin_memory_op (loc
, arg1
, arg0
, arg2
,
10869 void_type_node
, true, /*endp=*/3);
10871 case BUILT_IN_MEMCPY
:
10872 return fold_builtin_memory_op (loc
, arg0
, arg1
, arg2
,
10873 type
, ignore
, /*endp=*/0);
10875 case BUILT_IN_MEMPCPY
:
10876 return fold_builtin_memory_op (loc
, arg0
, arg1
, arg2
,
10877 type
, ignore
, /*endp=*/1);
10879 case BUILT_IN_MEMMOVE
:
10880 return fold_builtin_memory_op (loc
, arg0
, arg1
, arg2
,
10881 type
, ignore
, /*endp=*/3);
10883 case BUILT_IN_STRNCAT
:
10884 return fold_builtin_strncat (loc
, arg0
, arg1
, arg2
);
10886 case BUILT_IN_STRNCPY
:
10887 return fold_builtin_strncpy (loc
, fndecl
, arg0
, arg1
, arg2
, NULL_TREE
);
10889 case BUILT_IN_STRNCMP
:
10890 return fold_builtin_strncmp (loc
, arg0
, arg1
, arg2
);
10892 case BUILT_IN_MEMCHR
:
10893 return fold_builtin_memchr (loc
, arg0
, arg1
, arg2
, type
);
10895 case BUILT_IN_BCMP
:
10896 case BUILT_IN_MEMCMP
:
10897 return fold_builtin_memcmp (loc
, arg0
, arg1
, arg2
);;
10899 case BUILT_IN_SPRINTF
:
10900 return fold_builtin_sprintf (loc
, arg0
, arg1
, arg2
, ignore
);
10902 case BUILT_IN_SNPRINTF
:
10903 return fold_builtin_snprintf (loc
, arg0
, arg1
, arg2
, NULL_TREE
, ignore
);
10905 case BUILT_IN_STRCPY_CHK
:
10906 case BUILT_IN_STPCPY_CHK
:
10907 return fold_builtin_stxcpy_chk (loc
, fndecl
, arg0
, arg1
, arg2
, NULL_TREE
,
10910 case BUILT_IN_STRCAT_CHK
:
10911 return fold_builtin_strcat_chk (loc
, fndecl
, arg0
, arg1
, arg2
);
10913 case BUILT_IN_PRINTF_CHK
:
10914 case BUILT_IN_VPRINTF_CHK
:
10915 if (!validate_arg (arg0
, INTEGER_TYPE
)
10916 || TREE_SIDE_EFFECTS (arg0
))
10919 return fold_builtin_printf (loc
, fndecl
, arg1
, arg2
, ignore
, fcode
);
10922 case BUILT_IN_FPRINTF
:
10923 case BUILT_IN_FPRINTF_UNLOCKED
:
10924 case BUILT_IN_VFPRINTF
:
10925 return fold_builtin_fprintf (loc
, fndecl
, arg0
, arg1
, arg2
,
10928 case BUILT_IN_FPRINTF_CHK
:
10929 case BUILT_IN_VFPRINTF_CHK
:
10930 if (!validate_arg (arg1
, INTEGER_TYPE
)
10931 || TREE_SIDE_EFFECTS (arg1
))
10934 return fold_builtin_fprintf (loc
, fndecl
, arg0
, arg2
, NULL_TREE
,
10943 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10944 ARG2, and ARG3. IGNORE is true if the result of the function call is
10945 ignored. This function returns NULL_TREE if no simplification was
10949 fold_builtin_4 (location_t loc
, tree fndecl
,
10950 tree arg0
, tree arg1
, tree arg2
, tree arg3
, bool ignore
)
10952 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10956 case BUILT_IN_MEMCPY_CHK
:
10957 case BUILT_IN_MEMPCPY_CHK
:
10958 case BUILT_IN_MEMMOVE_CHK
:
10959 case BUILT_IN_MEMSET_CHK
:
10960 return fold_builtin_memory_chk (loc
, fndecl
, arg0
, arg1
, arg2
, arg3
,
10962 DECL_FUNCTION_CODE (fndecl
));
10964 case BUILT_IN_STRNCPY_CHK
:
10965 case BUILT_IN_STPNCPY_CHK
:
10966 return fold_builtin_stxncpy_chk (loc
, arg0
, arg1
, arg2
, arg3
, NULL_TREE
,
10969 case BUILT_IN_STRNCAT_CHK
:
10970 return fold_builtin_strncat_chk (loc
, fndecl
, arg0
, arg1
, arg2
, arg3
);
10972 case BUILT_IN_SNPRINTF
:
10973 return fold_builtin_snprintf (loc
, arg0
, arg1
, arg2
, arg3
, ignore
);
10975 case BUILT_IN_FPRINTF_CHK
:
10976 case BUILT_IN_VFPRINTF_CHK
:
10977 if (!validate_arg (arg1
, INTEGER_TYPE
)
10978 || TREE_SIDE_EFFECTS (arg1
))
10981 return fold_builtin_fprintf (loc
, fndecl
, arg0
, arg2
, arg3
,
10991 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10992 arguments, where NARGS <= 4. IGNORE is true if the result of the
10993 function call is ignored. This function returns NULL_TREE if no
10994 simplification was possible. Note that this only folds builtins with
10995 fixed argument patterns. Foldings that do varargs-to-varargs
10996 transformations, or that match calls with more than 4 arguments,
10997 need to be handled with fold_builtin_varargs instead. */
10999 #define MAX_ARGS_TO_FOLD_BUILTIN 4
11002 fold_builtin_n (location_t loc
, tree fndecl
, tree
*args
, int nargs
, bool ignore
)
11004 tree ret
= NULL_TREE
;
11009 ret
= fold_builtin_0 (loc
, fndecl
, ignore
);
11012 ret
= fold_builtin_1 (loc
, fndecl
, args
[0], ignore
);
11015 ret
= fold_builtin_2 (loc
, fndecl
, args
[0], args
[1], ignore
);
11018 ret
= fold_builtin_3 (loc
, fndecl
, args
[0], args
[1], args
[2], ignore
);
11021 ret
= fold_builtin_4 (loc
, fndecl
, args
[0], args
[1], args
[2], args
[3],
11029 ret
= build1 (NOP_EXPR
, TREE_TYPE (ret
), ret
);
11030 SET_EXPR_LOCATION (ret
, loc
);
11031 TREE_NO_WARNING (ret
) = 1;
11037 /* Builtins with folding operations that operate on "..." arguments
11038 need special handling; we need to store the arguments in a convenient
11039 data structure before attempting any folding. Fortunately there are
11040 only a few builtins that fall into this category. FNDECL is the
11041 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
11042 result of the function call is ignored. */
11045 fold_builtin_varargs (location_t loc
, tree fndecl
, tree exp
,
11046 bool ignore ATTRIBUTE_UNUSED
)
11048 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
11049 tree ret
= NULL_TREE
;
11053 case BUILT_IN_SPRINTF_CHK
:
11054 case BUILT_IN_VSPRINTF_CHK
:
11055 ret
= fold_builtin_sprintf_chk (loc
, exp
, fcode
);
11058 case BUILT_IN_SNPRINTF_CHK
:
11059 case BUILT_IN_VSNPRINTF_CHK
:
11060 ret
= fold_builtin_snprintf_chk (loc
, exp
, NULL_TREE
, fcode
);
11063 case BUILT_IN_FPCLASSIFY
:
11064 ret
= fold_builtin_fpclassify (loc
, exp
);
11072 ret
= build1 (NOP_EXPR
, TREE_TYPE (ret
), ret
);
11073 SET_EXPR_LOCATION (ret
, loc
);
11074 TREE_NO_WARNING (ret
) = 1;
11080 /* Return true if FNDECL shouldn't be folded right now.
11081 If a built-in function has an inline attribute always_inline
11082 wrapper, defer folding it after always_inline functions have
11083 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
11084 might not be performed. */
11087 avoid_folding_inline_builtin (tree fndecl
)
11089 return (DECL_DECLARED_INLINE_P (fndecl
)
11090 && DECL_DISREGARD_INLINE_LIMITS (fndecl
)
11092 && !cfun
->always_inline_functions_inlined
11093 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl
)));
11096 /* A wrapper function for builtin folding that prevents warnings for
11097 "statement without effect" and the like, caused by removing the
11098 call node earlier than the warning is generated. */
11101 fold_call_expr (location_t loc
, tree exp
, bool ignore
)
11103 tree ret
= NULL_TREE
;
11104 tree fndecl
= get_callee_fndecl (exp
);
11106 && TREE_CODE (fndecl
) == FUNCTION_DECL
11107 && DECL_BUILT_IN (fndecl
)
11108 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
11109 yet. Defer folding until we see all the arguments
11110 (after inlining). */
11111 && !CALL_EXPR_VA_ARG_PACK (exp
))
11113 int nargs
= call_expr_nargs (exp
);
11115 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
11116 instead last argument is __builtin_va_arg_pack (). Defer folding
11117 even in that case, until arguments are finalized. */
11118 if (nargs
&& TREE_CODE (CALL_EXPR_ARG (exp
, nargs
- 1)) == CALL_EXPR
)
11120 tree fndecl2
= get_callee_fndecl (CALL_EXPR_ARG (exp
, nargs
- 1));
11122 && TREE_CODE (fndecl2
) == FUNCTION_DECL
11123 && DECL_BUILT_IN_CLASS (fndecl2
) == BUILT_IN_NORMAL
11124 && DECL_FUNCTION_CODE (fndecl2
) == BUILT_IN_VA_ARG_PACK
)
11128 if (avoid_folding_inline_builtin (fndecl
))
11131 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
11132 return targetm
.fold_builtin (fndecl
, call_expr_nargs (exp
),
11133 CALL_EXPR_ARGP (exp
), ignore
);
11136 if (nargs
<= MAX_ARGS_TO_FOLD_BUILTIN
)
11138 tree
*args
= CALL_EXPR_ARGP (exp
);
11139 ret
= fold_builtin_n (loc
, fndecl
, args
, nargs
, ignore
);
11142 ret
= fold_builtin_varargs (loc
, fndecl
, exp
, ignore
);
11150 /* Conveniently construct a function call expression. FNDECL names the
11151 function to be called and N arguments are passed in the array
11155 build_call_expr_loc_array (location_t loc
, tree fndecl
, int n
, tree
*argarray
)
11157 tree fntype
= TREE_TYPE (fndecl
);
11158 tree fn
= build1 (ADDR_EXPR
, build_pointer_type (fntype
), fndecl
);
11160 return fold_builtin_call_array (loc
, TREE_TYPE (fntype
), fn
, n
, argarray
);
11163 /* Conveniently construct a function call expression. FNDECL names the
11164 function to be called and the arguments are passed in the vector
11168 build_call_expr_loc_vec (location_t loc
, tree fndecl
, vec
<tree
, va_gc
> *vec
)
11170 return build_call_expr_loc_array (loc
, fndecl
, vec_safe_length (vec
),
11171 vec_safe_address (vec
));
11175 /* Conveniently construct a function call expression. FNDECL names the
11176 function to be called, N is the number of arguments, and the "..."
11177 parameters are the argument expressions. */
11180 build_call_expr_loc (location_t loc
, tree fndecl
, int n
, ...)
11183 tree
*argarray
= XALLOCAVEC (tree
, n
);
11187 for (i
= 0; i
< n
; i
++)
11188 argarray
[i
] = va_arg (ap
, tree
);
11190 return build_call_expr_loc_array (loc
, fndecl
, n
, argarray
);
11193 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
11194 varargs macros aren't supported by all bootstrap compilers. */
11197 build_call_expr (tree fndecl
, int n
, ...)
11200 tree
*argarray
= XALLOCAVEC (tree
, n
);
11204 for (i
= 0; i
< n
; i
++)
11205 argarray
[i
] = va_arg (ap
, tree
);
11207 return build_call_expr_loc_array (UNKNOWN_LOCATION
, fndecl
, n
, argarray
);
11210 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
11211 N arguments are passed in the array ARGARRAY. */
11214 fold_builtin_call_array (location_t loc
, tree type
,
11219 tree ret
= NULL_TREE
;
11222 if (TREE_CODE (fn
) == ADDR_EXPR
)
11224 tree fndecl
= TREE_OPERAND (fn
, 0);
11225 if (TREE_CODE (fndecl
) == FUNCTION_DECL
11226 && DECL_BUILT_IN (fndecl
))
11228 /* If last argument is __builtin_va_arg_pack (), arguments to this
11229 function are not finalized yet. Defer folding until they are. */
11230 if (n
&& TREE_CODE (argarray
[n
- 1]) == CALL_EXPR
)
11232 tree fndecl2
= get_callee_fndecl (argarray
[n
- 1]);
11234 && TREE_CODE (fndecl2
) == FUNCTION_DECL
11235 && DECL_BUILT_IN_CLASS (fndecl2
) == BUILT_IN_NORMAL
11236 && DECL_FUNCTION_CODE (fndecl2
) == BUILT_IN_VA_ARG_PACK
)
11237 return build_call_array_loc (loc
, type
, fn
, n
, argarray
);
11239 if (avoid_folding_inline_builtin (fndecl
))
11240 return build_call_array_loc (loc
, type
, fn
, n
, argarray
);
11241 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
11243 ret
= targetm
.fold_builtin (fndecl
, n
, argarray
, false);
11247 return build_call_array_loc (loc
, type
, fn
, n
, argarray
);
11249 else if (n
<= MAX_ARGS_TO_FOLD_BUILTIN
)
11251 /* First try the transformations that don't require consing up
11253 ret
= fold_builtin_n (loc
, fndecl
, argarray
, n
, false);
11258 /* If we got this far, we need to build an exp. */
11259 exp
= build_call_array_loc (loc
, type
, fn
, n
, argarray
);
11260 ret
= fold_builtin_varargs (loc
, fndecl
, exp
, false);
11261 return ret
? ret
: exp
;
11265 return build_call_array_loc (loc
, type
, fn
, n
, argarray
);
11268 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11269 list ARGS along with N new arguments in NEWARGS. SKIP is the number
11270 of arguments in ARGS to be omitted. OLDNARGS is the number of
11271 elements in ARGS. */
11274 rewrite_call_expr_valist (location_t loc
, int oldnargs
, tree
*args
,
11275 int skip
, tree fndecl
, int n
, va_list newargs
)
11277 int nargs
= oldnargs
- skip
+ n
;
11284 buffer
= XALLOCAVEC (tree
, nargs
);
11285 for (i
= 0; i
< n
; i
++)
11286 buffer
[i
] = va_arg (newargs
, tree
);
11287 for (j
= skip
; j
< oldnargs
; j
++, i
++)
11288 buffer
[i
] = args
[j
];
11291 buffer
= args
+ skip
;
11293 return build_call_expr_loc_array (loc
, fndecl
, nargs
, buffer
);
11296 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11297 list ARGS along with N new arguments specified as the "..."
11298 parameters. SKIP is the number of arguments in ARGS to be omitted.
11299 OLDNARGS is the number of elements in ARGS. */
11302 rewrite_call_expr_array (location_t loc
, int oldnargs
, tree
*args
,
11303 int skip
, tree fndecl
, int n
, ...)
11309 t
= rewrite_call_expr_valist (loc
, oldnargs
, args
, skip
, fndecl
, n
, ap
);
11315 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
11316 along with N new arguments specified as the "..." parameters. SKIP
11317 is the number of arguments in EXP to be omitted. This function is used
11318 to do varargs-to-varargs transformations. */
11321 rewrite_call_expr (location_t loc
, tree exp
, int skip
, tree fndecl
, int n
, ...)
11327 t
= rewrite_call_expr_valist (loc
, call_expr_nargs (exp
),
11328 CALL_EXPR_ARGP (exp
), skip
, fndecl
, n
, ap
);
11334 /* Validate a single argument ARG against a tree code CODE representing
11338 validate_arg (const_tree arg
, enum tree_code code
)
11342 else if (code
== POINTER_TYPE
)
11343 return POINTER_TYPE_P (TREE_TYPE (arg
));
11344 else if (code
== INTEGER_TYPE
)
11345 return INTEGRAL_TYPE_P (TREE_TYPE (arg
));
11346 return code
== TREE_CODE (TREE_TYPE (arg
));
11349 /* This function validates the types of a function call argument list
11350 against a specified list of tree_codes. If the last specifier is a 0,
11351 that represents an ellipses, otherwise the last specifier must be a
11354 This is the GIMPLE version of validate_arglist. Eventually we want to
11355 completely convert builtins.c to work from GIMPLEs and the tree based
11356 validate_arglist will then be removed. */
11359 validate_gimple_arglist (const_gimple call
, ...)
11361 enum tree_code code
;
11367 va_start (ap
, call
);
11372 code
= (enum tree_code
) va_arg (ap
, int);
11376 /* This signifies an ellipses, any further arguments are all ok. */
11380 /* This signifies an endlink, if no arguments remain, return
11381 true, otherwise return false. */
11382 res
= (i
== gimple_call_num_args (call
));
11385 /* If no parameters remain or the parameter's code does not
11386 match the specified code, return false. Otherwise continue
11387 checking any remaining arguments. */
11388 arg
= gimple_call_arg (call
, i
++);
11389 if (!validate_arg (arg
, code
))
11396 /* We need gotos here since we can only have one VA_CLOSE in a
11404 /* This function validates the types of a function call argument list
11405 against a specified list of tree_codes. If the last specifier is a 0,
11406 that represents an ellipses, otherwise the last specifier must be a
11410 validate_arglist (const_tree callexpr
, ...)
11412 enum tree_code code
;
11415 const_call_expr_arg_iterator iter
;
11418 va_start (ap
, callexpr
);
11419 init_const_call_expr_arg_iterator (callexpr
, &iter
);
11423 code
= (enum tree_code
) va_arg (ap
, int);
11427 /* This signifies an ellipses, any further arguments are all ok. */
11431 /* This signifies an endlink, if no arguments remain, return
11432 true, otherwise return false. */
11433 res
= !more_const_call_expr_args_p (&iter
);
11436 /* If no parameters remain or the parameter's code does not
11437 match the specified code, return false. Otherwise continue
11438 checking any remaining arguments. */
11439 arg
= next_const_call_expr_arg (&iter
);
11440 if (!validate_arg (arg
, code
))
11447 /* We need gotos here since we can only have one VA_CLOSE in a
11455 /* Default target-specific builtin expander that does nothing. */
11458 default_expand_builtin (tree exp ATTRIBUTE_UNUSED
,
11459 rtx target ATTRIBUTE_UNUSED
,
11460 rtx subtarget ATTRIBUTE_UNUSED
,
11461 enum machine_mode mode ATTRIBUTE_UNUSED
,
11462 int ignore ATTRIBUTE_UNUSED
)
11467 /* Returns true is EXP represents data that would potentially reside
11468 in a readonly section. */
11471 readonly_data_expr (tree exp
)
11475 if (TREE_CODE (exp
) != ADDR_EXPR
)
11478 exp
= get_base_address (TREE_OPERAND (exp
, 0));
11482 /* Make sure we call decl_readonly_section only for trees it
11483 can handle (since it returns true for everything it doesn't
11485 if (TREE_CODE (exp
) == STRING_CST
11486 || TREE_CODE (exp
) == CONSTRUCTOR
11487 || (TREE_CODE (exp
) == VAR_DECL
&& TREE_STATIC (exp
)))
11488 return decl_readonly_section (exp
, 0);
11493 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11494 to the call, and TYPE is its return type.
11496 Return NULL_TREE if no simplification was possible, otherwise return the
11497 simplified form of the call as a tree.
11499 The simplified form may be a constant or other expression which
11500 computes the same value, but in a more efficient manner (including
11501 calls to other builtin functions).
11503 The call may contain arguments which need to be evaluated, but
11504 which are not useful to determine the result of the call. In
11505 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11506 COMPOUND_EXPR will be an argument which must be evaluated.
11507 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11508 COMPOUND_EXPR in the chain will contain the tree for the simplified
11509 form of the builtin function call. */
11512 fold_builtin_strstr (location_t loc
, tree s1
, tree s2
, tree type
)
11514 if (!validate_arg (s1
, POINTER_TYPE
)
11515 || !validate_arg (s2
, POINTER_TYPE
))
11520 const char *p1
, *p2
;
11522 p2
= c_getstr (s2
);
11526 p1
= c_getstr (s1
);
11529 const char *r
= strstr (p1
, p2
);
11533 return build_int_cst (TREE_TYPE (s1
), 0);
11535 /* Return an offset into the constant string argument. */
11536 tem
= fold_build_pointer_plus_hwi_loc (loc
, s1
, r
- p1
);
11537 return fold_convert_loc (loc
, type
, tem
);
11540 /* The argument is const char *, and the result is char *, so we need
11541 a type conversion here to avoid a warning. */
11543 return fold_convert_loc (loc
, type
, s1
);
11548 fn
= builtin_decl_implicit (BUILT_IN_STRCHR
);
11552 /* New argument list transforming strstr(s1, s2) to
11553 strchr(s1, s2[0]). */
11554 return build_call_expr_loc (loc
, fn
, 2, s1
,
11555 build_int_cst (integer_type_node
, p2
[0]));
11559 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11560 the call, and TYPE is its return type.
11562 Return NULL_TREE if no simplification was possible, otherwise return the
11563 simplified form of the call as a tree.
11565 The simplified form may be a constant or other expression which
11566 computes the same value, but in a more efficient manner (including
11567 calls to other builtin functions).
11569 The call may contain arguments which need to be evaluated, but
11570 which are not useful to determine the result of the call. In
11571 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11572 COMPOUND_EXPR will be an argument which must be evaluated.
11573 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11574 COMPOUND_EXPR in the chain will contain the tree for the simplified
11575 form of the builtin function call. */
11578 fold_builtin_strchr (location_t loc
, tree s1
, tree s2
, tree type
)
11580 if (!validate_arg (s1
, POINTER_TYPE
)
11581 || !validate_arg (s2
, INTEGER_TYPE
))
11587 if (TREE_CODE (s2
) != INTEGER_CST
)
11590 p1
= c_getstr (s1
);
11597 if (target_char_cast (s2
, &c
))
11600 r
= strchr (p1
, c
);
11603 return build_int_cst (TREE_TYPE (s1
), 0);
11605 /* Return an offset into the constant string argument. */
11606 tem
= fold_build_pointer_plus_hwi_loc (loc
, s1
, r
- p1
);
11607 return fold_convert_loc (loc
, type
, tem
);
11613 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11614 the call, and TYPE is its return type.
11616 Return NULL_TREE if no simplification was possible, otherwise return the
11617 simplified form of the call as a tree.
11619 The simplified form may be a constant or other expression which
11620 computes the same value, but in a more efficient manner (including
11621 calls to other builtin functions).
11623 The call may contain arguments which need to be evaluated, but
11624 which are not useful to determine the result of the call. In
11625 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11626 COMPOUND_EXPR will be an argument which must be evaluated.
11627 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11628 COMPOUND_EXPR in the chain will contain the tree for the simplified
11629 form of the builtin function call. */
11632 fold_builtin_strrchr (location_t loc
, tree s1
, tree s2
, tree type
)
11634 if (!validate_arg (s1
, POINTER_TYPE
)
11635 || !validate_arg (s2
, INTEGER_TYPE
))
11642 if (TREE_CODE (s2
) != INTEGER_CST
)
11645 p1
= c_getstr (s1
);
11652 if (target_char_cast (s2
, &c
))
11655 r
= strrchr (p1
, c
);
11658 return build_int_cst (TREE_TYPE (s1
), 0);
11660 /* Return an offset into the constant string argument. */
11661 tem
= fold_build_pointer_plus_hwi_loc (loc
, s1
, r
- p1
);
11662 return fold_convert_loc (loc
, type
, tem
);
11665 if (! integer_zerop (s2
))
11668 fn
= builtin_decl_implicit (BUILT_IN_STRCHR
);
11672 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11673 return build_call_expr_loc (loc
, fn
, 2, s1
, s2
);
11677 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11678 to the call, and TYPE is its return type.
11680 Return NULL_TREE if no simplification was possible, otherwise return the
11681 simplified form of the call as a tree.
11683 The simplified form may be a constant or other expression which
11684 computes the same value, but in a more efficient manner (including
11685 calls to other builtin functions).
11687 The call may contain arguments which need to be evaluated, but
11688 which are not useful to determine the result of the call. In
11689 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11690 COMPOUND_EXPR will be an argument which must be evaluated.
11691 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11692 COMPOUND_EXPR in the chain will contain the tree for the simplified
11693 form of the builtin function call. */
11696 fold_builtin_strpbrk (location_t loc
, tree s1
, tree s2
, tree type
)
11698 if (!validate_arg (s1
, POINTER_TYPE
)
11699 || !validate_arg (s2
, POINTER_TYPE
))
11704 const char *p1
, *p2
;
11706 p2
= c_getstr (s2
);
11710 p1
= c_getstr (s1
);
11713 const char *r
= strpbrk (p1
, p2
);
11717 return build_int_cst (TREE_TYPE (s1
), 0);
11719 /* Return an offset into the constant string argument. */
11720 tem
= fold_build_pointer_plus_hwi_loc (loc
, s1
, r
- p1
);
11721 return fold_convert_loc (loc
, type
, tem
);
11725 /* strpbrk(x, "") == NULL.
11726 Evaluate and ignore s1 in case it had side-effects. */
11727 return omit_one_operand_loc (loc
, TREE_TYPE (s1
), integer_zero_node
, s1
);
11730 return NULL_TREE
; /* Really call strpbrk. */
11732 fn
= builtin_decl_implicit (BUILT_IN_STRCHR
);
11736 /* New argument list transforming strpbrk(s1, s2) to
11737 strchr(s1, s2[0]). */
11738 return build_call_expr_loc (loc
, fn
, 2, s1
,
11739 build_int_cst (integer_type_node
, p2
[0]));
11743 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11746 Return NULL_TREE if no simplification was possible, otherwise return the
11747 simplified form of the call as a tree.
11749 The simplified form may be a constant or other expression which
11750 computes the same value, but in a more efficient manner (including
11751 calls to other builtin functions).
11753 The call may contain arguments which need to be evaluated, but
11754 which are not useful to determine the result of the call. In
11755 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11756 COMPOUND_EXPR will be an argument which must be evaluated.
11757 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11758 COMPOUND_EXPR in the chain will contain the tree for the simplified
11759 form of the builtin function call. */
11762 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED
, tree dst
, tree src
)
11764 if (!validate_arg (dst
, POINTER_TYPE
)
11765 || !validate_arg (src
, POINTER_TYPE
))
11769 const char *p
= c_getstr (src
);
11771 /* If the string length is zero, return the dst parameter. */
11772 if (p
&& *p
== '\0')
11775 if (optimize_insn_for_speed_p ())
11777 /* See if we can store by pieces into (dst + strlen(dst)). */
11779 tree strlen_fn
= builtin_decl_implicit (BUILT_IN_STRLEN
);
11780 tree strcpy_fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
11782 if (!strlen_fn
|| !strcpy_fn
)
11785 /* If we don't have a movstr we don't want to emit an strcpy
11786 call. We have to do that if the length of the source string
11787 isn't computable (in that case we can use memcpy probably
11788 later expanding to a sequence of mov instructions). If we
11789 have movstr instructions we can emit strcpy calls. */
11792 tree len
= c_strlen (src
, 1);
11793 if (! len
|| TREE_SIDE_EFFECTS (len
))
11797 /* Stabilize the argument list. */
11798 dst
= builtin_save_expr (dst
);
11800 /* Create strlen (dst). */
11801 newdst
= build_call_expr_loc (loc
, strlen_fn
, 1, dst
);
11802 /* Create (dst p+ strlen (dst)). */
11804 newdst
= fold_build_pointer_plus_loc (loc
, dst
, newdst
);
11805 newdst
= builtin_save_expr (newdst
);
11807 call
= build_call_expr_loc (loc
, strcpy_fn
, 2, newdst
, src
);
11808 return build2 (COMPOUND_EXPR
, TREE_TYPE (dst
), call
, dst
);
11814 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11815 arguments to the call.
11817 Return NULL_TREE if no simplification was possible, otherwise return the
11818 simplified form of the call as a tree.
11820 The simplified form may be a constant or other expression which
11821 computes the same value, but in a more efficient manner (including
11822 calls to other builtin functions).
11824 The call may contain arguments which need to be evaluated, but
11825 which are not useful to determine the result of the call. In
11826 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11827 COMPOUND_EXPR will be an argument which must be evaluated.
11828 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11829 COMPOUND_EXPR in the chain will contain the tree for the simplified
11830 form of the builtin function call. */
11833 fold_builtin_strncat (location_t loc
, tree dst
, tree src
, tree len
)
11835 if (!validate_arg (dst
, POINTER_TYPE
)
11836 || !validate_arg (src
, POINTER_TYPE
)
11837 || !validate_arg (len
, INTEGER_TYPE
))
11841 const char *p
= c_getstr (src
);
11843 /* If the requested length is zero, or the src parameter string
11844 length is zero, return the dst parameter. */
11845 if (integer_zerop (len
) || (p
&& *p
== '\0'))
11846 return omit_two_operands_loc (loc
, TREE_TYPE (dst
), dst
, src
, len
);
11848 /* If the requested len is greater than or equal to the string
11849 length, call strcat. */
11850 if (TREE_CODE (len
) == INTEGER_CST
&& p
11851 && compare_tree_int (len
, strlen (p
)) >= 0)
11853 tree fn
= builtin_decl_implicit (BUILT_IN_STRCAT
);
11855 /* If the replacement _DECL isn't initialized, don't do the
11860 return build_call_expr_loc (loc
, fn
, 2, dst
, src
);
11866 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11869 Return NULL_TREE if no simplification was possible, otherwise return the
11870 simplified form of the call as a tree.
11872 The simplified form may be a constant or other expression which
11873 computes the same value, but in a more efficient manner (including
11874 calls to other builtin functions).
11876 The call may contain arguments which need to be evaluated, but
11877 which are not useful to determine the result of the call. In
11878 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11879 COMPOUND_EXPR will be an argument which must be evaluated.
11880 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11881 COMPOUND_EXPR in the chain will contain the tree for the simplified
11882 form of the builtin function call. */
11885 fold_builtin_strspn (location_t loc
, tree s1
, tree s2
)
11887 if (!validate_arg (s1
, POINTER_TYPE
)
11888 || !validate_arg (s2
, POINTER_TYPE
))
11892 const char *p1
= c_getstr (s1
), *p2
= c_getstr (s2
);
11894 /* If both arguments are constants, evaluate at compile-time. */
11897 const size_t r
= strspn (p1
, p2
);
11898 return build_int_cst (size_type_node
, r
);
11901 /* If either argument is "", return NULL_TREE. */
11902 if ((p1
&& *p1
== '\0') || (p2
&& *p2
== '\0'))
11903 /* Evaluate and ignore both arguments in case either one has
11905 return omit_two_operands_loc (loc
, size_type_node
, size_zero_node
,
11911 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11914 Return NULL_TREE if no simplification was possible, otherwise return the
11915 simplified form of the call as a tree.
11917 The simplified form may be a constant or other expression which
11918 computes the same value, but in a more efficient manner (including
11919 calls to other builtin functions).
11921 The call may contain arguments which need to be evaluated, but
11922 which are not useful to determine the result of the call. In
11923 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11924 COMPOUND_EXPR will be an argument which must be evaluated.
11925 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11926 COMPOUND_EXPR in the chain will contain the tree for the simplified
11927 form of the builtin function call. */
11930 fold_builtin_strcspn (location_t loc
, tree s1
, tree s2
)
11932 if (!validate_arg (s1
, POINTER_TYPE
)
11933 || !validate_arg (s2
, POINTER_TYPE
))
11937 const char *p1
= c_getstr (s1
), *p2
= c_getstr (s2
);
11939 /* If both arguments are constants, evaluate at compile-time. */
11942 const size_t r
= strcspn (p1
, p2
);
11943 return build_int_cst (size_type_node
, r
);
11946 /* If the first argument is "", return NULL_TREE. */
11947 if (p1
&& *p1
== '\0')
11949 /* Evaluate and ignore argument s2 in case it has
11951 return omit_one_operand_loc (loc
, size_type_node
,
11952 size_zero_node
, s2
);
11955 /* If the second argument is "", return __builtin_strlen(s1). */
11956 if (p2
&& *p2
== '\0')
11958 tree fn
= builtin_decl_implicit (BUILT_IN_STRLEN
);
11960 /* If the replacement _DECL isn't initialized, don't do the
11965 return build_call_expr_loc (loc
, fn
, 1, s1
);
11971 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11972 to the call. IGNORE is true if the value returned
11973 by the builtin will be ignored. UNLOCKED is true is true if this
11974 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11975 the known length of the string. Return NULL_TREE if no simplification
11979 fold_builtin_fputs (location_t loc
, tree arg0
, tree arg1
,
11980 bool ignore
, bool unlocked
, tree len
)
11982 /* If we're using an unlocked function, assume the other unlocked
11983 functions exist explicitly. */
11984 tree
const fn_fputc
= (unlocked
11985 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED
)
11986 : builtin_decl_implicit (BUILT_IN_FPUTC
));
11987 tree
const fn_fwrite
= (unlocked
11988 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED
)
11989 : builtin_decl_implicit (BUILT_IN_FWRITE
));
11991 /* If the return value is used, don't do the transformation. */
11995 /* Verify the arguments in the original call. */
11996 if (!validate_arg (arg0
, POINTER_TYPE
)
11997 || !validate_arg (arg1
, POINTER_TYPE
))
12001 len
= c_strlen (arg0
, 0);
12003 /* Get the length of the string passed to fputs. If the length
12004 can't be determined, punt. */
12006 || TREE_CODE (len
) != INTEGER_CST
)
12009 switch (compare_tree_int (len
, 1))
12011 case -1: /* length is 0, delete the call entirely . */
12012 return omit_one_operand_loc (loc
, integer_type_node
,
12013 integer_zero_node
, arg1
);;
12015 case 0: /* length is 1, call fputc. */
12017 const char *p
= c_getstr (arg0
);
12022 return build_call_expr_loc (loc
, fn_fputc
, 2,
12024 (integer_type_node
, p
[0]), arg1
);
12030 case 1: /* length is greater than 1, call fwrite. */
12032 /* If optimizing for size keep fputs. */
12033 if (optimize_function_for_size_p (cfun
))
12035 /* New argument list transforming fputs(string, stream) to
12036 fwrite(string, 1, len, stream). */
12038 return build_call_expr_loc (loc
, fn_fwrite
, 4, arg0
,
12039 size_one_node
, len
, arg1
);
12044 gcc_unreachable ();
12049 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
12050 produced. False otherwise. This is done so that we don't output the error
12051 or warning twice or three times. */
12054 fold_builtin_next_arg (tree exp
, bool va_start_p
)
12056 tree fntype
= TREE_TYPE (current_function_decl
);
12057 int nargs
= call_expr_nargs (exp
);
12059 /* There is good chance the current input_location points inside the
12060 definition of the va_start macro (perhaps on the token for
12061 builtin) in a system header, so warnings will not be emitted.
12062 Use the location in real source code. */
12063 source_location current_location
=
12064 linemap_unwind_to_first_non_reserved_loc (line_table
, input_location
,
12067 if (!stdarg_p (fntype
))
12069 error ("%<va_start%> used in function with fixed args");
12075 if (va_start_p
&& (nargs
!= 2))
12077 error ("wrong number of arguments to function %<va_start%>");
12080 arg
= CALL_EXPR_ARG (exp
, 1);
12082 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
12083 when we checked the arguments and if needed issued a warning. */
12088 /* Evidently an out of date version of <stdarg.h>; can't validate
12089 va_start's second argument, but can still work as intended. */
12090 warning_at (current_location
,
12092 "%<__builtin_next_arg%> called without an argument");
12095 else if (nargs
> 1)
12097 error ("wrong number of arguments to function %<__builtin_next_arg%>");
12100 arg
= CALL_EXPR_ARG (exp
, 0);
12103 if (TREE_CODE (arg
) == SSA_NAME
)
12104 arg
= SSA_NAME_VAR (arg
);
12106 /* We destructively modify the call to be __builtin_va_start (ap, 0)
12107 or __builtin_next_arg (0) the first time we see it, after checking
12108 the arguments and if needed issuing a warning. */
12109 if (!integer_zerop (arg
))
12111 tree last_parm
= tree_last (DECL_ARGUMENTS (current_function_decl
));
12113 /* Strip off all nops for the sake of the comparison. This
12114 is not quite the same as STRIP_NOPS. It does more.
12115 We must also strip off INDIRECT_EXPR for C++ reference
12117 while (CONVERT_EXPR_P (arg
)
12118 || TREE_CODE (arg
) == INDIRECT_REF
)
12119 arg
= TREE_OPERAND (arg
, 0);
12120 if (arg
!= last_parm
)
12122 /* FIXME: Sometimes with the tree optimizers we can get the
12123 not the last argument even though the user used the last
12124 argument. We just warn and set the arg to be the last
12125 argument so that we will get wrong-code because of
12127 warning_at (current_location
,
12129 "second parameter of %<va_start%> not last named argument");
12132 /* Undefined by C99 7.15.1.4p4 (va_start):
12133 "If the parameter parmN is declared with the register storage
12134 class, with a function or array type, or with a type that is
12135 not compatible with the type that results after application of
12136 the default argument promotions, the behavior is undefined."
12138 else if (DECL_REGISTER (arg
))
12140 warning_at (current_location
,
12142 "undefined behaviour when second parameter of "
12143 "%<va_start%> is declared with %<register%> storage");
12146 /* We want to verify the second parameter just once before the tree
12147 optimizers are run and then avoid keeping it in the tree,
12148 as otherwise we could warn even for correct code like:
12149 void foo (int i, ...)
12150 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
12152 CALL_EXPR_ARG (exp
, 1) = integer_zero_node
;
12154 CALL_EXPR_ARG (exp
, 0) = integer_zero_node
;
12160 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
12161 ORIG may be null if this is a 2-argument call. We don't attempt to
12162 simplify calls with more than 3 arguments.
12164 Return NULL_TREE if no simplification was possible, otherwise return the
12165 simplified form of the call as a tree. If IGNORED is true, it means that
12166 the caller does not use the returned value of the function. */
12169 fold_builtin_sprintf (location_t loc
, tree dest
, tree fmt
,
12170 tree orig
, int ignored
)
12173 const char *fmt_str
= NULL
;
12175 /* Verify the required arguments in the original call. We deal with two
12176 types of sprintf() calls: 'sprintf (str, fmt)' and
12177 'sprintf (dest, "%s", orig)'. */
12178 if (!validate_arg (dest
, POINTER_TYPE
)
12179 || !validate_arg (fmt
, POINTER_TYPE
))
12181 if (orig
&& !validate_arg (orig
, POINTER_TYPE
))
12184 /* Check whether the format is a literal string constant. */
12185 fmt_str
= c_getstr (fmt
);
12186 if (fmt_str
== NULL
)
12190 retval
= NULL_TREE
;
12192 if (!init_target_chars ())
12195 /* If the format doesn't contain % args or %%, use strcpy. */
12196 if (strchr (fmt_str
, target_percent
) == NULL
)
12198 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
12203 /* Don't optimize sprintf (buf, "abc", ptr++). */
12207 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
12208 'format' is known to contain no % formats. */
12209 call
= build_call_expr_loc (loc
, fn
, 2, dest
, fmt
);
12211 retval
= build_int_cst (integer_type_node
, strlen (fmt_str
));
12214 /* If the format is "%s", use strcpy if the result isn't used. */
12215 else if (fmt_str
&& strcmp (fmt_str
, target_percent_s
) == 0)
12218 fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
12223 /* Don't crash on sprintf (str1, "%s"). */
12227 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
12230 retval
= c_strlen (orig
, 1);
12231 if (!retval
|| TREE_CODE (retval
) != INTEGER_CST
)
12234 call
= build_call_expr_loc (loc
, fn
, 2, dest
, orig
);
12237 if (call
&& retval
)
12239 retval
= fold_convert_loc
12240 (loc
, TREE_TYPE (TREE_TYPE (builtin_decl_implicit (BUILT_IN_SPRINTF
))),
12242 return build2 (COMPOUND_EXPR
, TREE_TYPE (retval
), call
, retval
);
12248 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
12249 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
12250 attempt to simplify calls with more than 4 arguments.
12252 Return NULL_TREE if no simplification was possible, otherwise return the
12253 simplified form of the call as a tree. If IGNORED is true, it means that
12254 the caller does not use the returned value of the function. */
12257 fold_builtin_snprintf (location_t loc
, tree dest
, tree destsize
, tree fmt
,
12258 tree orig
, int ignored
)
12261 const char *fmt_str
= NULL
;
12262 unsigned HOST_WIDE_INT destlen
;
12264 /* Verify the required arguments in the original call. We deal with two
12265 types of snprintf() calls: 'snprintf (str, cst, fmt)' and
12266 'snprintf (dest, cst, "%s", orig)'. */
12267 if (!validate_arg (dest
, POINTER_TYPE
)
12268 || !validate_arg (destsize
, INTEGER_TYPE
)
12269 || !validate_arg (fmt
, POINTER_TYPE
))
12271 if (orig
&& !validate_arg (orig
, POINTER_TYPE
))
12274 if (!host_integerp (destsize
, 1))
12277 /* Check whether the format is a literal string constant. */
12278 fmt_str
= c_getstr (fmt
);
12279 if (fmt_str
== NULL
)
12283 retval
= NULL_TREE
;
12285 if (!init_target_chars ())
12288 destlen
= tree_low_cst (destsize
, 1);
12290 /* If the format doesn't contain % args or %%, use strcpy. */
12291 if (strchr (fmt_str
, target_percent
) == NULL
)
12293 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
12294 size_t len
= strlen (fmt_str
);
12296 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
12300 /* We could expand this as
12301 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
12303 memcpy (str, fmt_with_nul_at_cstm1, cst);
12304 but in the former case that might increase code size
12305 and in the latter case grow .rodata section too much.
12306 So punt for now. */
12307 if (len
>= destlen
)
12313 /* Convert snprintf (str, cst, fmt) into strcpy (str, fmt) when
12314 'format' is known to contain no % formats and
12315 strlen (fmt) < cst. */
12316 call
= build_call_expr_loc (loc
, fn
, 2, dest
, fmt
);
12319 retval
= build_int_cst (integer_type_node
, strlen (fmt_str
));
12322 /* If the format is "%s", use strcpy if the result isn't used. */
12323 else if (fmt_str
&& strcmp (fmt_str
, target_percent_s
) == 0)
12325 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
12326 unsigned HOST_WIDE_INT origlen
;
12328 /* Don't crash on snprintf (str1, cst, "%s"). */
12332 retval
= c_strlen (orig
, 1);
12333 if (!retval
|| !host_integerp (retval
, 1))
12336 origlen
= tree_low_cst (retval
, 1);
12337 /* We could expand this as
12338 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
12340 memcpy (str1, str2_with_nul_at_cstm1, cst);
12341 but in the former case that might increase code size
12342 and in the latter case grow .rodata section too much.
12343 So punt for now. */
12344 if (origlen
>= destlen
)
12347 /* Convert snprintf (str1, cst, "%s", str2) into
12348 strcpy (str1, str2) if strlen (str2) < cst. */
12352 call
= build_call_expr_loc (loc
, fn
, 2, dest
, orig
);
12355 retval
= NULL_TREE
;
12358 if (call
&& retval
)
12360 tree fn
= builtin_decl_explicit (BUILT_IN_SNPRINTF
);
12361 retval
= fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fn
)), retval
);
12362 return build2 (COMPOUND_EXPR
, TREE_TYPE (retval
), call
, retval
);
12368 /* Expand a call EXP to __builtin_object_size. */
12371 expand_builtin_object_size (tree exp
)
12374 int object_size_type
;
12375 tree fndecl
= get_callee_fndecl (exp
);
12377 if (!validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
12379 error ("%Kfirst argument of %D must be a pointer, second integer constant",
12381 expand_builtin_trap ();
12385 ost
= CALL_EXPR_ARG (exp
, 1);
12388 if (TREE_CODE (ost
) != INTEGER_CST
12389 || tree_int_cst_sgn (ost
) < 0
12390 || compare_tree_int (ost
, 3) > 0)
12392 error ("%Klast argument of %D is not integer constant between 0 and 3",
12394 expand_builtin_trap ();
12398 object_size_type
= tree_low_cst (ost
, 0);
12400 return object_size_type
< 2 ? constm1_rtx
: const0_rtx
;
12403 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12404 FCODE is the BUILT_IN_* to use.
12405 Return NULL_RTX if we failed; the caller should emit a normal call,
12406 otherwise try to get the result in TARGET, if convenient (and in
12407 mode MODE if that's convenient). */
12410 expand_builtin_memory_chk (tree exp
, rtx target
, enum machine_mode mode
,
12411 enum built_in_function fcode
)
12413 tree dest
, src
, len
, size
;
12415 if (!validate_arglist (exp
,
12417 fcode
== BUILT_IN_MEMSET_CHK
12418 ? INTEGER_TYPE
: POINTER_TYPE
,
12419 INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
12422 dest
= CALL_EXPR_ARG (exp
, 0);
12423 src
= CALL_EXPR_ARG (exp
, 1);
12424 len
= CALL_EXPR_ARG (exp
, 2);
12425 size
= CALL_EXPR_ARG (exp
, 3);
12427 if (! host_integerp (size
, 1))
12430 if (host_integerp (len
, 1) || integer_all_onesp (size
))
12434 if (! integer_all_onesp (size
) && tree_int_cst_lt (size
, len
))
12436 warning_at (tree_nonartificial_location (exp
),
12437 0, "%Kcall to %D will always overflow destination buffer",
12438 exp
, get_callee_fndecl (exp
));
12443 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12444 mem{cpy,pcpy,move,set} is available. */
12447 case BUILT_IN_MEMCPY_CHK
:
12448 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY
);
12450 case BUILT_IN_MEMPCPY_CHK
:
12451 fn
= builtin_decl_explicit (BUILT_IN_MEMPCPY
);
12453 case BUILT_IN_MEMMOVE_CHK
:
12454 fn
= builtin_decl_explicit (BUILT_IN_MEMMOVE
);
12456 case BUILT_IN_MEMSET_CHK
:
12457 fn
= builtin_decl_explicit (BUILT_IN_MEMSET
);
12466 fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fn
, 3, dest
, src
, len
);
12467 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
12468 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
12469 return expand_expr (fn
, target
, mode
, EXPAND_NORMAL
);
12471 else if (fcode
== BUILT_IN_MEMSET_CHK
)
12475 unsigned int dest_align
= get_pointer_alignment (dest
);
12477 /* If DEST is not a pointer type, call the normal function. */
12478 if (dest_align
== 0)
12481 /* If SRC and DEST are the same (and not volatile), do nothing. */
12482 if (operand_equal_p (src
, dest
, 0))
12486 if (fcode
!= BUILT_IN_MEMPCPY_CHK
)
12488 /* Evaluate and ignore LEN in case it has side-effects. */
12489 expand_expr (len
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
12490 return expand_expr (dest
, target
, mode
, EXPAND_NORMAL
);
12493 expr
= fold_build_pointer_plus (dest
, len
);
12494 return expand_expr (expr
, target
, mode
, EXPAND_NORMAL
);
12497 /* __memmove_chk special case. */
12498 if (fcode
== BUILT_IN_MEMMOVE_CHK
)
12500 unsigned int src_align
= get_pointer_alignment (src
);
12502 if (src_align
== 0)
12505 /* If src is categorized for a readonly section we can use
12506 normal __memcpy_chk. */
12507 if (readonly_data_expr (src
))
12509 tree fn
= builtin_decl_explicit (BUILT_IN_MEMCPY_CHK
);
12512 fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fn
, 4,
12513 dest
, src
, len
, size
);
12514 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
12515 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
12516 return expand_expr (fn
, target
, mode
, EXPAND_NORMAL
);
12523 /* Emit warning if a buffer overflow is detected at compile time. */
12526 maybe_emit_chk_warning (tree exp
, enum built_in_function fcode
)
12530 location_t loc
= tree_nonartificial_location (exp
);
12534 case BUILT_IN_STRCPY_CHK
:
12535 case BUILT_IN_STPCPY_CHK
:
12536 /* For __strcat_chk the warning will be emitted only if overflowing
12537 by at least strlen (dest) + 1 bytes. */
12538 case BUILT_IN_STRCAT_CHK
:
12539 len
= CALL_EXPR_ARG (exp
, 1);
12540 size
= CALL_EXPR_ARG (exp
, 2);
12543 case BUILT_IN_STRNCAT_CHK
:
12544 case BUILT_IN_STRNCPY_CHK
:
12545 case BUILT_IN_STPNCPY_CHK
:
12546 len
= CALL_EXPR_ARG (exp
, 2);
12547 size
= CALL_EXPR_ARG (exp
, 3);
12549 case BUILT_IN_SNPRINTF_CHK
:
12550 case BUILT_IN_VSNPRINTF_CHK
:
12551 len
= CALL_EXPR_ARG (exp
, 1);
12552 size
= CALL_EXPR_ARG (exp
, 3);
12555 gcc_unreachable ();
12561 if (! host_integerp (size
, 1) || integer_all_onesp (size
))
12566 len
= c_strlen (len
, 1);
12567 if (! len
|| ! host_integerp (len
, 1) || tree_int_cst_lt (len
, size
))
12570 else if (fcode
== BUILT_IN_STRNCAT_CHK
)
12572 tree src
= CALL_EXPR_ARG (exp
, 1);
12573 if (! src
|| ! host_integerp (len
, 1) || tree_int_cst_lt (len
, size
))
12575 src
= c_strlen (src
, 1);
12576 if (! src
|| ! host_integerp (src
, 1))
12578 warning_at (loc
, 0, "%Kcall to %D might overflow destination buffer",
12579 exp
, get_callee_fndecl (exp
));
12582 else if (tree_int_cst_lt (src
, size
))
12585 else if (! host_integerp (len
, 1) || ! tree_int_cst_lt (size
, len
))
12588 warning_at (loc
, 0, "%Kcall to %D will always overflow destination buffer",
12589 exp
, get_callee_fndecl (exp
));
12592 /* Emit warning if a buffer overflow is detected at compile time
12593 in __sprintf_chk/__vsprintf_chk calls. */
12596 maybe_emit_sprintf_chk_warning (tree exp
, enum built_in_function fcode
)
12598 tree size
, len
, fmt
;
12599 const char *fmt_str
;
12600 int nargs
= call_expr_nargs (exp
);
12602 /* Verify the required arguments in the original call. */
12606 size
= CALL_EXPR_ARG (exp
, 2);
12607 fmt
= CALL_EXPR_ARG (exp
, 3);
12609 if (! host_integerp (size
, 1) || integer_all_onesp (size
))
12612 /* Check whether the format is a literal string constant. */
12613 fmt_str
= c_getstr (fmt
);
12614 if (fmt_str
== NULL
)
12617 if (!init_target_chars ())
12620 /* If the format doesn't contain % args or %%, we know its size. */
12621 if (strchr (fmt_str
, target_percent
) == 0)
12622 len
= build_int_cstu (size_type_node
, strlen (fmt_str
));
12623 /* If the format is "%s" and first ... argument is a string literal,
12625 else if (fcode
== BUILT_IN_SPRINTF_CHK
12626 && strcmp (fmt_str
, target_percent_s
) == 0)
12632 arg
= CALL_EXPR_ARG (exp
, 4);
12633 if (! POINTER_TYPE_P (TREE_TYPE (arg
)))
12636 len
= c_strlen (arg
, 1);
12637 if (!len
|| ! host_integerp (len
, 1))
12643 if (! tree_int_cst_lt (len
, size
))
12644 warning_at (tree_nonartificial_location (exp
),
12645 0, "%Kcall to %D will always overflow destination buffer",
12646 exp
, get_callee_fndecl (exp
));
12649 /* Emit warning if a free is called with address of a variable. */
12652 maybe_emit_free_warning (tree exp
)
12654 tree arg
= CALL_EXPR_ARG (exp
, 0);
12657 if (TREE_CODE (arg
) != ADDR_EXPR
)
12660 arg
= get_base_address (TREE_OPERAND (arg
, 0));
12661 if (arg
== NULL
|| INDIRECT_REF_P (arg
) || TREE_CODE (arg
) == MEM_REF
)
12664 if (SSA_VAR_P (arg
))
12665 warning_at (tree_nonartificial_location (exp
), OPT_Wfree_nonheap_object
,
12666 "%Kattempt to free a non-heap object %qD", exp
, arg
);
12668 warning_at (tree_nonartificial_location (exp
), OPT_Wfree_nonheap_object
,
12669 "%Kattempt to free a non-heap object", exp
);
12672 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12676 fold_builtin_object_size (tree ptr
, tree ost
)
12678 unsigned HOST_WIDE_INT bytes
;
12679 int object_size_type
;
12681 if (!validate_arg (ptr
, POINTER_TYPE
)
12682 || !validate_arg (ost
, INTEGER_TYPE
))
12687 if (TREE_CODE (ost
) != INTEGER_CST
12688 || tree_int_cst_sgn (ost
) < 0
12689 || compare_tree_int (ost
, 3) > 0)
12692 object_size_type
= tree_low_cst (ost
, 0);
12694 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12695 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12696 and (size_t) 0 for types 2 and 3. */
12697 if (TREE_SIDE_EFFECTS (ptr
))
12698 return build_int_cst_type (size_type_node
, object_size_type
< 2 ? -1 : 0);
12700 if (TREE_CODE (ptr
) == ADDR_EXPR
)
12702 bytes
= compute_builtin_object_size (ptr
, object_size_type
);
12703 if (double_int_fits_to_tree_p (size_type_node
,
12704 double_int::from_uhwi (bytes
)))
12705 return build_int_cstu (size_type_node
, bytes
);
12707 else if (TREE_CODE (ptr
) == SSA_NAME
)
12709 /* If object size is not known yet, delay folding until
12710 later. Maybe subsequent passes will help determining
12712 bytes
= compute_builtin_object_size (ptr
, object_size_type
);
12713 if (bytes
!= (unsigned HOST_WIDE_INT
) (object_size_type
< 2 ? -1 : 0)
12714 && double_int_fits_to_tree_p (size_type_node
,
12715 double_int::from_uhwi (bytes
)))
12716 return build_int_cstu (size_type_node
, bytes
);
12722 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12723 DEST, SRC, LEN, and SIZE are the arguments to the call.
12724 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
12725 code of the builtin. If MAXLEN is not NULL, it is maximum length
12726 passed as third argument. */
12729 fold_builtin_memory_chk (location_t loc
, tree fndecl
,
12730 tree dest
, tree src
, tree len
, tree size
,
12731 tree maxlen
, bool ignore
,
12732 enum built_in_function fcode
)
12736 if (!validate_arg (dest
, POINTER_TYPE
)
12737 || !validate_arg (src
,
12738 (fcode
== BUILT_IN_MEMSET_CHK
12739 ? INTEGER_TYPE
: POINTER_TYPE
))
12740 || !validate_arg (len
, INTEGER_TYPE
)
12741 || !validate_arg (size
, INTEGER_TYPE
))
12744 /* If SRC and DEST are the same (and not volatile), return DEST
12745 (resp. DEST+LEN for __mempcpy_chk). */
12746 if (fcode
!= BUILT_IN_MEMSET_CHK
&& operand_equal_p (src
, dest
, 0))
12748 if (fcode
!= BUILT_IN_MEMPCPY_CHK
)
12749 return omit_one_operand_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)),
12753 tree temp
= fold_build_pointer_plus_loc (loc
, dest
, len
);
12754 return fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), temp
);
12758 if (! host_integerp (size
, 1))
12761 if (! integer_all_onesp (size
))
12763 if (! host_integerp (len
, 1))
12765 /* If LEN is not constant, try MAXLEN too.
12766 For MAXLEN only allow optimizing into non-_ocs function
12767 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12768 if (maxlen
== NULL_TREE
|| ! host_integerp (maxlen
, 1))
12770 if (fcode
== BUILT_IN_MEMPCPY_CHK
&& ignore
)
12772 /* (void) __mempcpy_chk () can be optimized into
12773 (void) __memcpy_chk (). */
12774 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY_CHK
);
12778 return build_call_expr_loc (loc
, fn
, 4, dest
, src
, len
, size
);
12786 if (tree_int_cst_lt (size
, maxlen
))
12791 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12792 mem{cpy,pcpy,move,set} is available. */
12795 case BUILT_IN_MEMCPY_CHK
:
12796 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY
);
12798 case BUILT_IN_MEMPCPY_CHK
:
12799 fn
= builtin_decl_explicit (BUILT_IN_MEMPCPY
);
12801 case BUILT_IN_MEMMOVE_CHK
:
12802 fn
= builtin_decl_explicit (BUILT_IN_MEMMOVE
);
12804 case BUILT_IN_MEMSET_CHK
:
12805 fn
= builtin_decl_explicit (BUILT_IN_MEMSET
);
12814 return build_call_expr_loc (loc
, fn
, 3, dest
, src
, len
);
12817 /* Fold a call to the __st[rp]cpy_chk builtin.
12818 DEST, SRC, and SIZE are the arguments to the call.
12819 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12820 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12821 strings passed as second argument. */
12824 fold_builtin_stxcpy_chk (location_t loc
, tree fndecl
, tree dest
,
12825 tree src
, tree size
,
12826 tree maxlen
, bool ignore
,
12827 enum built_in_function fcode
)
12831 if (!validate_arg (dest
, POINTER_TYPE
)
12832 || !validate_arg (src
, POINTER_TYPE
)
12833 || !validate_arg (size
, INTEGER_TYPE
))
12836 /* If SRC and DEST are the same (and not volatile), return DEST. */
12837 if (fcode
== BUILT_IN_STRCPY_CHK
&& operand_equal_p (src
, dest
, 0))
12838 return fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), dest
);
12840 if (! host_integerp (size
, 1))
12843 if (! integer_all_onesp (size
))
12845 len
= c_strlen (src
, 1);
12846 if (! len
|| ! host_integerp (len
, 1))
12848 /* If LEN is not constant, try MAXLEN too.
12849 For MAXLEN only allow optimizing into non-_ocs function
12850 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12851 if (maxlen
== NULL_TREE
|| ! host_integerp (maxlen
, 1))
12853 if (fcode
== BUILT_IN_STPCPY_CHK
)
12858 /* If return value of __stpcpy_chk is ignored,
12859 optimize into __strcpy_chk. */
12860 fn
= builtin_decl_explicit (BUILT_IN_STRCPY_CHK
);
12864 return build_call_expr_loc (loc
, fn
, 3, dest
, src
, size
);
12867 if (! len
|| TREE_SIDE_EFFECTS (len
))
12870 /* If c_strlen returned something, but not a constant,
12871 transform __strcpy_chk into __memcpy_chk. */
12872 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY_CHK
);
12876 len
= fold_convert_loc (loc
, size_type_node
, len
);
12877 len
= size_binop_loc (loc
, PLUS_EXPR
, len
,
12878 build_int_cst (size_type_node
, 1));
12879 return fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)),
12880 build_call_expr_loc (loc
, fn
, 4,
12881 dest
, src
, len
, size
));
12887 if (! tree_int_cst_lt (maxlen
, size
))
12891 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12892 fn
= builtin_decl_explicit (fcode
== BUILT_IN_STPCPY_CHK
12893 ? BUILT_IN_STPCPY
: BUILT_IN_STRCPY
);
12897 return build_call_expr_loc (loc
, fn
, 2, dest
, src
);
12900 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
12901 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12902 length passed as third argument. IGNORE is true if return value can be
12903 ignored. FCODE is the BUILT_IN_* code of the builtin. */
12906 fold_builtin_stxncpy_chk (location_t loc
, tree dest
, tree src
,
12907 tree len
, tree size
, tree maxlen
, bool ignore
,
12908 enum built_in_function fcode
)
12912 if (!validate_arg (dest
, POINTER_TYPE
)
12913 || !validate_arg (src
, POINTER_TYPE
)
12914 || !validate_arg (len
, INTEGER_TYPE
)
12915 || !validate_arg (size
, INTEGER_TYPE
))
12918 if (fcode
== BUILT_IN_STPNCPY_CHK
&& ignore
)
12920 /* If return value of __stpncpy_chk is ignored,
12921 optimize into __strncpy_chk. */
12922 fn
= builtin_decl_explicit (BUILT_IN_STRNCPY_CHK
);
12924 return build_call_expr_loc (loc
, fn
, 4, dest
, src
, len
, size
);
12927 if (! host_integerp (size
, 1))
12930 if (! integer_all_onesp (size
))
12932 if (! host_integerp (len
, 1))
12934 /* If LEN is not constant, try MAXLEN too.
12935 For MAXLEN only allow optimizing into non-_ocs function
12936 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12937 if (maxlen
== NULL_TREE
|| ! host_integerp (maxlen
, 1))
12943 if (tree_int_cst_lt (size
, maxlen
))
12947 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
12948 fn
= builtin_decl_explicit (fcode
== BUILT_IN_STPNCPY_CHK
12949 ? BUILT_IN_STPNCPY
: BUILT_IN_STRNCPY
);
12953 return build_call_expr_loc (loc
, fn
, 3, dest
, src
, len
);
12956 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12957 are the arguments to the call. */
12960 fold_builtin_strcat_chk (location_t loc
, tree fndecl
, tree dest
,
12961 tree src
, tree size
)
12966 if (!validate_arg (dest
, POINTER_TYPE
)
12967 || !validate_arg (src
, POINTER_TYPE
)
12968 || !validate_arg (size
, INTEGER_TYPE
))
12971 p
= c_getstr (src
);
12972 /* If the SRC parameter is "", return DEST. */
12973 if (p
&& *p
== '\0')
12974 return omit_one_operand_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), dest
, src
);
12976 if (! host_integerp (size
, 1) || ! integer_all_onesp (size
))
12979 /* If __builtin_strcat_chk is used, assume strcat is available. */
12980 fn
= builtin_decl_explicit (BUILT_IN_STRCAT
);
12984 return build_call_expr_loc (loc
, fn
, 2, dest
, src
);
12987 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12991 fold_builtin_strncat_chk (location_t loc
, tree fndecl
,
12992 tree dest
, tree src
, tree len
, tree size
)
12997 if (!validate_arg (dest
, POINTER_TYPE
)
12998 || !validate_arg (src
, POINTER_TYPE
)
12999 || !validate_arg (size
, INTEGER_TYPE
)
13000 || !validate_arg (size
, INTEGER_TYPE
))
13003 p
= c_getstr (src
);
13004 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
13005 if (p
&& *p
== '\0')
13006 return omit_one_operand_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), dest
, len
);
13007 else if (integer_zerop (len
))
13008 return omit_one_operand_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), dest
, src
);
13010 if (! host_integerp (size
, 1))
13013 if (! integer_all_onesp (size
))
13015 tree src_len
= c_strlen (src
, 1);
13017 && host_integerp (src_len
, 1)
13018 && host_integerp (len
, 1)
13019 && ! tree_int_cst_lt (len
, src_len
))
13021 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
13022 fn
= builtin_decl_explicit (BUILT_IN_STRCAT_CHK
);
13026 return build_call_expr_loc (loc
, fn
, 3, dest
, src
, size
);
13031 /* If __builtin_strncat_chk is used, assume strncat is available. */
13032 fn
= builtin_decl_explicit (BUILT_IN_STRNCAT
);
13036 return build_call_expr_loc (loc
, fn
, 3, dest
, src
, len
);
13039 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
13040 Return NULL_TREE if a normal call should be emitted rather than
13041 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
13042 or BUILT_IN_VSPRINTF_CHK. */
13045 fold_builtin_sprintf_chk_1 (location_t loc
, int nargs
, tree
*args
,
13046 enum built_in_function fcode
)
13048 tree dest
, size
, len
, fn
, fmt
, flag
;
13049 const char *fmt_str
;
13051 /* Verify the required arguments in the original call. */
13055 if (!validate_arg (dest
, POINTER_TYPE
))
13058 if (!validate_arg (flag
, INTEGER_TYPE
))
13061 if (!validate_arg (size
, INTEGER_TYPE
))
13064 if (!validate_arg (fmt
, POINTER_TYPE
))
13067 if (! host_integerp (size
, 1))
13072 if (!init_target_chars ())
13075 /* Check whether the format is a literal string constant. */
13076 fmt_str
= c_getstr (fmt
);
13077 if (fmt_str
!= NULL
)
13079 /* If the format doesn't contain % args or %%, we know the size. */
13080 if (strchr (fmt_str
, target_percent
) == 0)
13082 if (fcode
!= BUILT_IN_SPRINTF_CHK
|| nargs
== 4)
13083 len
= build_int_cstu (size_type_node
, strlen (fmt_str
));
13085 /* If the format is "%s" and first ... argument is a string literal,
13086 we know the size too. */
13087 else if (fcode
== BUILT_IN_SPRINTF_CHK
13088 && strcmp (fmt_str
, target_percent_s
) == 0)
13095 if (validate_arg (arg
, POINTER_TYPE
))
13097 len
= c_strlen (arg
, 1);
13098 if (! len
|| ! host_integerp (len
, 1))
13105 if (! integer_all_onesp (size
))
13107 if (! len
|| ! tree_int_cst_lt (len
, size
))
13111 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13112 or if format doesn't contain % chars or is "%s". */
13113 if (! integer_zerop (flag
))
13115 if (fmt_str
== NULL
)
13117 if (strchr (fmt_str
, target_percent
) != NULL
13118 && strcmp (fmt_str
, target_percent_s
))
13122 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13123 fn
= builtin_decl_explicit (fcode
== BUILT_IN_VSPRINTF_CHK
13124 ? BUILT_IN_VSPRINTF
: BUILT_IN_SPRINTF
);
13128 return rewrite_call_expr_array (loc
, nargs
, args
, 4, fn
, 2, dest
, fmt
);
13131 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
13132 a normal call should be emitted rather than expanding the function
13133 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13136 fold_builtin_sprintf_chk (location_t loc
, tree exp
,
13137 enum built_in_function fcode
)
13139 return fold_builtin_sprintf_chk_1 (loc
, call_expr_nargs (exp
),
13140 CALL_EXPR_ARGP (exp
), fcode
);
13143 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
13144 NULL_TREE if a normal call should be emitted rather than expanding
13145 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13146 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13147 passed as second argument. */
13150 fold_builtin_snprintf_chk_1 (location_t loc
, int nargs
, tree
*args
,
13151 tree maxlen
, enum built_in_function fcode
)
13153 tree dest
, size
, len
, fn
, fmt
, flag
;
13154 const char *fmt_str
;
13156 /* Verify the required arguments in the original call. */
13160 if (!validate_arg (dest
, POINTER_TYPE
))
13163 if (!validate_arg (len
, INTEGER_TYPE
))
13166 if (!validate_arg (flag
, INTEGER_TYPE
))
13169 if (!validate_arg (size
, INTEGER_TYPE
))
13172 if (!validate_arg (fmt
, POINTER_TYPE
))
13175 if (! host_integerp (size
, 1))
13178 if (! integer_all_onesp (size
))
13180 if (! host_integerp (len
, 1))
13182 /* If LEN is not constant, try MAXLEN too.
13183 For MAXLEN only allow optimizing into non-_ocs function
13184 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13185 if (maxlen
== NULL_TREE
|| ! host_integerp (maxlen
, 1))
13191 if (tree_int_cst_lt (size
, maxlen
))
13195 if (!init_target_chars ())
13198 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13199 or if format doesn't contain % chars or is "%s". */
13200 if (! integer_zerop (flag
))
13202 fmt_str
= c_getstr (fmt
);
13203 if (fmt_str
== NULL
)
13205 if (strchr (fmt_str
, target_percent
) != NULL
13206 && strcmp (fmt_str
, target_percent_s
))
13210 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13212 fn
= builtin_decl_explicit (fcode
== BUILT_IN_VSNPRINTF_CHK
13213 ? BUILT_IN_VSNPRINTF
: BUILT_IN_SNPRINTF
);
13217 return rewrite_call_expr_array (loc
, nargs
, args
, 5, fn
, 3, dest
, len
, fmt
);
13220 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
13221 a normal call should be emitted rather than expanding the function
13222 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13223 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13224 passed as second argument. */
13227 fold_builtin_snprintf_chk (location_t loc
, tree exp
, tree maxlen
,
13228 enum built_in_function fcode
)
13230 return fold_builtin_snprintf_chk_1 (loc
, call_expr_nargs (exp
),
13231 CALL_EXPR_ARGP (exp
), maxlen
, fcode
);
13234 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
13235 FMT and ARG are the arguments to the call; we don't fold cases with
13236 more than 2 arguments, and ARG may be null if this is a 1-argument case.
13238 Return NULL_TREE if no simplification was possible, otherwise return the
13239 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13240 code of the function to be simplified. */
13243 fold_builtin_printf (location_t loc
, tree fndecl
, tree fmt
,
13244 tree arg
, bool ignore
,
13245 enum built_in_function fcode
)
13247 tree fn_putchar
, fn_puts
, newarg
, call
= NULL_TREE
;
13248 const char *fmt_str
= NULL
;
13250 /* If the return value is used, don't do the transformation. */
13254 /* Verify the required arguments in the original call. */
13255 if (!validate_arg (fmt
, POINTER_TYPE
))
13258 /* Check whether the format is a literal string constant. */
13259 fmt_str
= c_getstr (fmt
);
13260 if (fmt_str
== NULL
)
13263 if (fcode
== BUILT_IN_PRINTF_UNLOCKED
)
13265 /* If we're using an unlocked function, assume the other
13266 unlocked functions exist explicitly. */
13267 fn_putchar
= builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED
);
13268 fn_puts
= builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED
);
13272 fn_putchar
= builtin_decl_implicit (BUILT_IN_PUTCHAR
);
13273 fn_puts
= builtin_decl_implicit (BUILT_IN_PUTS
);
13276 if (!init_target_chars ())
13279 if (strcmp (fmt_str
, target_percent_s
) == 0
13280 || strchr (fmt_str
, target_percent
) == NULL
)
13284 if (strcmp (fmt_str
, target_percent_s
) == 0)
13286 if (fcode
== BUILT_IN_VPRINTF
|| fcode
== BUILT_IN_VPRINTF_CHK
)
13289 if (!arg
|| !validate_arg (arg
, POINTER_TYPE
))
13292 str
= c_getstr (arg
);
13298 /* The format specifier doesn't contain any '%' characters. */
13299 if (fcode
!= BUILT_IN_VPRINTF
&& fcode
!= BUILT_IN_VPRINTF_CHK
13305 /* If the string was "", printf does nothing. */
13306 if (str
[0] == '\0')
13307 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl
)), 0);
13309 /* If the string has length of 1, call putchar. */
13310 if (str
[1] == '\0')
13312 /* Given printf("c"), (where c is any one character,)
13313 convert "c"[0] to an int and pass that to the replacement
13315 newarg
= build_int_cst (integer_type_node
, str
[0]);
13317 call
= build_call_expr_loc (loc
, fn_putchar
, 1, newarg
);
13321 /* If the string was "string\n", call puts("string"). */
13322 size_t len
= strlen (str
);
13323 if ((unsigned char)str
[len
- 1] == target_newline
13324 && (size_t) (int) len
== len
13328 tree offset_node
, string_cst
;
13330 /* Create a NUL-terminated string that's one char shorter
13331 than the original, stripping off the trailing '\n'. */
13332 newarg
= build_string_literal (len
, str
);
13333 string_cst
= string_constant (newarg
, &offset_node
);
13334 gcc_checking_assert (string_cst
13335 && (TREE_STRING_LENGTH (string_cst
)
13337 && integer_zerop (offset_node
)
13339 TREE_STRING_POINTER (string_cst
)[len
- 1]
13340 == target_newline
);
13341 /* build_string_literal creates a new STRING_CST,
13342 modify it in place to avoid double copying. */
13343 newstr
= CONST_CAST (char *, TREE_STRING_POINTER (string_cst
));
13344 newstr
[len
- 1] = '\0';
13346 call
= build_call_expr_loc (loc
, fn_puts
, 1, newarg
);
13349 /* We'd like to arrange to call fputs(string,stdout) here,
13350 but we need stdout and don't have a way to get it yet. */
13355 /* The other optimizations can be done only on the non-va_list variants. */
13356 else if (fcode
== BUILT_IN_VPRINTF
|| fcode
== BUILT_IN_VPRINTF_CHK
)
13359 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
13360 else if (strcmp (fmt_str
, target_percent_s_newline
) == 0)
13362 if (!arg
|| !validate_arg (arg
, POINTER_TYPE
))
13365 call
= build_call_expr_loc (loc
, fn_puts
, 1, arg
);
13368 /* If the format specifier was "%c", call __builtin_putchar(arg). */
13369 else if (strcmp (fmt_str
, target_percent_c
) == 0)
13371 if (!arg
|| !validate_arg (arg
, INTEGER_TYPE
))
13374 call
= build_call_expr_loc (loc
, fn_putchar
, 1, arg
);
13380 return fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), call
);
13383 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
13384 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
13385 more than 3 arguments, and ARG may be null in the 2-argument case.
13387 Return NULL_TREE if no simplification was possible, otherwise return the
13388 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13389 code of the function to be simplified. */
13392 fold_builtin_fprintf (location_t loc
, tree fndecl
, tree fp
,
13393 tree fmt
, tree arg
, bool ignore
,
13394 enum built_in_function fcode
)
13396 tree fn_fputc
, fn_fputs
, call
= NULL_TREE
;
13397 const char *fmt_str
= NULL
;
13399 /* If the return value is used, don't do the transformation. */
13403 /* Verify the required arguments in the original call. */
13404 if (!validate_arg (fp
, POINTER_TYPE
))
13406 if (!validate_arg (fmt
, POINTER_TYPE
))
13409 /* Check whether the format is a literal string constant. */
13410 fmt_str
= c_getstr (fmt
);
13411 if (fmt_str
== NULL
)
13414 if (fcode
== BUILT_IN_FPRINTF_UNLOCKED
)
13416 /* If we're using an unlocked function, assume the other
13417 unlocked functions exist explicitly. */
13418 fn_fputc
= builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED
);
13419 fn_fputs
= builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED
);
13423 fn_fputc
= builtin_decl_implicit (BUILT_IN_FPUTC
);
13424 fn_fputs
= builtin_decl_implicit (BUILT_IN_FPUTS
);
13427 if (!init_target_chars ())
13430 /* If the format doesn't contain % args or %%, use strcpy. */
13431 if (strchr (fmt_str
, target_percent
) == NULL
)
13433 if (fcode
!= BUILT_IN_VFPRINTF
&& fcode
!= BUILT_IN_VFPRINTF_CHK
13437 /* If the format specifier was "", fprintf does nothing. */
13438 if (fmt_str
[0] == '\0')
13440 /* If FP has side-effects, just wait until gimplification is
13442 if (TREE_SIDE_EFFECTS (fp
))
13445 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl
)), 0);
13448 /* When "string" doesn't contain %, replace all cases of
13449 fprintf (fp, string) with fputs (string, fp). The fputs
13450 builtin will take care of special cases like length == 1. */
13452 call
= build_call_expr_loc (loc
, fn_fputs
, 2, fmt
, fp
);
13455 /* The other optimizations can be done only on the non-va_list variants. */
13456 else if (fcode
== BUILT_IN_VFPRINTF
|| fcode
== BUILT_IN_VFPRINTF_CHK
)
13459 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
13460 else if (strcmp (fmt_str
, target_percent_s
) == 0)
13462 if (!arg
|| !validate_arg (arg
, POINTER_TYPE
))
13465 call
= build_call_expr_loc (loc
, fn_fputs
, 2, arg
, fp
);
13468 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
13469 else if (strcmp (fmt_str
, target_percent_c
) == 0)
13471 if (!arg
|| !validate_arg (arg
, INTEGER_TYPE
))
13474 call
= build_call_expr_loc (loc
, fn_fputc
, 2, arg
, fp
);
13479 return fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), call
);
13482 /* Initialize format string characters in the target charset. */
13485 init_target_chars (void)
13490 target_newline
= lang_hooks
.to_target_charset ('\n');
13491 target_percent
= lang_hooks
.to_target_charset ('%');
13492 target_c
= lang_hooks
.to_target_charset ('c');
13493 target_s
= lang_hooks
.to_target_charset ('s');
13494 if (target_newline
== 0 || target_percent
== 0 || target_c
== 0
13498 target_percent_c
[0] = target_percent
;
13499 target_percent_c
[1] = target_c
;
13500 target_percent_c
[2] = '\0';
13502 target_percent_s
[0] = target_percent
;
13503 target_percent_s
[1] = target_s
;
13504 target_percent_s
[2] = '\0';
13506 target_percent_s_newline
[0] = target_percent
;
13507 target_percent_s_newline
[1] = target_s
;
13508 target_percent_s_newline
[2] = target_newline
;
13509 target_percent_s_newline
[3] = '\0';
13516 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
13517 and no overflow/underflow occurred. INEXACT is true if M was not
13518 exactly calculated. TYPE is the tree type for the result. This
13519 function assumes that you cleared the MPFR flags and then
13520 calculated M to see if anything subsequently set a flag prior to
13521 entering this function. Return NULL_TREE if any checks fail. */
13524 do_mpfr_ckconv (mpfr_srcptr m
, tree type
, int inexact
)
13526 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13527 overflow/underflow occurred. If -frounding-math, proceed iff the
13528 result of calling FUNC was exact. */
13529 if (mpfr_number_p (m
) && !mpfr_overflow_p () && !mpfr_underflow_p ()
13530 && (!flag_rounding_math
|| !inexact
))
13532 REAL_VALUE_TYPE rr
;
13534 real_from_mpfr (&rr
, m
, type
, GMP_RNDN
);
13535 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
13536 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13537 but the mpft_t is not, then we underflowed in the
13539 if (real_isfinite (&rr
)
13540 && (rr
.cl
== rvc_zero
) == (mpfr_zero_p (m
) != 0))
13542 REAL_VALUE_TYPE rmode
;
13544 real_convert (&rmode
, TYPE_MODE (type
), &rr
);
13545 /* Proceed iff the specified mode can hold the value. */
13546 if (real_identical (&rmode
, &rr
))
13547 return build_real (type
, rmode
);
13553 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
13554 number and no overflow/underflow occurred. INEXACT is true if M
13555 was not exactly calculated. TYPE is the tree type for the result.
13556 This function assumes that you cleared the MPFR flags and then
13557 calculated M to see if anything subsequently set a flag prior to
13558 entering this function. Return NULL_TREE if any checks fail, if
13559 FORCE_CONVERT is true, then bypass the checks. */
13562 do_mpc_ckconv (mpc_srcptr m
, tree type
, int inexact
, int force_convert
)
13564 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13565 overflow/underflow occurred. If -frounding-math, proceed iff the
13566 result of calling FUNC was exact. */
13568 || (mpfr_number_p (mpc_realref (m
)) && mpfr_number_p (mpc_imagref (m
))
13569 && !mpfr_overflow_p () && !mpfr_underflow_p ()
13570 && (!flag_rounding_math
|| !inexact
)))
13572 REAL_VALUE_TYPE re
, im
;
13574 real_from_mpfr (&re
, mpc_realref (m
), TREE_TYPE (type
), GMP_RNDN
);
13575 real_from_mpfr (&im
, mpc_imagref (m
), TREE_TYPE (type
), GMP_RNDN
);
13576 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
13577 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13578 but the mpft_t is not, then we underflowed in the
13581 || (real_isfinite (&re
) && real_isfinite (&im
)
13582 && (re
.cl
== rvc_zero
) == (mpfr_zero_p (mpc_realref (m
)) != 0)
13583 && (im
.cl
== rvc_zero
) == (mpfr_zero_p (mpc_imagref (m
)) != 0)))
13585 REAL_VALUE_TYPE re_mode
, im_mode
;
13587 real_convert (&re_mode
, TYPE_MODE (TREE_TYPE (type
)), &re
);
13588 real_convert (&im_mode
, TYPE_MODE (TREE_TYPE (type
)), &im
);
13589 /* Proceed iff the specified mode can hold the value. */
13591 || (real_identical (&re_mode
, &re
)
13592 && real_identical (&im_mode
, &im
)))
13593 return build_complex (type
, build_real (TREE_TYPE (type
), re_mode
),
13594 build_real (TREE_TYPE (type
), im_mode
));
13600 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
13601 FUNC on it and return the resulting value as a tree with type TYPE.
13602 If MIN and/or MAX are not NULL, then the supplied ARG must be
13603 within those bounds. If INCLUSIVE is true, then MIN/MAX are
13604 acceptable values, otherwise they are not. The mpfr precision is
13605 set to the precision of TYPE. We assume that function FUNC returns
13606 zero if the result could be calculated exactly within the requested
13610 do_mpfr_arg1 (tree arg
, tree type
, int (*func
)(mpfr_ptr
, mpfr_srcptr
, mp_rnd_t
),
13611 const REAL_VALUE_TYPE
*min
, const REAL_VALUE_TYPE
*max
,
13614 tree result
= NULL_TREE
;
13618 /* To proceed, MPFR must exactly represent the target floating point
13619 format, which only happens when the target base equals two. */
13620 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
13621 && TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
13623 const REAL_VALUE_TYPE
*const ra
= &TREE_REAL_CST (arg
);
13625 if (real_isfinite (ra
)
13626 && (!min
|| real_compare (inclusive
? GE_EXPR
: GT_EXPR
, ra
, min
))
13627 && (!max
|| real_compare (inclusive
? LE_EXPR
: LT_EXPR
, ra
, max
)))
13629 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
13630 const int prec
= fmt
->p
;
13631 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
13635 mpfr_init2 (m
, prec
);
13636 mpfr_from_real (m
, ra
, GMP_RNDN
);
13637 mpfr_clear_flags ();
13638 inexact
= func (m
, m
, rnd
);
13639 result
= do_mpfr_ckconv (m
, type
, inexact
);
13647 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
13648 FUNC on it and return the resulting value as a tree with type TYPE.
13649 The mpfr precision is set to the precision of TYPE. We assume that
13650 function FUNC returns zero if the result could be calculated
13651 exactly within the requested precision. */
13654 do_mpfr_arg2 (tree arg1
, tree arg2
, tree type
,
13655 int (*func
)(mpfr_ptr
, mpfr_srcptr
, mpfr_srcptr
, mp_rnd_t
))
13657 tree result
= NULL_TREE
;
13662 /* To proceed, MPFR must exactly represent the target floating point
13663 format, which only happens when the target base equals two. */
13664 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
13665 && TREE_CODE (arg1
) == REAL_CST
&& !TREE_OVERFLOW (arg1
)
13666 && TREE_CODE (arg2
) == REAL_CST
&& !TREE_OVERFLOW (arg2
))
13668 const REAL_VALUE_TYPE
*const ra1
= &TREE_REAL_CST (arg1
);
13669 const REAL_VALUE_TYPE
*const ra2
= &TREE_REAL_CST (arg2
);
13671 if (real_isfinite (ra1
) && real_isfinite (ra2
))
13673 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
13674 const int prec
= fmt
->p
;
13675 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
13679 mpfr_inits2 (prec
, m1
, m2
, NULL
);
13680 mpfr_from_real (m1
, ra1
, GMP_RNDN
);
13681 mpfr_from_real (m2
, ra2
, GMP_RNDN
);
13682 mpfr_clear_flags ();
13683 inexact
= func (m1
, m1
, m2
, rnd
);
13684 result
= do_mpfr_ckconv (m1
, type
, inexact
);
13685 mpfr_clears (m1
, m2
, NULL
);
13692 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13693 FUNC on it and return the resulting value as a tree with type TYPE.
13694 The mpfr precision is set to the precision of TYPE. We assume that
13695 function FUNC returns zero if the result could be calculated
13696 exactly within the requested precision. */
13699 do_mpfr_arg3 (tree arg1
, tree arg2
, tree arg3
, tree type
,
13700 int (*func
)(mpfr_ptr
, mpfr_srcptr
, mpfr_srcptr
, mpfr_srcptr
, mp_rnd_t
))
13702 tree result
= NULL_TREE
;
13708 /* To proceed, MPFR must exactly represent the target floating point
13709 format, which only happens when the target base equals two. */
13710 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
13711 && TREE_CODE (arg1
) == REAL_CST
&& !TREE_OVERFLOW (arg1
)
13712 && TREE_CODE (arg2
) == REAL_CST
&& !TREE_OVERFLOW (arg2
)
13713 && TREE_CODE (arg3
) == REAL_CST
&& !TREE_OVERFLOW (arg3
))
13715 const REAL_VALUE_TYPE
*const ra1
= &TREE_REAL_CST (arg1
);
13716 const REAL_VALUE_TYPE
*const ra2
= &TREE_REAL_CST (arg2
);
13717 const REAL_VALUE_TYPE
*const ra3
= &TREE_REAL_CST (arg3
);
13719 if (real_isfinite (ra1
) && real_isfinite (ra2
) && real_isfinite (ra3
))
13721 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
13722 const int prec
= fmt
->p
;
13723 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
13727 mpfr_inits2 (prec
, m1
, m2
, m3
, NULL
);
13728 mpfr_from_real (m1
, ra1
, GMP_RNDN
);
13729 mpfr_from_real (m2
, ra2
, GMP_RNDN
);
13730 mpfr_from_real (m3
, ra3
, GMP_RNDN
);
13731 mpfr_clear_flags ();
13732 inexact
= func (m1
, m1
, m2
, m3
, rnd
);
13733 result
= do_mpfr_ckconv (m1
, type
, inexact
);
13734 mpfr_clears (m1
, m2
, m3
, NULL
);
13741 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13742 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13743 If ARG_SINP and ARG_COSP are NULL then the result is returned
13744 as a complex value.
13745 The type is taken from the type of ARG and is used for setting the
13746 precision of the calculation and results. */
13749 do_mpfr_sincos (tree arg
, tree arg_sinp
, tree arg_cosp
)
13751 tree
const type
= TREE_TYPE (arg
);
13752 tree result
= NULL_TREE
;
13756 /* To proceed, MPFR must exactly represent the target floating point
13757 format, which only happens when the target base equals two. */
13758 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
13759 && TREE_CODE (arg
) == REAL_CST
13760 && !TREE_OVERFLOW (arg
))
13762 const REAL_VALUE_TYPE
*const ra
= &TREE_REAL_CST (arg
);
13764 if (real_isfinite (ra
))
13766 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
13767 const int prec
= fmt
->p
;
13768 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
13769 tree result_s
, result_c
;
13773 mpfr_inits2 (prec
, m
, ms
, mc
, NULL
);
13774 mpfr_from_real (m
, ra
, GMP_RNDN
);
13775 mpfr_clear_flags ();
13776 inexact
= mpfr_sin_cos (ms
, mc
, m
, rnd
);
13777 result_s
= do_mpfr_ckconv (ms
, type
, inexact
);
13778 result_c
= do_mpfr_ckconv (mc
, type
, inexact
);
13779 mpfr_clears (m
, ms
, mc
, NULL
);
13780 if (result_s
&& result_c
)
13782 /* If we are to return in a complex value do so. */
13783 if (!arg_sinp
&& !arg_cosp
)
13784 return build_complex (build_complex_type (type
),
13785 result_c
, result_s
);
13787 /* Dereference the sin/cos pointer arguments. */
13788 arg_sinp
= build_fold_indirect_ref (arg_sinp
);
13789 arg_cosp
= build_fold_indirect_ref (arg_cosp
);
13790 /* Proceed if valid pointer type were passed in. */
13791 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp
)) == TYPE_MAIN_VARIANT (type
)
13792 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp
)) == TYPE_MAIN_VARIANT (type
))
13794 /* Set the values. */
13795 result_s
= fold_build2 (MODIFY_EXPR
, type
, arg_sinp
,
13797 TREE_SIDE_EFFECTS (result_s
) = 1;
13798 result_c
= fold_build2 (MODIFY_EXPR
, type
, arg_cosp
,
13800 TREE_SIDE_EFFECTS (result_c
) = 1;
13801 /* Combine the assignments into a compound expr. */
13802 result
= non_lvalue (fold_build2 (COMPOUND_EXPR
, type
,
13803 result_s
, result_c
));
13811 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13812 two-argument mpfr order N Bessel function FUNC on them and return
13813 the resulting value as a tree with type TYPE. The mpfr precision
13814 is set to the precision of TYPE. We assume that function FUNC
13815 returns zero if the result could be calculated exactly within the
13816 requested precision. */
13818 do_mpfr_bessel_n (tree arg1
, tree arg2
, tree type
,
13819 int (*func
)(mpfr_ptr
, long, mpfr_srcptr
, mp_rnd_t
),
13820 const REAL_VALUE_TYPE
*min
, bool inclusive
)
13822 tree result
= NULL_TREE
;
13827 /* To proceed, MPFR must exactly represent the target floating point
13828 format, which only happens when the target base equals two. */
13829 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
13830 && host_integerp (arg1
, 0)
13831 && TREE_CODE (arg2
) == REAL_CST
&& !TREE_OVERFLOW (arg2
))
13833 const HOST_WIDE_INT n
= tree_low_cst (arg1
, 0);
13834 const REAL_VALUE_TYPE
*const ra
= &TREE_REAL_CST (arg2
);
13837 && real_isfinite (ra
)
13838 && (!min
|| real_compare (inclusive
? GE_EXPR
: GT_EXPR
, ra
, min
)))
13840 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
13841 const int prec
= fmt
->p
;
13842 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
13846 mpfr_init2 (m
, prec
);
13847 mpfr_from_real (m
, ra
, GMP_RNDN
);
13848 mpfr_clear_flags ();
13849 inexact
= func (m
, n
, m
, rnd
);
13850 result
= do_mpfr_ckconv (m
, type
, inexact
);
13858 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13859 the pointer *(ARG_QUO) and return the result. The type is taken
13860 from the type of ARG0 and is used for setting the precision of the
13861 calculation and results. */
13864 do_mpfr_remquo (tree arg0
, tree arg1
, tree arg_quo
)
13866 tree
const type
= TREE_TYPE (arg0
);
13867 tree result
= NULL_TREE
;
13872 /* To proceed, MPFR must exactly represent the target floating point
13873 format, which only happens when the target base equals two. */
13874 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
13875 && TREE_CODE (arg0
) == REAL_CST
&& !TREE_OVERFLOW (arg0
)
13876 && TREE_CODE (arg1
) == REAL_CST
&& !TREE_OVERFLOW (arg1
))
13878 const REAL_VALUE_TYPE
*const ra0
= TREE_REAL_CST_PTR (arg0
);
13879 const REAL_VALUE_TYPE
*const ra1
= TREE_REAL_CST_PTR (arg1
);
13881 if (real_isfinite (ra0
) && real_isfinite (ra1
))
13883 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
13884 const int prec
= fmt
->p
;
13885 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
13890 mpfr_inits2 (prec
, m0
, m1
, NULL
);
13891 mpfr_from_real (m0
, ra0
, GMP_RNDN
);
13892 mpfr_from_real (m1
, ra1
, GMP_RNDN
);
13893 mpfr_clear_flags ();
13894 mpfr_remquo (m0
, &integer_quo
, m0
, m1
, rnd
);
13895 /* Remquo is independent of the rounding mode, so pass
13896 inexact=0 to do_mpfr_ckconv(). */
13897 result_rem
= do_mpfr_ckconv (m0
, type
, /*inexact=*/ 0);
13898 mpfr_clears (m0
, m1
, NULL
);
13901 /* MPFR calculates quo in the host's long so it may
13902 return more bits in quo than the target int can hold
13903 if sizeof(host long) > sizeof(target int). This can
13904 happen even for native compilers in LP64 mode. In
13905 these cases, modulo the quo value with the largest
13906 number that the target int can hold while leaving one
13907 bit for the sign. */
13908 if (sizeof (integer_quo
) * CHAR_BIT
> INT_TYPE_SIZE
)
13909 integer_quo
%= (long)(1UL << (INT_TYPE_SIZE
- 1));
13911 /* Dereference the quo pointer argument. */
13912 arg_quo
= build_fold_indirect_ref (arg_quo
);
13913 /* Proceed iff a valid pointer type was passed in. */
13914 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo
)) == integer_type_node
)
13916 /* Set the value. */
13918 = fold_build2 (MODIFY_EXPR
, TREE_TYPE (arg_quo
), arg_quo
,
13919 build_int_cst (TREE_TYPE (arg_quo
),
13921 TREE_SIDE_EFFECTS (result_quo
) = 1;
13922 /* Combine the quo assignment with the rem. */
13923 result
= non_lvalue (fold_build2 (COMPOUND_EXPR
, type
,
13924 result_quo
, result_rem
));
13932 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13933 resulting value as a tree with type TYPE. The mpfr precision is
13934 set to the precision of TYPE. We assume that this mpfr function
13935 returns zero if the result could be calculated exactly within the
13936 requested precision. In addition, the integer pointer represented
13937 by ARG_SG will be dereferenced and set to the appropriate signgam
13941 do_mpfr_lgamma_r (tree arg
, tree arg_sg
, tree type
)
13943 tree result
= NULL_TREE
;
13947 /* To proceed, MPFR must exactly represent the target floating point
13948 format, which only happens when the target base equals two. Also
13949 verify ARG is a constant and that ARG_SG is an int pointer. */
13950 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
13951 && TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
)
13952 && TREE_CODE (TREE_TYPE (arg_sg
)) == POINTER_TYPE
13953 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg
))) == integer_type_node
)
13955 const REAL_VALUE_TYPE
*const ra
= TREE_REAL_CST_PTR (arg
);
13957 /* In addition to NaN and Inf, the argument cannot be zero or a
13958 negative integer. */
13959 if (real_isfinite (ra
)
13960 && ra
->cl
!= rvc_zero
13961 && !(real_isneg (ra
) && real_isinteger (ra
, TYPE_MODE (type
))))
13963 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
13964 const int prec
= fmt
->p
;
13965 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
13970 mpfr_init2 (m
, prec
);
13971 mpfr_from_real (m
, ra
, GMP_RNDN
);
13972 mpfr_clear_flags ();
13973 inexact
= mpfr_lgamma (m
, &sg
, m
, rnd
);
13974 result_lg
= do_mpfr_ckconv (m
, type
, inexact
);
13980 /* Dereference the arg_sg pointer argument. */
13981 arg_sg
= build_fold_indirect_ref (arg_sg
);
13982 /* Assign the signgam value into *arg_sg. */
13983 result_sg
= fold_build2 (MODIFY_EXPR
,
13984 TREE_TYPE (arg_sg
), arg_sg
,
13985 build_int_cst (TREE_TYPE (arg_sg
), sg
));
13986 TREE_SIDE_EFFECTS (result_sg
) = 1;
13987 /* Combine the signgam assignment with the lgamma result. */
13988 result
= non_lvalue (fold_build2 (COMPOUND_EXPR
, type
,
13989 result_sg
, result_lg
));
13997 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
13998 function FUNC on it and return the resulting value as a tree with
13999 type TYPE. The mpfr precision is set to the precision of TYPE. We
14000 assume that function FUNC returns zero if the result could be
14001 calculated exactly within the requested precision. */
14004 do_mpc_arg1 (tree arg
, tree type
, int (*func
)(mpc_ptr
, mpc_srcptr
, mpc_rnd_t
))
14006 tree result
= NULL_TREE
;
14010 /* To proceed, MPFR must exactly represent the target floating point
14011 format, which only happens when the target base equals two. */
14012 if (TREE_CODE (arg
) == COMPLEX_CST
&& !TREE_OVERFLOW (arg
)
14013 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) == REAL_TYPE
14014 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg
))))->b
== 2)
14016 const REAL_VALUE_TYPE
*const re
= TREE_REAL_CST_PTR (TREE_REALPART (arg
));
14017 const REAL_VALUE_TYPE
*const im
= TREE_REAL_CST_PTR (TREE_IMAGPART (arg
));
14019 if (real_isfinite (re
) && real_isfinite (im
))
14021 const struct real_format
*const fmt
=
14022 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type
)));
14023 const int prec
= fmt
->p
;
14024 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
14025 const mpc_rnd_t crnd
= fmt
->round_towards_zero
? MPC_RNDZZ
: MPC_RNDNN
;
14029 mpc_init2 (m
, prec
);
14030 mpfr_from_real (mpc_realref (m
), re
, rnd
);
14031 mpfr_from_real (mpc_imagref (m
), im
, rnd
);
14032 mpfr_clear_flags ();
14033 inexact
= func (m
, m
, crnd
);
14034 result
= do_mpc_ckconv (m
, type
, inexact
, /*force_convert=*/ 0);
14042 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
14043 mpc function FUNC on it and return the resulting value as a tree
14044 with type TYPE. The mpfr precision is set to the precision of
14045 TYPE. We assume that function FUNC returns zero if the result
14046 could be calculated exactly within the requested precision. If
14047 DO_NONFINITE is true, then fold expressions containing Inf or NaN
14048 in the arguments and/or results. */
14051 do_mpc_arg2 (tree arg0
, tree arg1
, tree type
, int do_nonfinite
,
14052 int (*func
)(mpc_ptr
, mpc_srcptr
, mpc_srcptr
, mpc_rnd_t
))
14054 tree result
= NULL_TREE
;
14059 /* To proceed, MPFR must exactly represent the target floating point
14060 format, which only happens when the target base equals two. */
14061 if (TREE_CODE (arg0
) == COMPLEX_CST
&& !TREE_OVERFLOW (arg0
)
14062 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
14063 && TREE_CODE (arg1
) == COMPLEX_CST
&& !TREE_OVERFLOW (arg1
)
14064 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1
))) == REAL_TYPE
14065 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0
))))->b
== 2)
14067 const REAL_VALUE_TYPE
*const re0
= TREE_REAL_CST_PTR (TREE_REALPART (arg0
));
14068 const REAL_VALUE_TYPE
*const im0
= TREE_REAL_CST_PTR (TREE_IMAGPART (arg0
));
14069 const REAL_VALUE_TYPE
*const re1
= TREE_REAL_CST_PTR (TREE_REALPART (arg1
));
14070 const REAL_VALUE_TYPE
*const im1
= TREE_REAL_CST_PTR (TREE_IMAGPART (arg1
));
14073 || (real_isfinite (re0
) && real_isfinite (im0
)
14074 && real_isfinite (re1
) && real_isfinite (im1
)))
14076 const struct real_format
*const fmt
=
14077 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type
)));
14078 const int prec
= fmt
->p
;
14079 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
14080 const mpc_rnd_t crnd
= fmt
->round_towards_zero
? MPC_RNDZZ
: MPC_RNDNN
;
14084 mpc_init2 (m0
, prec
);
14085 mpc_init2 (m1
, prec
);
14086 mpfr_from_real (mpc_realref (m0
), re0
, rnd
);
14087 mpfr_from_real (mpc_imagref (m0
), im0
, rnd
);
14088 mpfr_from_real (mpc_realref (m1
), re1
, rnd
);
14089 mpfr_from_real (mpc_imagref (m1
), im1
, rnd
);
14090 mpfr_clear_flags ();
14091 inexact
= func (m0
, m0
, m1
, crnd
);
14092 result
= do_mpc_ckconv (m0
, type
, inexact
, do_nonfinite
);
14101 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
14102 a normal call should be emitted rather than expanding the function
14103 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
14106 gimple_fold_builtin_sprintf_chk (gimple stmt
, enum built_in_function fcode
)
14108 int nargs
= gimple_call_num_args (stmt
);
14110 return fold_builtin_sprintf_chk_1 (gimple_location (stmt
), nargs
,
14112 ? gimple_call_arg_ptr (stmt
, 0)
14113 : &error_mark_node
), fcode
);
14116 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
14117 a normal call should be emitted rather than expanding the function
14118 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
14119 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
14120 passed as second argument. */
14123 gimple_fold_builtin_snprintf_chk (gimple stmt
, tree maxlen
,
14124 enum built_in_function fcode
)
14126 int nargs
= gimple_call_num_args (stmt
);
14128 return fold_builtin_snprintf_chk_1 (gimple_location (stmt
), nargs
,
14130 ? gimple_call_arg_ptr (stmt
, 0)
14131 : &error_mark_node
), maxlen
, fcode
);
14134 /* Builtins with folding operations that operate on "..." arguments
14135 need special handling; we need to store the arguments in a convenient
14136 data structure before attempting any folding. Fortunately there are
14137 only a few builtins that fall into this category. FNDECL is the
14138 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
14139 result of the function call is ignored. */
14142 gimple_fold_builtin_varargs (tree fndecl
, gimple stmt
,
14143 bool ignore ATTRIBUTE_UNUSED
)
14145 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
14146 tree ret
= NULL_TREE
;
14150 case BUILT_IN_SPRINTF_CHK
:
14151 case BUILT_IN_VSPRINTF_CHK
:
14152 ret
= gimple_fold_builtin_sprintf_chk (stmt
, fcode
);
14155 case BUILT_IN_SNPRINTF_CHK
:
14156 case BUILT_IN_VSNPRINTF_CHK
:
14157 ret
= gimple_fold_builtin_snprintf_chk (stmt
, NULL_TREE
, fcode
);
14164 ret
= build1 (NOP_EXPR
, TREE_TYPE (ret
), ret
);
14165 TREE_NO_WARNING (ret
) = 1;
14171 /* A wrapper function for builtin folding that prevents warnings for
14172 "statement without effect" and the like, caused by removing the
14173 call node earlier than the warning is generated. */
14176 fold_call_stmt (gimple stmt
, bool ignore
)
14178 tree ret
= NULL_TREE
;
14179 tree fndecl
= gimple_call_fndecl (stmt
);
14180 location_t loc
= gimple_location (stmt
);
14182 && TREE_CODE (fndecl
) == FUNCTION_DECL
14183 && DECL_BUILT_IN (fndecl
)
14184 && !gimple_call_va_arg_pack_p (stmt
))
14186 int nargs
= gimple_call_num_args (stmt
);
14187 tree
*args
= (nargs
> 0
14188 ? gimple_call_arg_ptr (stmt
, 0)
14189 : &error_mark_node
);
14191 if (avoid_folding_inline_builtin (fndecl
))
14193 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
14195 return targetm
.fold_builtin (fndecl
, nargs
, args
, ignore
);
14199 if (nargs
<= MAX_ARGS_TO_FOLD_BUILTIN
)
14200 ret
= fold_builtin_n (loc
, fndecl
, args
, nargs
, ignore
);
14202 ret
= gimple_fold_builtin_varargs (fndecl
, stmt
, ignore
);
14205 /* Propagate location information from original call to
14206 expansion of builtin. Otherwise things like
14207 maybe_emit_chk_warning, that operate on the expansion
14208 of a builtin, will use the wrong location information. */
14209 if (gimple_has_location (stmt
))
14211 tree realret
= ret
;
14212 if (TREE_CODE (ret
) == NOP_EXPR
)
14213 realret
= TREE_OPERAND (ret
, 0);
14214 if (CAN_HAVE_LOCATION_P (realret
)
14215 && !EXPR_HAS_LOCATION (realret
))
14216 SET_EXPR_LOCATION (realret
, loc
);
14226 /* Look up the function in builtin_decl that corresponds to DECL
14227 and set ASMSPEC as its user assembler name. DECL must be a
14228 function decl that declares a builtin. */
14231 set_builtin_user_assembler_name (tree decl
, const char *asmspec
)
14234 gcc_assert (TREE_CODE (decl
) == FUNCTION_DECL
14235 && DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_NORMAL
14238 builtin
= builtin_decl_explicit (DECL_FUNCTION_CODE (decl
));
14239 set_user_assembler_name (builtin
, asmspec
);
14240 switch (DECL_FUNCTION_CODE (decl
))
14242 case BUILT_IN_MEMCPY
:
14243 init_block_move_fn (asmspec
);
14244 memcpy_libfunc
= set_user_assembler_libfunc ("memcpy", asmspec
);
14246 case BUILT_IN_MEMSET
:
14247 init_block_clear_fn (asmspec
);
14248 memset_libfunc
= set_user_assembler_libfunc ("memset", asmspec
);
14250 case BUILT_IN_MEMMOVE
:
14251 memmove_libfunc
= set_user_assembler_libfunc ("memmove", asmspec
);
14253 case BUILT_IN_MEMCMP
:
14254 memcmp_libfunc
= set_user_assembler_libfunc ("memcmp", asmspec
);
14256 case BUILT_IN_ABORT
:
14257 abort_libfunc
= set_user_assembler_libfunc ("abort", asmspec
);
14260 if (INT_TYPE_SIZE
< BITS_PER_WORD
)
14262 set_user_assembler_libfunc ("ffs", asmspec
);
14263 set_optab_libfunc (ffs_optab
, mode_for_size (INT_TYPE_SIZE
,
14264 MODE_INT
, 0), "ffs");
14272 /* Return true if DECL is a builtin that expands to a constant or similarly
14275 is_simple_builtin (tree decl
)
14277 if (decl
&& DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_NORMAL
)
14278 switch (DECL_FUNCTION_CODE (decl
))
14280 /* Builtins that expand to constants. */
14281 case BUILT_IN_CONSTANT_P
:
14282 case BUILT_IN_EXPECT
:
14283 case BUILT_IN_OBJECT_SIZE
:
14284 case BUILT_IN_UNREACHABLE
:
14285 /* Simple register moves or loads from stack. */
14286 case BUILT_IN_ASSUME_ALIGNED
:
14287 case BUILT_IN_RETURN_ADDRESS
:
14288 case BUILT_IN_EXTRACT_RETURN_ADDR
:
14289 case BUILT_IN_FROB_RETURN_ADDR
:
14290 case BUILT_IN_RETURN
:
14291 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS
:
14292 case BUILT_IN_FRAME_ADDRESS
:
14293 case BUILT_IN_VA_END
:
14294 case BUILT_IN_STACK_SAVE
:
14295 case BUILT_IN_STACK_RESTORE
:
14296 /* Exception state returns or moves registers around. */
14297 case BUILT_IN_EH_FILTER
:
14298 case BUILT_IN_EH_POINTER
:
14299 case BUILT_IN_EH_COPY_VALUES
:
14309 /* Return true if DECL is a builtin that is not expensive, i.e., they are
14310 most probably expanded inline into reasonably simple code. This is a
14311 superset of is_simple_builtin. */
14313 is_inexpensive_builtin (tree decl
)
14317 else if (DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_MD
)
14319 else if (DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_NORMAL
)
14320 switch (DECL_FUNCTION_CODE (decl
))
14323 case BUILT_IN_ALLOCA
:
14324 case BUILT_IN_ALLOCA_WITH_ALIGN
:
14325 case BUILT_IN_BSWAP16
:
14326 case BUILT_IN_BSWAP32
:
14327 case BUILT_IN_BSWAP64
:
14329 case BUILT_IN_CLZIMAX
:
14330 case BUILT_IN_CLZL
:
14331 case BUILT_IN_CLZLL
:
14333 case BUILT_IN_CTZIMAX
:
14334 case BUILT_IN_CTZL
:
14335 case BUILT_IN_CTZLL
:
14337 case BUILT_IN_FFSIMAX
:
14338 case BUILT_IN_FFSL
:
14339 case BUILT_IN_FFSLL
:
14340 case BUILT_IN_IMAXABS
:
14341 case BUILT_IN_FINITE
:
14342 case BUILT_IN_FINITEF
:
14343 case BUILT_IN_FINITEL
:
14344 case BUILT_IN_FINITED32
:
14345 case BUILT_IN_FINITED64
:
14346 case BUILT_IN_FINITED128
:
14347 case BUILT_IN_FPCLASSIFY
:
14348 case BUILT_IN_ISFINITE
:
14349 case BUILT_IN_ISINF_SIGN
:
14350 case BUILT_IN_ISINF
:
14351 case BUILT_IN_ISINFF
:
14352 case BUILT_IN_ISINFL
:
14353 case BUILT_IN_ISINFD32
:
14354 case BUILT_IN_ISINFD64
:
14355 case BUILT_IN_ISINFD128
:
14356 case BUILT_IN_ISNAN
:
14357 case BUILT_IN_ISNANF
:
14358 case BUILT_IN_ISNANL
:
14359 case BUILT_IN_ISNAND32
:
14360 case BUILT_IN_ISNAND64
:
14361 case BUILT_IN_ISNAND128
:
14362 case BUILT_IN_ISNORMAL
:
14363 case BUILT_IN_ISGREATER
:
14364 case BUILT_IN_ISGREATEREQUAL
:
14365 case BUILT_IN_ISLESS
:
14366 case BUILT_IN_ISLESSEQUAL
:
14367 case BUILT_IN_ISLESSGREATER
:
14368 case BUILT_IN_ISUNORDERED
:
14369 case BUILT_IN_VA_ARG_PACK
:
14370 case BUILT_IN_VA_ARG_PACK_LEN
:
14371 case BUILT_IN_VA_COPY
:
14372 case BUILT_IN_TRAP
:
14373 case BUILT_IN_SAVEREGS
:
14374 case BUILT_IN_POPCOUNTL
:
14375 case BUILT_IN_POPCOUNTLL
:
14376 case BUILT_IN_POPCOUNTIMAX
:
14377 case BUILT_IN_POPCOUNT
:
14378 case BUILT_IN_PARITYL
:
14379 case BUILT_IN_PARITYLL
:
14380 case BUILT_IN_PARITYIMAX
:
14381 case BUILT_IN_PARITY
:
14382 case BUILT_IN_LABS
:
14383 case BUILT_IN_LLABS
:
14384 case BUILT_IN_PREFETCH
:
14388 return is_simple_builtin (decl
);