/* Expand builtin functions.
   Copyright (C) 1988-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
22 #include "coretypes.h"
28 #include "fold-const.h"
29 #include "stringpool.h"
30 #include "stor-layout.h"
33 #include "tree-object-size.h"
36 #include "internal-fn.h"
40 #include "insn-config.h"
47 #include "insn-codes.h"
52 #include "typeclass.h"
55 #include "langhooks.h"
56 #include "tree-ssanames.h"
58 #include "value-prof.h"
59 #include "diagnostic-core.h"
63 #include "lto-streamer.h"
65 #include "tree-chkp.h"
67 #include "gomp-constants.h"
static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
struct target_builtins default_target_builtins;
struct target_builtins *this_target_builtins = &default_target_builtins;
/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN
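/* Illustrative note (not part of the original source): DEF_BUILTIN above
   stringizes only its first argument, so a hypothetical builtins.def entry

     DEF_BUILTIN (BUILT_IN_MEMCPY, "memcpy", ...)

   contributes the initializer "BUILT_IN_MEMCPY", and built_in_names can
   therefore be indexed by a built_in_function code to recover the
   enumerator's name.  */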
/* Set up an array of builtin_info_type, make sure each element decl is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];
/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;
static rtx c_readstr (const char *, machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static void expand_errno_check (tree, rtx);
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_memcmp (tree, rtx, machine_mode);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
					machine_mode, int, tree);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree, bool);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree fold_builtin_nan (tree, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static bool integer_valued_real_p (tree);
static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_sqrt (location_t, tree, tree);
static tree fold_builtin_cbrt (location_t, tree, tree);
static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
static tree fold_builtin_cos (location_t, tree, tree, tree);
static tree fold_builtin_cosh (location_t, tree, tree, tree);
static tree fold_builtin_tan (tree, tree);
static tree fold_builtin_trunc (location_t, tree, tree);
static tree fold_builtin_floor (location_t, tree, tree);
static tree fold_builtin_ceil (location_t, tree, tree);
static tree fold_builtin_round (location_t, tree, tree);
static tree fold_builtin_int_roundingfn (location_t, tree, tree);
static tree fold_builtin_bitop (tree, tree);
static tree fold_builtin_strchr (location_t, tree, tree, tree);
static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_strcmp (location_t, tree, tree);
static tree fold_builtin_strncmp (location_t, tree, tree, tree);
static tree fold_builtin_signbit (location_t, tree, tree);
static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree,
					enum tree_code, enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree *, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strstr (location_t, tree, tree, tree);
static tree fold_builtin_strrchr (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
			  const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_arg2 (tree, tree, tree,
			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_arg3 (tree, tree, tree, tree,
			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr,
				  mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_sincos (tree, tree, tree);
static tree do_mpfr_bessel_n (tree, tree, tree,
			      int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
			      const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);
/* Return true if NAME starts with __builtin_ or __sync_.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  if (flag_cilkplus
      && (!strcmp (name, "__cilkrts_detach")
	  || !strcmp (name, "__cilkrts_pop_frame")))
    return true;
  return false;
}
/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}
/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

static bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}
/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and N
   in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
			     &mode, &unsignedp, &volatilep, true);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    {
      align = TYPE_ALIGN (TREE_TYPE (exp));
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = ptr_bitmask & -ptr_bitmask;
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, (step & -step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      if (!addr_p && !known_alignment
	  && TYPE_ALIGN (TREE_TYPE (exp)) > align)
	align = TYPE_ALIGN (TREE_TYPE (exp));
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}
/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Otherwise return
   false and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}
/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);
  return align;
}
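/* Worked example (illustrative sketch, not from the original source):
   the invariant is ptr & (align - 1) == bitpos, so a nonzero bitpos caps
   the provable alignment at its lowest set bit, which the two's-complement
   identity x & -x extracts:

     unsigned HOST_WIDE_INT bitpos = 24;      // binary ...11000
     unsigned int align = bitpos & -bitpos;   // == 8, best guarantee left

   With bitpos == 0 no low bit contradicts the computed alignment, so it
   is kept unchanged.  */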
/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* We cannot really tell whether this result is an approximation.  */
	  return true;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}
/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);

  return align;
}
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_LOC (src, input_location);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! tree_fits_shwi_p (offset_node))
    offset = -1;
  else
    offset = tree_to_shwi (offset_node);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset outside bounds of constant string");
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
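/* Illustrative examples (assumed behavior, not from the original source):
   for a STRING_CST "hello" c_strlen returns ssize_int (5); for
   "foo\0bar" + 2 the known offset 2 gives strlen (ptr + 2) == 1; and for
   "foo\0bar" + i with a variable i it returns NULL_TREE, since the
   internal zero byte makes the length depend on the unknown offset.  */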
/* Return a char pointer for a C string if it is a string constant
   or sum of string constant and integer constant.  */

static const char *
c_getstr (tree src)
{
  tree offset_node;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  if (offset_node == 0)
    return TREE_STRING_POINTER (src);
  else if (!tree_fits_uhwi_p (offset_node)
	   || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
    return 0;

  return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
}
/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, machine_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
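/* Worked example (illustrative, not from the original source): reading
   "abcd" in a 32-bit integer mode on a little-endian target maps byte i
   to bit position 8*i, yielding the constant 0x64636261 ('a' == 0x61 in
   the low byte); a big-endian target instead produces 0x61626364.  */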
/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;
/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
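/* Buffer layout implied by the stores above (illustrative summary, not
   from the original source): word 0 holds the frame value, word 1 the
   address of RECEIVER_LABEL, and the area starting at word 2 holds the
   machine-dependent stack save area; expand_builtin_longjmp below reads
   the three pieces back at the same offsets.  */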
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
  if (! targetm.have_nonlocal_goto ())
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

#if !HARD_FRAME_POINTER_IS_ARG_POINTER
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }
#endif

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
*iter
)
1063 return (iter
->i
< iter
->n
);
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */

 end: ;
  va_end (ap);

  return res;
}
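/* Usage sketch (illustrative, not from the original source): a caller
   checking a memcmp-like call writes

     if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
			    INTEGER_TYPE, VOID_TYPE))
       return NULL_RTX;

   where the trailing VOID_TYPE demands that no arguments remain, and a
   trailing 0 would instead accept any further arguments.  */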
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}
/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
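/* Usage sketch (illustrative, not from the original source): the source
   form handled here is

     __builtin_prefetch (&a[i + 8], 1, 3);   // write hint, high locality

   and both trailing arguments must fold to INTEGER_CSTs; omitting them
   defaults to a read hint (0) with maximal locality (3).  */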
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}
/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)
/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = VOIDmode;
	  }
    }
  return size;
}
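/* Worked example (illustrative, not from the original source): the
   rounding step keeps each saved register naturally aligned within the
   block; e.g. with size == 4 and an 8-byte mode requiring 8-byte
   alignment, CEIL (4, 8) * 8 == 8, so a 4-byte padding hole precedes
   that slot.  */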
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  machine_mode mode;
  rtx struct_incoming_value
    = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
				      crtl->args.pretend_args_size),
		       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    rtx_insn *seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  machine_mode mode;
  rtx incoming_args, result, reg, dest, src;
  rtx_call_insn *call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value
    = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
  if (!STACK_GROWS_DOWNWARD)
    incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
					 incoming_args, 0, OPTAB_LIB_WIDEN);

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
  else
    emit_stack_save (SAVE_BLOCK, &old_stack_level);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
  if (!STACK_GROWS_DOWNWARD)
    {
      if (CONST_INT_P (argsize))
	dest = plus_constant (Pmode, dest, -INTVAL (argsize));
      else
	dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
    }
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
  if (targetm.have_untyped_call ())
    {
      rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
      emit_call_insn (targetm.gen_untyped_call (mem, result,
						result_vector (1, result)));
    }
  else
#ifdef HAVE_call_value
  if (HAVE_call_value)
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    gcc_assert (!valreg); /* HAVE_untyped_call required.  */

	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_call_insn (GEN_CALL_VALUE (valreg,
				      gen_rtx_MEM (FUNCTION_MODE, function),
				      const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
#endif
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
  else
    emit_stack_restore (SAVE_BLOCK, old_stack_level);
  fixup_args_size_notes (call_insn, get_last_insn (), 0);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
/* Perform an untyped return.  */

static void
expand_builtin_return (rtx result)
{
  int size, align, regno;
  machine_mode mode;
  rtx reg;
  rtx_insn *call_fusage = 0;

  result = convert_memory_address (Pmode, result);

  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

  if (targetm.have_untyped_return ())
    {
      rtx vector = result_vector (0, result);
      emit_jump_insn (targetm.gen_untyped_return (result, vector));
      emit_barrier ();
      return;
    }

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
	emit_move_insn (reg, adjust_address (result, mode, size));

	push_to_sequence (call_fusage);
	emit_use (reg);
	call_fusage = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insn (call_fusage);

  /* Return whatever values were restored by jumping directly to the end
     of the function.  */
  expand_naked_return ();
}
/* Used by expand_builtin_classify_type and fold_builtin_classify_type.  */

static enum type_class
type_to_class (tree type)
{
  switch (TREE_CODE (type))
    {
    case VOID_TYPE:        return void_type_class;
    case INTEGER_TYPE:     return integer_type_class;
    case ENUMERAL_TYPE:    return enumeral_type_class;
    case BOOLEAN_TYPE:     return boolean_type_class;
    case POINTER_TYPE:     return pointer_type_class;
    case REFERENCE_TYPE:   return reference_type_class;
    case OFFSET_TYPE:      return offset_type_class;
    case REAL_TYPE:        return real_type_class;
    case COMPLEX_TYPE:     return complex_type_class;
    case FUNCTION_TYPE:    return function_type_class;
    case METHOD_TYPE:      return method_type_class;
    case RECORD_TYPE:      return record_type_class;
    case UNION_TYPE:
    case QUAL_UNION_TYPE:  return union_type_class;
    case ARRAY_TYPE:       return (TYPE_STRING_FLAG (type)
                                   ? string_type_class : array_type_class);
    case LANG_TYPE:        return lang_type_class;
    default:               return no_type_class;
    }
}
/* Expand a call EXP to __builtin_classify_type.  */

static rtx
expand_builtin_classify_type (tree exp)
{
  if (call_expr_nargs (exp))
    return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
  return GEN_INT (no_type_class);
}
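
/* With the mapping above, a call such as

     int k = __builtin_classify_type (3.14);

   expands to the integer constant for real_type_class, and a pointer
   argument yields pointer_type_class; the enumerators themselves are
   defined in typeclass.h.  */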
/* This helper macro, meant to be used in mathfn_built_in below,
   determines which among a set of three builtin math functions is
   appropriate for a given type mode.  The `F' and `L' cases are
   automatically generated from the `double' case.  */
#define CASE_MATHFN(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
    fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F; \
    fcodel = BUILT_IN_MATHFN##L; break;
/* Similar to above, but appends _R after any F/L suffix.  */
#define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
    fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R; \
    fcodel = BUILT_IN_MATHFN##L_R; break;
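
/* As an example, CASE_MATHFN (BUILT_IN_SQRT) expands to

     case BUILT_IN_SQRT: case BUILT_IN_SQRTF: case BUILT_IN_SQRTL:
       fcode = BUILT_IN_SQRT; fcodef = BUILT_IN_SQRTF;
       fcodel = BUILT_IN_SQRTL; break;

   and CASE_MATHFN_REENT (BUILT_IN_LGAMMA) produces the corresponding
   cases for BUILT_IN_LGAMMA_R, BUILT_IN_LGAMMAF_R and
   BUILT_IN_LGAMMAL_R.  */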
/* Return the mathematical function equivalent to FN but operating directly on
   TYPE, if available.  If IMPLICIT is true use the implicit builtin
   declaration, otherwise use the explicit declaration.  If we can't do the
   conversion, return zero.  */

static tree
mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
{
  enum built_in_function fcode, fcodef, fcodel, fcode2;

  switch (fn)
    {
    CASE_MATHFN (BUILT_IN_ACOS)
    CASE_MATHFN (BUILT_IN_ACOSH)
    CASE_MATHFN (BUILT_IN_ASIN)
    CASE_MATHFN (BUILT_IN_ASINH)
    CASE_MATHFN (BUILT_IN_ATAN)
    CASE_MATHFN (BUILT_IN_ATAN2)
    CASE_MATHFN (BUILT_IN_ATANH)
    CASE_MATHFN (BUILT_IN_CBRT)
    CASE_MATHFN (BUILT_IN_CEIL)
    CASE_MATHFN (BUILT_IN_CEXPI)
    CASE_MATHFN (BUILT_IN_COPYSIGN)
    CASE_MATHFN (BUILT_IN_COS)
    CASE_MATHFN (BUILT_IN_COSH)
    CASE_MATHFN (BUILT_IN_DREM)
    CASE_MATHFN (BUILT_IN_ERF)
    CASE_MATHFN (BUILT_IN_ERFC)
    CASE_MATHFN (BUILT_IN_EXP)
    CASE_MATHFN (BUILT_IN_EXP10)
    CASE_MATHFN (BUILT_IN_EXP2)
    CASE_MATHFN (BUILT_IN_EXPM1)
    CASE_MATHFN (BUILT_IN_FABS)
    CASE_MATHFN (BUILT_IN_FDIM)
    CASE_MATHFN (BUILT_IN_FLOOR)
    CASE_MATHFN (BUILT_IN_FMA)
    CASE_MATHFN (BUILT_IN_FMAX)
    CASE_MATHFN (BUILT_IN_FMIN)
    CASE_MATHFN (BUILT_IN_FMOD)
    CASE_MATHFN (BUILT_IN_FREXP)
    CASE_MATHFN (BUILT_IN_GAMMA)
    CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
    CASE_MATHFN (BUILT_IN_HUGE_VAL)
    CASE_MATHFN (BUILT_IN_HYPOT)
    CASE_MATHFN (BUILT_IN_ILOGB)
    CASE_MATHFN (BUILT_IN_ICEIL)
    CASE_MATHFN (BUILT_IN_IFLOOR)
    CASE_MATHFN (BUILT_IN_INF)
    CASE_MATHFN (BUILT_IN_IRINT)
    CASE_MATHFN (BUILT_IN_IROUND)
    CASE_MATHFN (BUILT_IN_ISINF)
    CASE_MATHFN (BUILT_IN_J0)
    CASE_MATHFN (BUILT_IN_J1)
    CASE_MATHFN (BUILT_IN_JN)
    CASE_MATHFN (BUILT_IN_LCEIL)
    CASE_MATHFN (BUILT_IN_LDEXP)
    CASE_MATHFN (BUILT_IN_LFLOOR)
    CASE_MATHFN (BUILT_IN_LGAMMA)
    CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
    CASE_MATHFN (BUILT_IN_LLCEIL)
    CASE_MATHFN (BUILT_IN_LLFLOOR)
    CASE_MATHFN (BUILT_IN_LLRINT)
    CASE_MATHFN (BUILT_IN_LLROUND)
    CASE_MATHFN (BUILT_IN_LOG)
    CASE_MATHFN (BUILT_IN_LOG10)
    CASE_MATHFN (BUILT_IN_LOG1P)
    CASE_MATHFN (BUILT_IN_LOG2)
    CASE_MATHFN (BUILT_IN_LOGB)
    CASE_MATHFN (BUILT_IN_LRINT)
    CASE_MATHFN (BUILT_IN_LROUND)
    CASE_MATHFN (BUILT_IN_MODF)
    CASE_MATHFN (BUILT_IN_NAN)
    CASE_MATHFN (BUILT_IN_NANS)
    CASE_MATHFN (BUILT_IN_NEARBYINT)
    CASE_MATHFN (BUILT_IN_NEXTAFTER)
    CASE_MATHFN (BUILT_IN_NEXTTOWARD)
    CASE_MATHFN (BUILT_IN_POW)
    CASE_MATHFN (BUILT_IN_POWI)
    CASE_MATHFN (BUILT_IN_POW10)
    CASE_MATHFN (BUILT_IN_REMAINDER)
    CASE_MATHFN (BUILT_IN_REMQUO)
    CASE_MATHFN (BUILT_IN_RINT)
    CASE_MATHFN (BUILT_IN_ROUND)
    CASE_MATHFN (BUILT_IN_SCALB)
    CASE_MATHFN (BUILT_IN_SCALBLN)
    CASE_MATHFN (BUILT_IN_SCALBN)
    CASE_MATHFN (BUILT_IN_SIGNBIT)
    CASE_MATHFN (BUILT_IN_SIGNIFICAND)
    CASE_MATHFN (BUILT_IN_SIN)
    CASE_MATHFN (BUILT_IN_SINCOS)
    CASE_MATHFN (BUILT_IN_SINH)
    CASE_MATHFN (BUILT_IN_SQRT)
    CASE_MATHFN (BUILT_IN_TAN)
    CASE_MATHFN (BUILT_IN_TANH)
    CASE_MATHFN (BUILT_IN_TGAMMA)
    CASE_MATHFN (BUILT_IN_TRUNC)
    CASE_MATHFN (BUILT_IN_Y0)
    CASE_MATHFN (BUILT_IN_Y1)
    CASE_MATHFN (BUILT_IN_YN)

    default:
      return NULL_TREE;
    }

  if (TYPE_MAIN_VARIANT (type) == double_type_node)
    fcode2 = fcode;
  else if (TYPE_MAIN_VARIANT (type) == float_type_node)
    fcode2 = fcodef;
  else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
    fcode2 = fcodel;
  else
    return NULL_TREE;

  if (implicit_p && !builtin_decl_implicit_p (fcode2))
    return NULL_TREE;

  return builtin_decl_explicit (fcode2);
}
/* Like mathfn_built_in_1(), but always use the implicit array.  */

tree
mathfn_built_in (tree type, enum built_in_function fn)
{
  return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
}
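
/* Example: mathfn_built_in (float_type_node, BUILT_IN_SQRT) returns the
   implicit declaration of sqrtf, and long_double_type_node yields
   sqrtl; for any type whose main variant is not double, float or long
   double the result is NULL_TREE.  */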
/* If errno must be maintained, expand the RTL to check if the result,
   TARGET, of a built-in function call, EXP, is NaN, and if so set
   errno to EDOM.  */

static void
expand_errno_check (tree exp, rtx target)
{
  rtx_code_label *lab = gen_label_rtx ();

  /* Test the result; if it is NaN, set errno=EDOM because
     the argument was not in the domain.  */
  do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
                           NULL_RTX, NULL, lab,
                           /* The jump is very likely.  */
                           REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));

#ifdef TARGET_EDOM
  /* If this built-in doesn't throw an exception, set errno directly.  */
  if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
    {
#ifdef GEN_ERRNO_RTX
      rtx errno_rtx = GEN_ERRNO_RTX;
#else
      rtx errno_rtx
          = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
#endif
      emit_move_insn (errno_rtx,
                      gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
      emit_label (lab);
      return;
    }
#endif

  /* Make sure the library call isn't expanded as a tail call.  */
  CALL_EXPR_TAILCALL (exp) = 0;

  /* We can't set errno=EDOM directly; let the library call do it.
     Pop the arguments right away in case the call gets deleted.  */
  NO_DEFER_POP;
  expand_call (exp, target, 0);
  OK_DEFER_POP;
  emit_label (lab);
}
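
/* The sequence emitted above behaves like the following pseudo-C,
   relying on the IEEE rule that only a NaN compares unequal to itself:

     if (result == result)
       goto lab;                    (the very likely branch)
     errno = EDOM;                  (NaN: argument not in the domain)
   lab:;

   When the built-in may throw, the library call is re-expanded instead
   so that the library itself sets errno.  */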
/* Expand a call to one of the builtin math functions (sqrt, exp, or log).
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  machine_mode mode;
  bool errno_set = false;
  bool try_widening = false;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SQRT):
      errno_set = ! tree_expr_nonnegative_p (arg);
      try_widening = true;
      builtin_optab = sqrt_optab;
      break;
    CASE_FLT_FN (BUILT_IN_EXP):
      errno_set = true; builtin_optab = exp_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
      errno_set = true; builtin_optab = exp10_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP2):
      errno_set = true; builtin_optab = exp2_optab; break;
    CASE_FLT_FN (BUILT_IN_EXPM1):
      errno_set = true; builtin_optab = expm1_optab; break;
    CASE_FLT_FN (BUILT_IN_LOGB):
      errno_set = true; builtin_optab = logb_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG):
      errno_set = true; builtin_optab = log_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG10):
      errno_set = true; builtin_optab = log10_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG2):
      errno_set = true; builtin_optab = log2_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG1P):
      errno_set = true; builtin_optab = log1p_optab; break;
    CASE_FLT_FN (BUILT_IN_ASIN):
      builtin_optab = asin_optab; break;
    CASE_FLT_FN (BUILT_IN_ACOS):
      builtin_optab = acos_optab; break;
    CASE_FLT_FN (BUILT_IN_TAN):
      builtin_optab = tan_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN):
      builtin_optab = atan_optab; break;
    CASE_FLT_FN (BUILT_IN_FLOOR):
      builtin_optab = floor_optab; break;
    CASE_FLT_FN (BUILT_IN_CEIL):
      builtin_optab = ceil_optab; break;
    CASE_FLT_FN (BUILT_IN_TRUNC):
      builtin_optab = btrunc_optab; break;
    CASE_FLT_FN (BUILT_IN_ROUND):
      builtin_optab = round_optab; break;
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
      builtin_optab = nearbyint_optab;
      if (flag_trapping_math)
        break;
      /* Else fallthrough and expand as rint.  */
    CASE_FLT_FN (BUILT_IN_RINT):
      builtin_optab = rint_optab; break;
    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
      builtin_optab = significand_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  /* Before working hard, check whether the instruction is available, but try
     to widen the mode for specific operations.  */
  if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
       || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
      && (!errno_set || !optimize_insn_for_size_p ()))
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
         need to expand the argument again.  This way, we will not perform
         side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into RESULT.
         Set RESULT to wherever the result comes back.  */
      result = expand_unop (mode, builtin_optab, op0, result, 0);

      if (result != 0)
        {
          if (errno_set)
            expand_errno_check (exp, result);

          /* Output the entire sequence.  */
          insns = get_insns ();
          end_sequence ();
          emit_insn (insns);
          return result;
        }

      /* If we were unable to expand via the builtin, stop the sequence
         (without outputting the insns) and call to the library function
         with the stabilized argument list.  */
      end_sequence ();
    }

  return expand_call (exp, target, target == const0_rtx);
}
/* Expand a call to the builtin binary math functions (pow and atan2).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, result;
  rtx_insn *insns;
  int op1_type = REAL_TYPE;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1;
  machine_mode mode;
  bool errno_set = true;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
    CASE_FLT_FN (BUILT_IN_LDEXP):
      op1_type = INTEGER_TYPE;
    default:
      break;
    }

  if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_POW):
      builtin_optab = pow_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN2):
      builtin_optab = atan2_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALB):
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
        return 0;
      builtin_optab = scalb_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
        return 0;
    /* Fall through... */
    CASE_FLT_FN (BUILT_IN_LDEXP):
      builtin_optab = ldexp_optab; break;
    CASE_FLT_FN (BUILT_IN_FMOD):
      builtin_optab = fmod_optab; break;
    CASE_FLT_FN (BUILT_IN_REMAINDER):
    CASE_FLT_FN (BUILT_IN_DREM):
      builtin_optab = remainder_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  result = gen_reg_rtx (mode);

  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  if (errno_set && optimize_insn_for_size_p ())
    return 0;

  /* Always stabilize the argument list.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);

  start_sequence ();

  /* Compute into RESULT.
     Set RESULT to wherever the result comes back.  */
  result = expand_binop (mode, builtin_optab, op0, op1,
                         result, 0, OPTAB_DIRECT);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (result == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  if (errno_set)
    expand_errno_check (exp, result);

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return result;
}
/* Expand a call to the builtin trinary math functions (fma).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, op2, result;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1, arg2;
  machine_mode mode;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  arg2 = CALL_EXPR_ARG (exp, 2);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_FMA):
      builtin_optab = fma_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  result = gen_reg_rtx (mode);

  /* Always stabilize the argument list.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
  CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);
  op2 = expand_normal (arg2);

  start_sequence ();

  /* Compute into RESULT.
     Set RESULT to wherever the result comes back.  */
  result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
                              result, 0);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (result == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return result;
}
/* Expand a call to the builtin sin and cos math functions.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      builtin_optab = sincos_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Check if sincos insn is available, otherwise fallback
     to sin or cos insn.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_FLT_FN (BUILT_IN_SIN):
        builtin_optab = sin_optab; break;
      CASE_FLT_FN (BUILT_IN_COS):
        builtin_optab = cos_optab; break;
      default:
        gcc_unreachable ();
      }

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
         need to expand the argument again.  This way, we will not perform
         side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into RESULT.
         Set RESULT to wherever the result comes back.  */
      if (builtin_optab == sincos_optab)
        {
          int ok;

          switch (DECL_FUNCTION_CODE (fndecl))
            {
            CASE_FLT_FN (BUILT_IN_SIN):
              ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
              break;
            CASE_FLT_FN (BUILT_IN_COS):
              ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
              break;
            default:
              gcc_unreachable ();
            }
          gcc_assert (ok);
        }
      else
        result = expand_unop (mode, builtin_optab, op0, result, 0);

      if (result != 0)
        {
          /* Output the entire sequence.  */
          insns = get_insns ();
          end_sequence ();
          emit_insn (insns);
          return result;
        }

      /* If we were unable to expand via the builtin, stop the sequence
         (without outputting the insns) and call to the library function
         with the stabilized argument list.  */
      end_sequence ();
    }

  return expand_call (exp, target, target == const0_rtx);
}
/* Given an interclass math builtin decl FNDECL and its argument ARG
   return an RTL instruction code that implements the functionality.
   If that isn't possible or available return CODE_FOR_nothing.  */

static enum insn_code
interclass_mathfn_icode (tree arg, tree fndecl)
{
  bool errno_set = false;
  optab builtin_optab = unknown_optab;
  machine_mode mode;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ILOGB):
      errno_set = true; builtin_optab = ilogb_optab; break;
    CASE_FLT_FN (BUILT_IN_ISINF):
      builtin_optab = isinf_optab; break;
    case BUILT_IN_ISNORMAL:
    case BUILT_IN_ISFINITE:
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      /* These builtins have no optabs (yet).  */
      break;
    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && errno_set)
    return CODE_FOR_nothing;

  /* Optab mode depends on the mode of the input argument.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (builtin_optab)
    return optab_handler (builtin_optab, mode);
  return CODE_FOR_nothing;
}
/* Expand a call to one of the builtin math functions that operate on
   floating point argument and output an integer result (ilogb, isinf,
   isnan, etc).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_interclass_mathfn (tree exp, rtx target)
{
  enum insn_code icode = CODE_FOR_nothing;
  rtx op0;
  tree fndecl = get_callee_fndecl (exp);
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  icode = interclass_mathfn_icode (arg, fndecl);
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[1];
      rtx_insn *last = get_last_insn ();
      tree orig_arg = arg;

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
         need to expand the argument again.  This way, we will not perform
         side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      if (mode != GET_MODE (op0))
        op0 = convert_to_mode (mode, op0, 0);

      create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_legitimize_operands (icode, 0, 1, ops)
          && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
        return ops[0].value;

      delete_insns_since (last);
      CALL_EXPR_ARG (exp, 0) = orig_arg;
    }

  return NULL_RTX;
}
/* Expand a call to the builtin sincos math function.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function.  */

static rtx
expand_builtin_sincos (tree exp)
{
  rtx op0, op1, op2, target1, target2;
  machine_mode mode;
  tree arg, sinp, cosp;
  int result;
  location_t loc = EXPR_LOCATION (exp);
  tree alias_type, alias_off;

  if (!validate_arglist (exp, REAL_TYPE,
                         POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  sinp = CALL_EXPR_ARG (exp, 1);
  cosp = CALL_EXPR_ARG (exp, 2);

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Check if sincos insn is available, otherwise emit the call.  */
  if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  target1 = gen_reg_rtx (mode);
  target2 = gen_reg_rtx (mode);

  op0 = expand_normal (arg);
  alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
  alias_off = build_int_cst (alias_type, 0);
  op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
                                        sinp, alias_off));
  op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
                                        cosp, alias_off));

  /* Compute into target1 and target2.
     Set TARGET to wherever the result comes back.  */
  result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
  gcc_assert (result);

  /* Move target1 and target2 to the memory locations indicated
     by op1 and op2.  */
  emit_move_insn (op1, target1);
  emit_move_insn (op2, target2);

  return const0_rtx;
}
/* Expand a call to the internal cexpi builtin to the sincos math function.
   EXP is the expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  */

static rtx
expand_builtin_cexpi (tree exp, rtx target)
{
  tree fndecl = get_callee_fndecl (exp);
  tree arg, type;
  machine_mode mode;
  rtx op0, op1, op2;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Try expanding via a sincos optab, fall back to emitting a libcall
     to sincos or cexp.  We are sure we have sincos or cexp because cexpi
     is only generated from sincos, cexp or if we have either of them.  */
  if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
    {
      op1 = gen_reg_rtx (mode);
      op2 = gen_reg_rtx (mode);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      /* Compute into op1 and op2.  */
      expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
    }
  else if (targetm.libc_has_function (function_sincos))
    {
      tree call, fn = NULL_TREE;
      tree top1, top2;
      rtx op1a, op2a;

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
        fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
        fn = builtin_decl_explicit (BUILT_IN_SINCOS);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
        fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
      else
        gcc_unreachable ();

      op1 = assign_temp (TREE_TYPE (arg), 1, 1);
      op2 = assign_temp (TREE_TYPE (arg), 1, 1);
      op1a = copy_addr_to_reg (XEXP (op1, 0));
      op2a = copy_addr_to_reg (XEXP (op2, 0));
      top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
      top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);

      /* Make sure not to fold the sincos call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
                                      call, 3, arg, top1, top2));
    }
  else
    {
      tree call, fn = NULL_TREE, narg;
      tree ctype = build_complex_type (type);

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
        fn = builtin_decl_explicit (BUILT_IN_CEXPF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
        fn = builtin_decl_explicit (BUILT_IN_CEXP);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
        fn = builtin_decl_explicit (BUILT_IN_CEXPL);
      else
        gcc_unreachable ();

      /* If we don't have a decl for cexp create one.  This is the
         friendliest fallback if the user calls __builtin_cexpi
         without full target C99 function support.  */
      if (fn == NULL_TREE)
        {
          tree fntype;
          const char *name = NULL;

          if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
            name = "cexpf";
          else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
            name = "cexp";
          else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
            name = "cexpl";

          fntype = build_function_type_list (ctype, ctype, NULL_TREE);
          fn = build_fn_decl (name, fntype);
        }

      narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
                              build_real (type, dconst0), arg);

      /* Make sure not to fold the cexp call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      return expand_expr (build_call_nary (ctype, call, 1, narg),
                          target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now build the proper return type.  */
  return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
                              make_tree (TREE_TYPE (arg), op2),
                              make_tree (TREE_TYPE (arg), op1)),
                      target, VOIDmode, EXPAND_NORMAL);
}
/* Conveniently construct a function call expression.  FNDECL names the
   function to be called, N is the number of arguments, and the "..."
   parameters are the argument expressions.  Unlike build_call_expr
   this doesn't fold the call, hence it will always return a CALL_EXPR.  */

static tree
build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
{
  va_list ap;
  tree fntype = TREE_TYPE (fndecl);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);

  va_start (ap, n);
  fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
  va_end (ap);
  SET_EXPR_LOCATION (fn, loc);
  return fn;
}
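
/* Example use, as in expand_builtin_stpcpy below:

     tree result = build_call_nofold_loc (loc, fn, 2, dst, src);

   This builds an unfolded CALL_EXPR to strcpy; folding is deliberately
   skipped so the freshly built call cannot be turned back into the
   builtin currently being expanded.  */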
/* Expand a call to one of the builtin rounding functions gcc defines
   as an extension (lfloor and lceil).  As these are gcc extensions we
   do not need to worry about setting errno to EDOM.
   If expanding via optab fails, lower expression to (int)(floor(x)).
   EXP is the expression that is a call to the builtin function;
   if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, tmp;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fallback_fn;
  tree fallback_fndecl;
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
      builtin_optab = lceil_optab;
      fallback_fn = BUILT_IN_CEIL;
      break;

    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      builtin_optab = lfloor_optab;
      fallback_fn = BUILT_IN_FLOOR;
      break;

    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  /* Compute into TARGET.  */
  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns).  */
  end_sequence ();

  /* Fall back to floating point rounding optab.  */
  fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);

  /* For non-C99 targets we may end up without a fallback fndecl here
     if the user called __builtin_lfloor directly.  In this case emit
     a call to the floor/ceil variants nevertheless.  This should result
     in the best user experience for not full C99 targets.  */
  if (fallback_fndecl == NULL_TREE)
    {
      tree fntype;
      const char *name = NULL;

      switch (DECL_FUNCTION_CODE (fndecl))
        {
        case BUILT_IN_ICEIL:
        case BUILT_IN_LCEIL:
        case BUILT_IN_LLCEIL:
          name = "ceil";
          break;
        case BUILT_IN_ICEILF:
        case BUILT_IN_LCEILF:
        case BUILT_IN_LLCEILF:
          name = "ceilf";
          break;
        case BUILT_IN_ICEILL:
        case BUILT_IN_LCEILL:
        case BUILT_IN_LLCEILL:
          name = "ceill";
          break;
        case BUILT_IN_IFLOOR:
        case BUILT_IN_LFLOOR:
        case BUILT_IN_LLFLOOR:
          name = "floor";
          break;
        case BUILT_IN_IFLOORF:
        case BUILT_IN_LFLOORF:
        case BUILT_IN_LLFLOORF:
          name = "floorf";
          break;
        case BUILT_IN_IFLOORL:
        case BUILT_IN_LFLOORL:
        case BUILT_IN_LLFLOORL:
          name = "floorl";
          break;
        default:
          gcc_unreachable ();
        }

      fntype = build_function_type_list (TREE_TYPE (arg),
                                         TREE_TYPE (arg), NULL_TREE);
      fallback_fndecl = build_fn_decl (name, fntype);
    }

  exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);

  tmp = expand_normal (exp);
  tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));

  /* Truncate the result of floating point optab to integer
     via expand_fix ().  */
  target = gen_reg_rtx (mode);
  expand_fix (target, tmp, 0);

  return target;
}
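
/* Example: on a target without an lfloor optab entry for the mode,

     long l = __builtin_lfloor (x);

   takes the fallback path above and is expanded roughly as
   l = (long) floor (x): a call to floor followed by expand_fix () to
   truncate the result into the integer mode.  */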
/* Expand a call to one of the builtin math functions doing integer
   conversion (lrint).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn_2 (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg;
  machine_mode mode;
  enum built_in_function fallback_fn = BUILT_IN_NONE;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_IRINT):
      fallback_fn = BUILT_IN_LRINT;
      /* FALLTHRU */
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      builtin_optab = lrint_optab;
      break;

    CASE_FLT_FN (BUILT_IN_IROUND):
      fallback_fn = BUILT_IN_LROUND;
      /* FALLTHRU */
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      builtin_optab = lround_optab;
      break;

    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
    return NULL_RTX;

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (!flag_errno_math)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
         need to expand the argument again.  This way, we will not perform
         side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      if (expand_sfix_optab (result, op0, builtin_optab))
        {
          /* Output the entire sequence.  */
          insns = get_insns ();
          end_sequence ();
          emit_insn (insns);
          return result;
        }

      /* If we were unable to expand via the builtin, stop the sequence
         (without outputting the insns) and call to the library function
         with the stabilized argument list.  */
      end_sequence ();
    }

  if (fallback_fn != BUILT_IN_NONE)
    {
      /* Fall back to rounding to long int.  Use implicit_p 0 - for non-C99
         targets, (int) round (x) should never be transformed into
         BUILT_IN_IROUND and if __builtin_iround is called directly, emit
         a call to lround in the hope that the target provides at least some
         C99 functions.  This should result in the best user experience for
         not full C99 targets.  */
      tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
                                                fallback_fn, 0);

      exp = build_call_nofold_loc (EXPR_LOCATION (exp),
                                   fallback_fndecl, 1, arg);

      target = expand_call (exp, NULL_RTX, target == const0_rtx);
      target = maybe_emit_group_store (target, TREE_TYPE (exp));
      return convert_to_mode (mode, target, 0);
    }

  return expand_call (exp, target, target == const0_rtx);
}
/* Expand a call to the powi built-in mathematical function.  Return NULL_RTX if
   a normal call should be emitted rather than expanding the function
   in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_powi (tree exp, rtx target)
{
  tree arg0, arg1;
  rtx op0, op1;
  machine_mode mode;
  machine_mode mode2;

  if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Emit a libcall to libgcc.  */

  /* Mode of the 2nd argument must match that of an int.  */
  mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);

  if (target == NULL_RTX)
    target = gen_reg_rtx (mode);

  op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != mode)
    op0 = convert_to_mode (mode, op0, 0);
  op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
  if (GET_MODE (op1) != mode2)
    op1 = convert_to_mode (mode2, op1, 0);

  target = emit_library_call_value (optab_libfunc (powi_optab, mode),
                                    target, LCT_CONST, mode, 2,
                                    op0, mode, op1, mode2);

  return target;
}
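
/* A rough sketch of the result, assuming the usual libgcc powi
   functions (__powisf2, __powidf2, ...) are what
   optab_libfunc (powi_optab, mode) resolves to: for double,

     double d = __builtin_powi (x, n);

   becomes the equivalent of d = __powidf2 (x, n), with N first
   converted to the mode of int.  */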
/* Expand expression EXP which is a call to the strlen builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strlen (tree exp, rtx target,
                       machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      struct expand_operand ops[4];
      rtx pat;
      tree len;
      tree src = CALL_EXPR_ARG (exp, 0);
      rtx src_reg;
      rtx_insn *before_strlen;
      machine_mode insn_mode = target_mode;
      enum insn_code icode = CODE_FOR_nothing;
      unsigned int align;

      /* If the length can be computed at compile-time, return it.  */
      len = c_strlen (src, 0);
      if (len)
        return expand_expr (len, target, target_mode, EXPAND_NORMAL);

      /* If the length can be computed at compile-time and is constant
         integer, but there are side-effects in src, evaluate
         src for side-effects, then return len.
         E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
         can be optimized into: i++; x = 3;  */
      len = c_strlen (src, 1);
      if (len && TREE_CODE (len) == INTEGER_CST)
        {
          expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
          return expand_expr (len, target, target_mode, EXPAND_NORMAL);
        }

      align = get_pointer_alignment (src) / BITS_PER_UNIT;

      /* If SRC is not a pointer type, don't do this operation inline.  */
      if (align == 0)
        return NULL_RTX;

      /* Bail out if we can't compute strlen in the right mode.  */
      while (insn_mode != VOIDmode)
        {
          icode = optab_handler (strlen_optab, insn_mode);
          if (icode != CODE_FOR_nothing)
            break;

          insn_mode = GET_MODE_WIDER_MODE (insn_mode);
        }
      if (insn_mode == VOIDmode)
        return NULL_RTX;

      /* Make a place to hold the source address.  We will not expand
         the actual source until we are sure that the expansion will
         not fail -- there are trees that cannot be expanded twice.  */
      src_reg = gen_reg_rtx (Pmode);

      /* Mark the beginning of the strlen sequence so we can emit the
         source operand later.  */
      before_strlen = get_last_insn ();

      create_output_operand (&ops[0], target, insn_mode);
      create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
      create_integer_operand (&ops[2], 0);
      create_integer_operand (&ops[3], align);
      if (!maybe_expand_insn (icode, 4, ops))
        return NULL_RTX;

      /* Now that we are assured of success, expand the source.  */
      start_sequence ();
      pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
      if (pat != src_reg)
        {
#ifdef POINTERS_EXTEND_UNSIGNED
          if (GET_MODE (pat) != Pmode)
            pat = convert_to_mode (Pmode, pat,
                                   POINTERS_EXTEND_UNSIGNED);
#endif
          emit_move_insn (src_reg, pat);
        }
      pat = get_insns ();
      end_sequence ();

      if (before_strlen)
        emit_insn_after (pat, before_strlen);
      else
        emit_insn_before (pat, get_insns ());

      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (ops[0].value) == target_mode)
        target = ops[0].value;
      else if (target != 0)
        convert_move (target, ops[0].value, 0);
      else
        target = convert_to_mode (target_mode, ops[0].value, 0);

      return target;
    }
}
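
/* Two examples of the compile-time paths above:

     n = strlen ("hello");                      n folds to 5
     x = strlen (i++ ? "xfoo" + 1 : "bar");     i++ is kept, x becomes 3

   Only when neither c_strlen () query succeeds do we fall through to
   the strlen optab or a library call.  */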
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

static rtx
builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
                         machine_mode mode)
{
  const char *str = (const char *) data;

  gcc_assert (offset >= 0
              && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
                  <= strlen (str) + 1));

  return c_readstr (str + offset, mode);
}
/* LEN specifies the length of the block of memcpy/memset operation.
   Figure out its range and put it into MIN_SIZE/MAX_SIZE.
   In some cases we can make very likely guess on max size, then we
   set it into PROBABLE_MAX_SIZE.  */

static void
determine_block_size (tree len, rtx len_rtx,
                      unsigned HOST_WIDE_INT *min_size,
                      unsigned HOST_WIDE_INT *max_size,
                      unsigned HOST_WIDE_INT *probable_max_size)
{
  if (CONST_INT_P (len_rtx))
    {
      *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
      return;
    }
  else
    {
      wide_int min, max;
      enum value_range_type range_type = VR_UNDEFINED;

      /* Determine bounds from the type.  */
      if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
        *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
      else
        *min_size = 0;
      if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
        *probable_max_size = *max_size
          = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
      else
        *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));

      if (TREE_CODE (len) == SSA_NAME)
        range_type = get_range_info (len, &min, &max);
      if (range_type == VR_RANGE)
        {
          if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
            *min_size = min.to_uhwi ();
          if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
            *probable_max_size = *max_size = max.to_uhwi ();
        }
      else if (range_type == VR_ANTI_RANGE)
        {
          /* An anti range 0...N lets us determine the minimal size to N+1.  */
          if (min == 0)
            {
              if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
                *min_size = max.to_uhwi () + 1;
            }
          /* Code like

             int n;
             if (n < 100)
               memcpy (a, b, n)

             produces an anti range allowing negative values of N.  We still
             can use the information and make a guess that N is not negative.
             */
          else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
            *probable_max_size = min.to_uhwi () - 1;
        }
    }
  gcc_checking_assert (*max_size <=
                       (unsigned HOST_WIDE_INT)
                       GET_MODE_MASK (GET_MODE (len_rtx)));
}
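
/* Example: for memcpy (a, b, n) where range info records n in [8, 128],
   the caller sees *min_size == 8 and *max_size == *probable_max_size
   == 128.  For the anti-range produced by the signed comparison in the
   comment above (n < 100), only the guess is tightened:
   *probable_max_size becomes 99.  */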
/* Helper function to do the actual work for expand_builtin_memcpy.  */

static rtx
expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
{
  const char *src_str;
  unsigned int src_align = get_pointer_alignment (src);
  unsigned int dest_align = get_pointer_alignment (dest);
  rtx dest_mem, src_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;
  unsigned HOST_WIDE_INT min_size;
  unsigned HOST_WIDE_INT max_size;
  unsigned HOST_WIDE_INT probable_max_size;

  /* If DEST is not a pointer type, call the normal function.  */
  if (dest_align == 0)
    return NULL_RTX;

  /* If either SRC is not a pointer type, don't do this
     operation in-line.  */
  if (src_align == 0)
    return NULL_RTX;

  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
                            &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;
  dest_mem = get_memory_rtx (dest, len);
  set_mem_align (dest_mem, dest_align);
  len_rtx = expand_normal (len);
  determine_block_size (len, len_rtx, &min_size, &max_size,
                        &probable_max_size);
  src_str = c_getstr (src);

  /* If SRC is a string constant and block move would be done
     by pieces, we can avoid loading the string from memory
     and only store the computed constants.  */
  if (src_str
      && CONST_INT_P (len_rtx)
      && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
      && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
                              CONST_CAST (char *, src_str),
                              dest_align, false))
    {
      dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
                                  builtin_memcpy_read_str,
                                  CONST_CAST (char *, src_str),
                                  dest_align, false, 0);
      dest_mem = force_operand (XEXP (dest_mem, 0), target);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  src_mem = get_memory_rtx (src, len);
  set_mem_align (src_mem, src_align);

  /* Copy word part most expediently.  */
  dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
                                     CALL_EXPR_TAILCALL (exp)
                                     ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
                                     expected_align, expected_size,
                                     min_size, max_size, probable_max_size);

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), target);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  return dest_addr;
}
/* Expand a call EXP to the memcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memcpy (tree exp, rtx target)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      return expand_builtin_memcpy_args (dest, src, len, target, exp);
    }
}
/* Expand an instrumented call EXP to the memcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memcpy_with_bounds (tree exp, rtx target)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_BOUNDS_TYPE,
                         POINTER_TYPE, POINTER_BOUNDS_TYPE,
                         INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 2);
      tree len = CALL_EXPR_ARG (exp, 4);
      rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);

      /* Return src bounds with the result.  */
      if (res)
        {
          rtx bnd = force_reg (targetm.chkp_bound_mode (),
                               expand_normal (CALL_EXPR_ARG (exp, 1)));
          res = chkp_join_splitted_slot (res, bnd);
        }
      return res;
    }
}
/* Expand a call EXP to the mempcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  If ENDP is 0 return the
   destination pointer, if ENDP is 1 return the end pointer ala
   mempcpy, and if ENDP is 2 return the end pointer minus one ala
   stpcpy.  */

static rtx
expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      return expand_builtin_mempcpy_args (dest, src, len,
                                          target, mode, /*endp=*/ 1,
                                          exp);
    }
}
/* Expand an instrumented call EXP to the mempcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_BOUNDS_TYPE,
                         POINTER_TYPE, POINTER_BOUNDS_TYPE,
                         INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 2);
      tree len = CALL_EXPR_ARG (exp, 4);
      rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
                                             mode, 1, exp);

      /* Return src bounds with the result.  */
      if (res)
        {
          rtx bnd = force_reg (targetm.chkp_bound_mode (),
                               expand_normal (CALL_EXPR_ARG (exp, 1)));
          res = chkp_join_splitted_slot (res, bnd);
        }
      return res;
    }
}
/* Helper function to do the actual work for expand_builtin_mempcpy.  The
   arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_mempcpy.  */

static rtx
expand_builtin_mempcpy_args (tree dest, tree src, tree len,
                             rtx target, machine_mode mode, int endp,
                             tree orig_exp)
{
  tree fndecl = get_callee_fndecl (orig_exp);

  /* If return value is ignored, transform mempcpy into memcpy.  */
  if (target == const0_rtx
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
      && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
      tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
                                           dest, src, len);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else if (target == const0_rtx
           && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
      tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
                                           dest, src, len);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src);
      unsigned int dest_align = get_pointer_alignment (dest);
      rtx dest_mem, src_mem, len_rtx;

      /* If either SRC or DEST is not a pointer type, don't do this
         operation in-line.  */
      if (dest_align == 0 || src_align == 0)
        return NULL_RTX;

      /* If LEN is not constant, call the normal function.  */
      if (! tree_fits_uhwi_p (len))
        return NULL_RTX;

      len_rtx = expand_normal (len);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
         by pieces, we can avoid loading the string from memory
         and only store the computed constants.  */
      if (src_str
          && CONST_INT_P (len_rtx)
          && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
          && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
                                  CONST_CAST (char *, src_str),
                                  dest_align, false))
        {
          dest_mem = get_memory_rtx (dest, len);
          set_mem_align (dest_mem, dest_align);
          dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
                                      builtin_memcpy_read_str,
                                      CONST_CAST (char *, src_str),
                                      dest_align, false, endp);
          dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
          dest_mem = convert_memory_address (ptr_mode, dest_mem);
          return dest_mem;
        }

      if (CONST_INT_P (len_rtx)
          && can_move_by_pieces (INTVAL (len_rtx),
                                 MIN (dest_align, src_align)))
        {
          dest_mem = get_memory_rtx (dest, len);
          set_mem_align (dest_mem, dest_align);
          src_mem = get_memory_rtx (src, len);
          set_mem_align (src_mem, src_align);
          dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
                                     MIN (dest_align, src_align), endp);
          dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
          dest_mem = convert_memory_address (ptr_mode, dest_mem);
          return dest_mem;
        }

      return NULL_RTX;
    }
}
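
/* Example of the return-value-ignored transformation above:

     (void) mempcpy (d, s, n);

   is expanded as memcpy (d, s, n) via build_call_nofold_loc, since the
   end pointer is not needed and memcpy is the cheaper library entry
   point.  */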
#ifndef HAVE_movstr
# define HAVE_movstr 0
# define CODE_FOR_movstr CODE_FOR_nothing
#endif

/* Expand into a movstr instruction, if one is available.  Return NULL_RTX if
   we failed; the caller should emit a normal call, otherwise try to
   get the result in TARGET, if convenient.  If ENDP is 0 return the
   destination pointer, if ENDP is 1 return the end pointer ala
   mempcpy, and if ENDP is 2 return the end pointer minus one ala
   stpcpy.  */

static rtx
expand_movstr (tree dest, tree src, rtx target, int endp)
{
  struct expand_operand ops[3];
  rtx dest_mem;
  rtx src_mem;

  if (!HAVE_movstr)
    return NULL_RTX;

  dest_mem = get_memory_rtx (dest, NULL);
  src_mem = get_memory_rtx (src, NULL);
  if (!endp)
    {
      target = force_reg (Pmode, XEXP (dest_mem, 0));
      dest_mem = replace_equiv_address (dest_mem, target);
    }

  create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
  create_fixed_operand (&ops[1], dest_mem);
  create_fixed_operand (&ops[2], src_mem);
  if (!maybe_expand_insn (CODE_FOR_movstr, 3, ops))
    return NULL_RTX;

  if (endp && target != const0_rtx)
    {
      target = ops[0].value;
      /* movstr is supposed to set end to the address of the NUL
         terminator.  If the caller requested a mempcpy-like return value,
         adjust it.  */
      if (endp == 1 && target != const0_rtx)
        {
          rtx tem = plus_constant (GET_MODE (target),
                                   gen_lowpart (GET_MODE (target), target), 1);
          emit_move_insn (target, force_operand (tem, NULL_RTX));
        }
    }
  return target;
}
/* Expand expression EXP, which is a call to the strcpy builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient (and in mode MODE if that's
   convenient).  */

static rtx
expand_builtin_strcpy (tree exp, rtx target)
{
  if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      return expand_builtin_strcpy_args (dest, src, target);
    }
  return NULL_RTX;
}

/* Helper function to do the actual work for expand_builtin_strcpy.  The
   arguments to the builtin_strcpy call DEST and SRC are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_strcpy.  */

static rtx
expand_builtin_strcpy_args (tree dest, tree src, rtx target)
{
  return expand_movstr (dest, src, target, /*endp=*/0);
}
/* Expand a call EXP to the stpcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
{
  tree dst, src;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* If return value is ignored, transform stpcpy into strcpy.  */
  if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      tree len, lenp1;
      rtx ret;

      /* Ensure we get an actual string whose length can be evaluated at
         compile-time, not an expression containing a string.  This is
         because the latter will potentially produce pessimized code
         when used to produce the return value.  */
      if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
        return expand_movstr (dst, src, target, /*endp=*/2);

      lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
      ret = expand_builtin_mempcpy_args (dst, src, lenp1,
                                         target, mode, /*endp=*/2,
                                         exp);

      if (ret)
        return ret;

      if (TREE_CODE (len) == INTEGER_CST)
        {
          rtx len_rtx = expand_normal (len);

          if (CONST_INT_P (len_rtx))
            {
              ret = expand_builtin_strcpy_args (dst, src, target);

              if (ret)
                {
                  if (! target)
                    {
                      if (mode != VOIDmode)
                        target = gen_reg_rtx (mode);
                      else
                        target = gen_reg_rtx (GET_MODE (ret));
                    }
                  if (GET_MODE (target) != GET_MODE (ret))
                    ret = gen_lowpart (GET_MODE (target), ret);

                  ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
                  ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
                  gcc_assert (ret);

                  return target;
                }
            }
        }

      return expand_movstr (dst, src, target, /*endp=*/2);
    }
}
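
/* Example: for a constant source,

     p = stpcpy (d, "abc");

   is handled by the mempcpy path above as a 4-byte copy with endp == 2,
   so P ends up as D + 3, pointing at the copied NUL terminator.  */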
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

static rtx
builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
                          machine_mode mode)
{
  const char *str = (const char *) data;

  if ((unsigned HOST_WIDE_INT) offset > strlen (str))
    return const0_rtx;

  return c_readstr (str + offset, mode);
}
/* Expand expression EXP, which is a call to the strncpy builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  */

static rtx
expand_builtin_strncpy (tree exp, rtx target)
{
  location_t loc = EXPR_LOCATION (exp);

  if (validate_arglist (exp,
                        POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      tree slen = c_strlen (src, 1);

      /* We must be passed a constant len and src parameter.  */
      if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
        return NULL_RTX;

      slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

      /* We're required to pad with trailing zeros if the requested
         len is greater than strlen(s2)+1.  In that case try to
         use store_by_pieces, if it fails, punt.  */
      if (tree_int_cst_lt (slen, len))
        {
          unsigned int dest_align = get_pointer_alignment (dest);
          const char *p = c_getstr (src);
          rtx dest_mem;

          if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
              || !can_store_by_pieces (tree_to_uhwi (len),
                                       builtin_strncpy_read_str,
                                       CONST_CAST (char *, p),
                                       dest_align, false))
            return NULL_RTX;

          dest_mem = get_memory_rtx (dest, len);
          store_by_pieces (dest_mem, tree_to_uhwi (len),
                           builtin_strncpy_read_str,
                           CONST_CAST (char *, p), dest_align, false, 0);
          dest_mem = force_operand (XEXP (dest_mem, 0), target);
          dest_mem = convert_memory_address (ptr_mode, dest_mem);
          return dest_mem;
        }
    }
  return NULL_RTX;
}
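
/* Example of the padding path:

     strncpy (d, "ab", 5);

   has strlen (src) + 1 == 3 < 5, so store_by_pieces writes all five
   bytes 'a', 'b', '\0', '\0', '\0' directly, satisfying the
   requirement to pad with trailing zeros up to LEN.  */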
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

static rtx
builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                         machine_mode mode)
{
  const char *c = (const char *) data;
  char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));

  memset (p, *c, GET_MODE_SIZE (mode));

  return c_readstr (p, mode);
}
/* Callback routine for store_by_pieces.  Return the RTL of a register
   containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
   char value given in the RTL register data.  For example, if mode is
   4 bytes wide, return the RTL for 0x01010101*data.  */

static rtx
builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                        machine_mode mode)
{
  rtx target, coeff;
  size_t size;
  char *p;

  size = GET_MODE_SIZE (mode);
  if (size == 1)
    return (rtx) data;

  p = XALLOCAVEC (char, size);
  memset (p, 1, size);
  coeff = c_readstr (p, mode);

  target = convert_to_mode (mode, (rtx) data, 1);
  target = expand_mult (mode, target, coeff, NULL_RTX, 1);
  return force_reg (mode, target);
}
/* Expand expression EXP, which is a call to the memset builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient (and in mode MODE if that's
   convenient).  */

static rtx
expand_builtin_memset (tree exp, rtx target, machine_mode mode)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree val = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      return expand_builtin_memset_args (dest, val, len, target, mode, exp);
    }
}

/* Expand expression EXP, which is an instrumented call to the memset builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient (and in mode MODE if that's
   convenient).  */

static rtx
expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_BOUNDS_TYPE,
                         INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree val = CALL_EXPR_ARG (exp, 2);
      tree len = CALL_EXPR_ARG (exp, 3);
      rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);

      /* Return src bounds with the result.  */
      if (res)
        {
          rtx bnd = force_reg (targetm.chkp_bound_mode (),
                               expand_normal (CALL_EXPR_ARG (exp, 1)));
          res = chkp_join_splitted_slot (res, bnd);
        }
      return res;
    }
}
/* Helper function to do the actual work for expand_builtin_memset.  The
   arguments to the builtin_memset call DEST, VAL, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_memset.  */

static rtx
expand_builtin_memset_args (tree dest, tree val, tree len,
			    rtx target, machine_mode mode, tree orig_exp)
{
  tree fndecl, fn;
  enum built_in_function fcode;
  machine_mode val_mode;
  char c;
  unsigned int dest_align;
  rtx dest_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;
  unsigned HOST_WIDE_INT min_size;
  unsigned HOST_WIDE_INT max_size;
  unsigned HOST_WIDE_INT probable_max_size;

  dest_align = get_pointer_alignment (dest);

  /* If DEST is not a pointer type, don't do this operation in-line.  */
  if (dest_align == 0)
    return NULL_RTX;

  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
			    &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Evaluate and ignore VAL in case it has side-effects.  */
      expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (dest, target, mode, EXPAND_NORMAL);
    }

  /* Stabilize the arguments in case we fail.  */
  dest = builtin_save_expr (dest);
  val = builtin_save_expr (val);
  len = builtin_save_expr (len);

  len_rtx = expand_normal (len);
  determine_block_size (len, len_rtx, &min_size, &max_size,
			&probable_max_size);
  dest_mem = get_memory_rtx (dest, len);
  val_mode = TYPE_MODE (unsigned_char_type_node);

  if (TREE_CODE (val) != INTEGER_CST)
    {
      rtx val_rtx;

      val_rtx = expand_normal (val);
      val_rtx = convert_to_mode (val_mode, val_rtx, 0);

      /* Assume that we can memset by pieces if we can store
       * the coefficients by pieces (in the required modes).
       * We can't pass builtin_memset_gen_str as that emits RTL.  */
      c = 1;
      if (tree_fits_uhwi_p (len)
	  && can_store_by_pieces (tree_to_uhwi (len),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	{
	  val_rtx = force_reg (val_mode, val_rtx);
	  store_by_pieces (dest_mem, tree_to_uhwi (len),
			   builtin_memset_gen_str, val_rtx, dest_align,
			   true, 0);
	}
      else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
					dest_align, expected_align,
					expected_size, min_size, max_size,
					probable_max_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  if (target_char_cast (val, &c))
    goto do_libcall;

  if (c)
    {
      if (tree_fits_uhwi_p (len)
	  && can_store_by_pieces (tree_to_uhwi (len),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	store_by_pieces (dest_mem, tree_to_uhwi (len),
			 builtin_memset_read_str, &c, dest_align, true, 0);
      else if (!set_storage_via_setmem (dest_mem, len_rtx,
					gen_int_mode (c, val_mode),
					dest_align, expected_align,
					expected_size, min_size, max_size,
					probable_max_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  set_mem_align (dest_mem, dest_align);
  dest_addr = clear_storage_hints (dest_mem, len_rtx,
				   CALL_EXPR_TAILCALL (orig_exp)
				   ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
				   expected_align, expected_size,
				   min_size, max_size,
				   probable_max_size);

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  return dest_addr;

 do_libcall:
  fndecl = get_callee_fndecl (orig_exp);
  fcode = DECL_FUNCTION_CODE (fndecl);
  if (fcode == BUILT_IN_MEMSET
      || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
				dest, val, len);
  else if (fcode == BUILT_IN_BZERO)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
				dest, len);
  else
    gcc_unreachable ();
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
  return expand_call (fn, target, target == const0_rtx);
}
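
/* Illustrative sketch (not part of GCC): a source-level call the helper
   above can expand without a library call, assuming the target supports
   store-by-pieces for the constant length:

     char buf[16];
     __builtin_memset (buf, 0xab, sizeof buf);

   With a non-constant fill value the expander instead synthesizes the
   repeated-byte coefficient via builtin_memset_gen_str (0x01010101 * val
   for a 4-byte mode), and only takes the do_libcall path when neither
   store_by_pieces nor a setmem pattern applies.  */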
/* Expand expression EXP, which is a call to the bzero builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  */

static rtx
expand_builtin_bzero (tree exp)
{
  tree dest, size;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dest = CALL_EXPR_ARG (exp, 0);
  size = CALL_EXPR_ARG (exp, 1);

  /* New argument list transforming bzero(ptr x, int y) to
     memset(ptr x, int 0, size_t y).  This is done this way
     so that if it isn't expanded inline, we fall back to
     calling bzero instead of memset.  */

  return expand_builtin_memset_args (dest, integer_zero_node,
				     fold_convert_loc (loc,
						       size_type_node, size),
				     const0_rtx, VOIDmode, exp);
}
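
/* For illustration (not part of GCC): the transformation above turns

     bzero (p, n);

   into the equivalent of

     memset (p, 0, (size_t) n);

   before expansion, so the memset expansion machinery is reused while a
   failed inline expansion still calls bzero itself.  */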
/* Expand expression EXP, which is a call to the memcmp built-in function.
   Return NULL_RTX if we failed and the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in mode
   MODE, if that's convenient).  */

static rtx
expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
		       ATTRIBUTE_UNUSED machine_mode mode)
{
  location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
     implementing memcmp because it will stop if it encounters two
     zero bytes.  */
#if defined HAVE_cmpmemsi
  {
    rtx arg1_rtx, arg2_rtx, arg3_rtx;
    rtx result;
    rtx insn;
    tree arg1 = CALL_EXPR_ARG (exp, 0);
    tree arg2 = CALL_EXPR_ARG (exp, 1);
    tree len = CALL_EXPR_ARG (exp, 2);

    unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
    unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
    machine_mode insn_mode;

    insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;

    /* If we don't have POINTER_TYPE, call the function.  */
    if (arg1_align == 0 || arg2_align == 0)
      return NULL_RTX;

    /* Make a place to write the result of the instruction.  */
    result = target;
    if (! (result != 0
	   && REG_P (result) && GET_MODE (result) == insn_mode
	   && REGNO (result) >= FIRST_PSEUDO_REGISTER))
      result = gen_reg_rtx (insn_mode);

    arg1_rtx = get_memory_rtx (arg1, len);
    arg2_rtx = get_memory_rtx (arg2, len);
    arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));

    /* Set MEM_SIZE as appropriate.  */
    if (CONST_INT_P (arg3_rtx))
      {
	set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
	set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
      }

    insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			 GEN_INT (MIN (arg1_align, arg2_align)));
    if (insn)
      emit_insn (insn);
    else
      emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
			       TYPE_MODE (integer_type_node), 3,
			       XEXP (arg1_rtx, 0), Pmode,
			       XEXP (arg2_rtx, 0), Pmode,
			       convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
						TYPE_UNSIGNED (sizetype)),
			       TYPE_MODE (sizetype));

    /* Return the value in the proper mode for this function.  */
    mode = TYPE_MODE (TREE_TYPE (exp));
    if (GET_MODE (result) == mode)
      return result;
    else if (target != 0)
      {
	convert_move (target, result, 0);
	return target;
      }
    else
      return convert_to_mode (mode, result, 0);
  }
#endif /* HAVE_cmpmemsi.  */

  return NULL_RTX;
}
/* Expand expression EXP, which is a call to the strcmp builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

#if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
  if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
      || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
    {
      rtx arg1_rtx, arg2_rtx;
      rtx result, insn = NULL_RTX;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);

      unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
      unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
	return NULL_RTX;

      /* Stabilize the arguments in case gen_cmpstr(n)si fail.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);

      arg1_rtx = get_memory_rtx (arg1, NULL);
      arg2_rtx = get_memory_rtx (arg2, NULL);

#ifdef HAVE_cmpstrsi
      /* Try to call cmpstrsi.  */
      if (HAVE_cmpstrsi)
	{
	  machine_mode insn_mode
	    = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;

	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && REG_P (result) && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
			       GEN_INT (MIN (arg1_align, arg2_align)));
	}
#endif
#ifdef HAVE_cmpstrnsi
      /* Try to determine at least one length and call cmpstrnsi.  */
      if (!insn && HAVE_cmpstrnsi)
	{
	  tree len;
	  rtx arg3_rtx;

	  machine_mode insn_mode
	    = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
	  tree len1 = c_strlen (arg1, 1);
	  tree len2 = c_strlen (arg2, 1);

	  if (len1)
	    len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
	  if (len2)
	    len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

	  /* If we don't have a constant length for the first, use the length
	     of the second, if we know it.  We don't require a constant for
	     this case; some cost analysis could be done if both are available
	     but neither is constant.  For now, assume they're equally cheap,
	     unless one has side effects.  If both strings have constant
	     lengths, use the smaller.  */

	  if (!len1)
	    len = len2;
	  else if (!len2)
	    len = len1;
	  else if (TREE_SIDE_EFFECTS (len1))
	    len = len2;
	  else if (TREE_SIDE_EFFECTS (len2))
	    len = len1;
	  else if (TREE_CODE (len1) != INTEGER_CST)
	    len = len2;
	  else if (TREE_CODE (len2) != INTEGER_CST)
	    len = len1;
	  else if (tree_int_cst_lt (len1, len2))
	    len = len1;
	  else
	    len = len2;

	  /* If both arguments have side effects, we cannot optimize.  */
	  if (!len || TREE_SIDE_EFFECTS (len))
	    goto do_libcall;

	  arg3_rtx = expand_normal (len);

	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && REG_P (result) && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
				GEN_INT (MIN (arg1_align, arg2_align)));
	}
#endif

      if (insn)
	{
	  machine_mode mode;
	  emit_insn (insn);

	  /* Return the value in the proper mode for this function.  */
	  mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* Expand the library call ourselves using a stabilized argument
	 list to avoid re-evaluating the function's arguments twice.  */
#ifdef HAVE_cmpstrnsi
    do_libcall:
#endif
      fndecl = get_callee_fndecl (exp);
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
#endif
  return NULL_RTX;
}
/* Expand expression EXP, which is a call to the strncmp builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
			ATTRIBUTE_UNUSED machine_mode mode)
{
  location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* If c_strlen can determine an expression for one of the string
     lengths, and it doesn't have side effects, then emit cmpstrnsi
     using length MIN(strlen(string)+1, arg3).  */
#ifdef HAVE_cmpstrnsi
  if (HAVE_cmpstrnsi)
    {
      tree len, len1, len2;
      rtx arg1_rtx, arg2_rtx, arg3_rtx;
      rtx result, insn;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);
      tree arg3 = CALL_EXPR_ARG (exp, 2);

      unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
      unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
      machine_mode insn_mode
	= insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;

      len1 = c_strlen (arg1, 1);
      len2 = c_strlen (arg2, 1);

      if (len1)
	len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
      if (len2)
	len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);

      /* If we don't have a constant length for the first, use the length
	 of the second, if we know it.  We don't require a constant for
	 this case; some cost analysis could be done if both are available
	 but neither is constant.  For now, assume they're equally cheap,
	 unless one has side effects.  If both strings have constant
	 lengths, use the smaller.  */

      if (!len1)
	len = len2;
      else if (!len2)
	len = len1;
      else if (TREE_SIDE_EFFECTS (len1))
	len = len2;
      else if (TREE_SIDE_EFFECTS (len2))
	len = len1;
      else if (TREE_CODE (len1) != INTEGER_CST)
	len = len2;
      else if (TREE_CODE (len2) != INTEGER_CST)
	len = len1;
      else if (tree_int_cst_lt (len1, len2))
	len = len1;
      else
	len = len2;

      /* If both arguments have side effects, we cannot optimize.  */
      if (!len || TREE_SIDE_EFFECTS (len))
	return NULL_RTX;

      /* The actual new length parameter is MIN(len,arg3).  */
      len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
			     fold_convert_loc (loc, TREE_TYPE (len), arg3));

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
	return NULL_RTX;

      /* Make a place to write the result of the instruction.  */
      result = target;
      if (! (result != 0
	     && REG_P (result) && GET_MODE (result) == insn_mode
	     && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	result = gen_reg_rtx (insn_mode);

      /* Stabilize the arguments in case gen_cmpstrnsi fails.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);
      len = builtin_save_expr (len);

      arg1_rtx = get_memory_rtx (arg1, len);
      arg2_rtx = get_memory_rtx (arg2, len);
      arg3_rtx = expand_normal (len);
      insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			    GEN_INT (MIN (arg1_align, arg2_align)));
      if (insn)
	{
	  emit_insn (insn);

	  /* Return the value in the proper mode for this function.  */
	  mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* Expand the library call ourselves using a stabilized argument
	 list to avoid re-evaluating the function's arguments twice.  */
      fndecl = get_callee_fndecl (exp);
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
				  arg1, arg2, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
#endif
  return NULL_RTX;
}
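
/* Worked example (illustrative, not part of GCC): for

     strncmp (s, "abc", n)

   c_strlen gives a constant length of 3 for the second argument, so the
   cmpstrnsi length becomes MIN (3 + 1, n).  Reading at most strlen+1
   bytes of the constant string is safe because the comparison cannot
   meaningfully continue past its terminating NUL.  */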
/* Expand a call to __builtin_saveregs, generating the result in TARGET,
   if that's convenient.  */

static rtx
expand_builtin_saveregs (void)
{
  rtx val;
  rtx_insn *seq;

  /* Don't do __builtin_saveregs more than once in a function.
     Save the result of the first call and reuse it.  */
  if (saveregs_value != 0)
    return saveregs_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the call to the
     first insn of this function.  */

  start_sequence ();

  /* Do whatever the machine needs done in this case.  */
  val = targetm.calls.expand_builtin_saveregs ();

  seq = get_insns ();
  end_sequence ();

  saveregs_value = val;

  /* Put the insns after the NOTE that starts the function.  If this
     is inside a start_sequence, make the outer-level insn chain current, so
     the code is placed at the start of the function.  */
  push_topmost_sequence ();
  emit_insn_after (seq, entry_of_function ());
  pop_topmost_sequence ();

  return val;
}

/* Expand a call to __builtin_next_arg.  */

static rtx
expand_builtin_next_arg (void)
{
  /* Checking arguments is already done in fold_builtin_next_arg
     that must be called before this function.  */
  return expand_binop (ptr_mode, add_optab,
		       crtl->args.internal_arg_pointer,
		       crtl->args.arg_offset_rtx,
		       NULL_RTX, 0, OPTAB_LIB_WIDEN);
}
/* Make it easier for the backends by protecting the valist argument
   from multiple evaluations.  */

static tree
stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
{
  tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));

  /* The current way of determining the type of valist is completely
     bogus.  We should have the information on the va builtin instead.  */
  if (!vatype)
    vatype = targetm.fn_abi_va_list (cfun->decl);

  if (TREE_CODE (vatype) == ARRAY_TYPE)
    {
      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);

      /* For this case, the backends will be expecting a pointer to
	 vatype, but it's possible we've actually been given an array
	 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
	 So fix it.  */
      if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	{
	  tree p1 = build_pointer_type (TREE_TYPE (vatype));
	  valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
	}
    }
  else
    {
      tree pt = build_pointer_type (vatype);

      if (! needs_lvalue)
	{
	  if (! TREE_SIDE_EFFECTS (valist))
	    return valist;

	  valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
	  TREE_SIDE_EFFECTS (valist) = 1;
	}

      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);
      valist = fold_build2_loc (loc, MEM_REF,
				vatype, valist, build_int_cst (pt, 0));
    }

  return valist;
}
/* The "standard" definition of va_list is void*.  */

tree
std_build_builtin_va_list (void)
{
  return ptr_type_node;
}

/* The "standard" abi va_list is va_list_type_node.  */

tree
std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
{
  return va_list_type_node;
}

/* The "standard" type of va_list is va_list_type_node.  */

tree
std_canonical_va_list_type (tree type)
{
  tree wtype, htype;

  if (INDIRECT_REF_P (type))
    type = TREE_TYPE (type);
  else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
    type = TREE_TYPE (type);
  wtype = va_list_type_node;
  htype = type;
  /* Treat structure va_list types.  */
  if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
    htype = TREE_TYPE (htype);
  else if (TREE_CODE (wtype) == ARRAY_TYPE)
    {
      /* If va_list is an array type, the argument may have decayed
	 to a pointer type, e.g. by being passed to another function.
	 In that case, unwrap both types so that we can compare the
	 underlying records.  */
      if (TREE_CODE (htype) == ARRAY_TYPE
	  || POINTER_TYPE_P (htype))
	{
	  wtype = TREE_TYPE (wtype);
	  htype = TREE_TYPE (htype);
	}
    }
  if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
    return va_list_type_node;

  return NULL_TREE;
}
/* The "standard" implementation of va_start: just assign `nextarg' to
   the variable.  */

void
std_expand_builtin_va_start (tree valist, rtx nextarg)
{
  rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
  convert_move (va_r, nextarg, 0);

  /* We do not have any valid bounds for the pointer, so
     just store zero bounds for it.  */
  if (chkp_function_instrumented_p (current_function_decl))
    chkp_expand_bounds_reset_for_mem (valist,
				      make_tree (TREE_TYPE (valist),
						 nextarg));
}

/* Expand EXP, a call to __builtin_va_start.  */

static rtx
expand_builtin_va_start (tree exp)
{
  rtx nextarg;
  tree valist;
  location_t loc = EXPR_LOCATION (exp);

  if (call_expr_nargs (exp) < 2)
    {
      error_at (loc, "too few arguments to function %<va_start%>");
      return const0_rtx;
    }

  if (fold_builtin_next_arg (exp, true))
    return const0_rtx;

  nextarg = expand_builtin_next_arg ();
  valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);

  if (targetm.expand_builtin_va_start)
    targetm.expand_builtin_va_start (valist, nextarg);
  else
    std_expand_builtin_va_start (valist, nextarg);

  return const0_rtx;
}

/* Expand EXP, a call to __builtin_va_end.  */

static rtx
expand_builtin_va_end (tree exp)
{
  tree valist = CALL_EXPR_ARG (exp, 0);

  /* Evaluate for side effects, if needed.  I hate macros that don't.  */
  if (TREE_SIDE_EFFECTS (valist))
    expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);

  return const0_rtx;
}
/* Expand EXP, a call to __builtin_va_copy.  We do this as a
   builtin rather than just as an assignment in stdarg.h because of the
   nastiness of array-type va_list types.  */

static rtx
expand_builtin_va_copy (tree exp)
{
  tree dst, src, t;
  location_t loc = EXPR_LOCATION (exp);

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  dst = stabilize_va_list_loc (loc, dst, 1);
  src = stabilize_va_list_loc (loc, src, 0);

  gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);

  if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
    {
      t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
  else
    {
      rtx dstb, srcb, size;

      /* Evaluate to pointers.  */
      dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
      srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
      size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
			  NULL_RTX, VOIDmode, EXPAND_NORMAL);

      dstb = convert_memory_address (Pmode, dstb);
      srcb = convert_memory_address (Pmode, srcb);

      /* "Dereference" to BLKmode memories.  */
      dstb = gen_rtx_MEM (BLKmode, dstb);
      set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
      set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
      srcb = gen_rtx_MEM (BLKmode, srcb);
      set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
      set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));

      /* Copy.  */
      emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
    }

  return const0_rtx;
}
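
/* Illustrative note (not part of GCC): on targets where va_list is an
   array type (e.g. the x86-64 SysV ABI, where it is a one-element array
   of a struct), the simple-assignment path above cannot be used, so

     va_list a, b;
     __builtin_va_copy (a, b);

   is expanded as a BLKmode block move of TYPE_SIZE_UNIT bytes instead.  */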
/* Expand a call to one of the builtin functions __builtin_frame_address or
   __builtin_return_address.  */

static rtx
expand_builtin_frame_address (tree fndecl, tree exp)
{
  /* The argument must be a nonnegative integer constant.
     It counts the number of frames to scan up the stack.
     The value is the return address saved in that frame.  */
  if (call_expr_nargs (exp) == 0)
    /* Warning about missing arg was already issued.  */
    return const0_rtx;
  else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
    {
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	error ("invalid argument to %<__builtin_frame_address%>");
      else
	error ("invalid argument to %<__builtin_return_address%>");
      return const0_rtx;
    }
  else
    {
      rtx tem
	= expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
				      tree_to_uhwi (CALL_EXPR_ARG (exp, 0)));

      /* Some ports cannot access arbitrary stack frames.  */
      if (tem == NULL)
	{
	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	    warning (0, "unsupported argument to %<__builtin_frame_address%>");
	  else
	    warning (0, "unsupported argument to %<__builtin_return_address%>");
	  return const0_rtx;
	}

      /* For __builtin_frame_address, return what we've got.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	return tem;

      if (!REG_P (tem)
	  && ! CONSTANT_P (tem))
	tem = copy_addr_to_reg (tem);
      return tem;
    }
}
/* Expand EXP, a call to the alloca builtin.  Return NULL_RTX if we
   failed and the caller should emit a normal call.  CANNOT_ACCUMULATE
   is the same as for allocate_dynamic_stack_space.  */

static rtx
expand_builtin_alloca (tree exp, bool cannot_accumulate)
{
  rtx op0;
  rtx result;
  bool valid_arglist;
  unsigned int align;
  bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
			    == BUILT_IN_ALLOCA_WITH_ALIGN);

  valid_arglist
    = (alloca_with_align
       ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
       : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));

  if (!valid_arglist)
    return NULL_RTX;

  /* Compute the argument.  */
  op0 = expand_normal (CALL_EXPR_ARG (exp, 0));

  /* Compute the alignment.  */
  align = (alloca_with_align
	   ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
	   : BIGGEST_ALIGNMENT);

  /* Allocate the desired space.  */
  result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
  result = convert_memory_address (ptr_mode, result);

  return result;
}
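
/* Illustrative sketch (not part of GCC): both source forms handled above,

     void *p = __builtin_alloca (n);
     void *q = __builtin_alloca_with_align (n, 64);

   reach allocate_dynamic_stack_space; the second supplies an explicit
   alignment in bits (here 64 bits, i.e. 8 bytes) from argument 1, while
   the plain form uses BIGGEST_ALIGNMENT.  */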
/* Expand a call to bswap builtin in EXP.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
		      rtx subtarget)
{
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  op0 = expand_expr (arg,
		     subtarget && GET_MODE (subtarget) == target_mode
		     ? subtarget : NULL_RTX,
		     target_mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != target_mode)
    op0 = convert_to_mode (target_mode, op0, 1);

  target = expand_unop (target_mode, bswap_optab, op0, target, 1);

  gcc_assert (target);

  return convert_to_mode (target_mode, target, 1);
}
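
/* For illustration (not part of GCC): a call such as

     unsigned int y = __builtin_bswap32 (x);   reverses the four bytes,
                                               0x11223344 -> 0x44332211

   is expanded through bswap_optab into the target's byte-swap pattern
   when one exists; expand_unop otherwise synthesizes the operation.  */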
/* Expand a call to a unary builtin in EXP.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
		     rtx subtarget, optab op_optab)
{
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Compute the argument.  */
  op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
		     (subtarget
		      && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
			  == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
		     VOIDmode, EXPAND_NORMAL);
  /* Compute op, into TARGET if possible.
     Set TARGET to wherever the result comes back.  */
  target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
			op_optab, op0, target, op_optab != clrsb_optab);
  gcc_assert (target);

  return convert_to_mode (target_mode, target, 0);
}
/* Expand a call to __builtin_expect.  We just return our argument
   as the builtin_expect semantic should've been already executed by
   tree branch prediction pass.  */

static rtx
expand_builtin_expect (tree exp, rtx target)
{
  tree arg;

  if (call_expr_nargs (exp) < 2)
    return const0_rtx;
  arg = CALL_EXPR_ARG (exp, 0);

  target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
  /* When guessing was done, the hints should be already stripped away.  */
  gcc_assert (!flag_guess_branch_prob
	      || optimize == 0 || seen_error ());
  return target;
}
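
/* Usage note (illustrative, not part of GCC): by expansion time a call
   such as

     if (__builtin_expect (x == 0, 0))
       handle_rare_case ();

   has already served its purpose: the tree branch-prediction pass
   consumed the hint, so the expander only evaluates and returns the
   first argument.  */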
/* Expand a call to __builtin_assume_aligned.  We just return our first
   argument as the builtin_assume_aligned semantic should've been already
   executed by CCP.  */

static rtx
expand_builtin_assume_aligned (tree exp, rtx target)
{
  if (call_expr_nargs (exp) < 2)
    return const0_rtx;
  target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
			EXPAND_NORMAL);
  gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
	      && (call_expr_nargs (exp) < 3
		  || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
  return target;
}
void
expand_builtin_trap (void)
{
  if (targetm.have_trap ())
    {
      rtx_insn *insn = emit_insn (targetm.gen_trap ());
      /* For trap insns when not accumulating outgoing args force
	 REG_ARGS_SIZE note to prevent crossjumping of calls with
	 different args sizes.  */
      if (!ACCUMULATE_OUTGOING_ARGS)
	add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
    }
  else
    emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
  emit_barrier ();
}

/* Expand a call to __builtin_unreachable.  We do nothing except emit
   a barrier saying that control flow will not pass here.

   It is the responsibility of the program being compiled to ensure
   that control flow never reaches __builtin_unreachable.  */

static void
expand_builtin_unreachable (void)
{
  emit_barrier ();
}
/* Expand EXP, a call to fabs, fabsf or fabsl.
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function inline.  If convenient, the result should be placed
   in TARGET.  SUBTARGET may be used as the target for computing
   the operand.  */

static rtx
expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
{
  machine_mode mode;
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
  return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
}

/* Expand EXP, a call to copysign, copysignf, or copysignl.
   Return NULL if a normal call should be emitted rather than expanding the
   function inline.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing the operand.  */

static rtx
expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
{
  rtx op0, op1;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

  arg = CALL_EXPR_ARG (exp, 1);
  op1 = expand_normal (arg);

  return expand_copysign (op0, op1, target);
}
/* Expand a call to __builtin___clear_cache.  */

static rtx
expand_builtin___clear_cache (tree exp)
{
  if (!targetm.code_for_clear_cache)
    {
#ifdef CLEAR_INSN_CACHE
      /* There is no "clear_cache" insn, and __clear_cache() in libgcc
	 does something.  Just do the default expansion to a call to
	 __clear_cache().  */
      return NULL_RTX;
#else
      /* There is no "clear_cache" insn, and __clear_cache() in libgcc
	 does nothing.  There is no need to call it.  Do nothing.  */
      return const0_rtx;
#endif /* CLEAR_INSN_CACHE */
    }

  /* We have a "clear_cache" insn, and it will handle everything.  */
  tree begin, end;
  rtx begin_rtx, end_rtx;

  /* We must not expand to a library call.  If we did, any
     fallback library function in libgcc that might contain a call to
     __builtin___clear_cache() would recurse infinitely.  */
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    {
      error ("both arguments to %<__builtin___clear_cache%> must be pointers");
      return const0_rtx;
    }

  if (targetm.have_clear_cache ())
    {
      struct expand_operand ops[2];

      begin = CALL_EXPR_ARG (exp, 0);
      begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);

      end = CALL_EXPR_ARG (exp, 1);
      end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);

      create_address_operand (&ops[0], begin_rtx);
      create_address_operand (&ops[1], end_rtx);
      if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
	return const0_rtx;
    }
  return const0_rtx;
}
/* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT.  */

static rtx
round_trampoline_addr (rtx tramp)
{
  rtx temp, addend, mask;

  /* If we don't need too much alignment, we'll have been guaranteed
     proper alignment by get_trampoline_type.  */
  if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
    return tramp;

  /* Round address up to desired boundary.  */
  temp = gen_reg_rtx (Pmode);
  addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
  mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);

  temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
			      temp, 0, OPTAB_LIB_WIDEN);
  tramp = expand_simple_binop (Pmode, AND, temp, mask,
			       temp, 0, OPTAB_LIB_WIDEN);

  return tramp;
}
static rtx
expand_builtin_init_trampoline (tree exp, bool onstack)
{
  tree t_tramp, t_func, t_chain;
  rtx m_tramp, r_tramp, r_chain, tmp;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
			 POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_tramp = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  r_tramp = expand_normal (t_tramp);
  m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
  MEM_NOTRAP_P (m_tramp) = 1;

  /* If ONSTACK, the TRAMP argument should be the address of a field
     within the local function's FRAME decl.  Either way, let's see if
     we can fill in the MEM_ATTRs for this memory.  */
  if (TREE_CODE (t_tramp) == ADDR_EXPR)
    set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);

  /* Creator of a heap trampoline is responsible for making sure the
     address is aligned to at least STACK_BOUNDARY.  Normally malloc
     will ensure this anyhow.  */
  tmp = round_trampoline_addr (r_tramp);
  if (tmp != r_tramp)
    {
      m_tramp = change_address (m_tramp, BLKmode, tmp);
      set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
      set_mem_size (m_tramp, TRAMPOLINE_SIZE);
    }

  /* The FUNC argument should be the address of the nested function.
     Extract the actual function decl to pass to the hook.  */
  gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
  t_func = TREE_OPERAND (t_func, 0);
  gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);

  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the trampoline.  */
  targetm.calls.trampoline_init (m_tramp, t_func, r_chain);

  if (onstack)
    {
      trampolines_created = 1;

      warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
		  "trampoline generated for nested function %qD", t_func);
    }

  return const0_rtx;
}

static rtx
expand_builtin_adjust_trampoline (tree exp)
{
  rtx tramp;

  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
  tramp = round_trampoline_addr (tramp);
  if (targetm.calls.trampoline_adjust_address)
    tramp = targetm.calls.trampoline_adjust_address (tramp);

  return tramp;
}
/* Expand the call EXP to the built-in signbit, signbitf or signbitl
   function.  The function first checks whether the back end provides
   an insn to implement signbit for the respective mode.  If not, it
   checks whether the floating point format of the value is such that
   the sign bit can be extracted.  If that is not the case, the
   function returns NULL_RTX to indicate that a normal call should be
   emitted rather than expanding the function in-line.  EXP is the
   expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  */

static rtx
expand_builtin_signbit (tree exp, rtx target)
{
  const struct real_format *fmt;
  machine_mode fmode, imode, rmode;
  tree arg;
  int word, bitpos;
  enum insn_code icode;
  rtx temp;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  fmode = TYPE_MODE (TREE_TYPE (arg));
  rmode = TYPE_MODE (TREE_TYPE (exp));
  fmt = REAL_MODE_FORMAT (fmode);

  arg = builtin_save_expr (arg);

  /* Expand the argument yielding a RTX expression.  */
  temp = expand_normal (arg);

  /* Check if the back end provides an insn that handles signbit for the
     argument's mode.  */
  icode = optab_handler (signbit_optab, fmode);
  if (icode != CODE_FOR_nothing)
    {
      rtx_insn *last = get_last_insn ();
      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
	return target;
      delete_insns_since (last);
    }

  /* For floating point formats without a sign bit, implement signbit
     as "ARG < 0.0".  */
  bitpos = fmt->signbit_ro;
  if (bitpos < 0)
    {
      /* But we can't do this if the format supports signed zero.  */
      if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
	return NULL_RTX;

      arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
			     build_real (TREE_TYPE (arg), dconst0));
      return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
    }

  if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
    {
      imode = int_mode_for_mode (fmode);
      if (imode == BLKmode)
	return NULL_RTX;
      temp = gen_lowpart (imode, temp);
    }
  else
    {
      imode = word_mode;
      /* Handle targets with different FP word orders.  */
      if (FLOAT_WORDS_BIG_ENDIAN)
	word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
      else
	word = bitpos / BITS_PER_WORD;
      temp = operand_subword_force (temp, word, fmode);
      bitpos = bitpos % BITS_PER_WORD;
    }

  /* Force the intermediate word_mode (or narrower) result into a
     register.  This avoids attempting to create paradoxical SUBREGs
     of floating point modes below.  */
  temp = force_reg (imode, temp);

  /* If the bitpos is within the "result mode" lowpart, the operation
     can be implemented with a single bitwise AND.  Otherwise, we need
     a right shift and an AND.  */

  if (bitpos < GET_MODE_BITSIZE (rmode))
    {
      wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));

      if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
	temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp,
			   immed_wide_int_const (mask, rmode),
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }
  else
    {
      /* Perform a logical right shift to place the signbit in the least
	 significant bit, then truncate the result to the desired mode
	 and mask just this bit.  */
      temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
      temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp, const1_rtx,
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }

  return temp;
}
/* Expand fork or exec calls.  TARGET is the desired target of the
   call.  EXP is the call.  FN is the
   identifier of the actual function.  IGNORE is nonzero if the
   value is to be ignored.  */

static rtx
expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
{
  tree id, decl;
  tree call;

  /* If we are not profiling, just call the function.  */
  if (!profile_arc_flag)
    return NULL_RTX;

  /* Otherwise call the wrapper.  This should be equivalent for the rest of
     compiler, so the code does not diverge, and the wrapper may run the
     code necessary for keeping the profiling sane.  */

  switch (DECL_FUNCTION_CODE (fn))
    {
    case BUILT_IN_FORK:
      id = get_identifier ("__gcov_fork");
      break;

    case BUILT_IN_EXECL:
      id = get_identifier ("__gcov_execl");
      break;

    case BUILT_IN_EXECV:
      id = get_identifier ("__gcov_execv");
      break;

    case BUILT_IN_EXECLP:
      id = get_identifier ("__gcov_execlp");
      break;

    case BUILT_IN_EXECLE:
      id = get_identifier ("__gcov_execle");
      break;

    case BUILT_IN_EXECVP:
      id = get_identifier ("__gcov_execvp");
      break;

    case BUILT_IN_EXECVE:
      id = get_identifier ("__gcov_execve");
      break;

    default:
      gcc_unreachable ();
    }

  decl = build_decl (DECL_SOURCE_LOCATION (fn),
		     FUNCTION_DECL, id, TREE_TYPE (fn));
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  TREE_NOTHROW (decl) = 1;
  DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
  DECL_VISIBILITY_SPECIFIED (decl) = 1;
  call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
  return expand_call (call, target, ignore);
}
/* Reconstitute a mode for a __sync intrinsic operation.  Since the type of
   the pointer in these functions is void*, the tree optimizers may remove
   casts.  The mode computed in expand_builtin isn't reliable either, due
   to __sync_bool_compare_and_swap.

   FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
   group of builtins.  This gives us log2 of the mode size.  */

static inline machine_mode
get_builtin_sync_mode (int fcode_diff)
{
  /* The size is not negotiable, so ask not to get BLKmode in return
     if the target indicates that a smaller size would be better.  */
  return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
}
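
/* Worked example (illustrative, not part of GCC): the __sync builtins
   come in _1/_2/_4/_8/_16 groups, so for a FOO_4 variant fcode_diff is
   2 and the mode requested is

     BITS_PER_UNIT << 2 == 32 bits,

   i.e. SImode on typical targets.  */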
/* Expand the memory expression LOC and return the appropriate memory operand
   for the builtin_sync operations.  */

static rtx
get_builtin_sync_mem (tree loc, machine_mode mode)
{
  rtx addr, mem;

  addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
  addr = convert_memory_address (Pmode, addr);

  /* Note that we explicitly do not want any alias information for this
     memory, so that we kill all other live memories.  Otherwise we don't
     satisfy the full barrier semantics of the intrinsic.  */
  mem = validize_mem (gen_rtx_MEM (mode, addr));

  /* The alignment needs to be at least according to that of the mode.  */
  set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
			   get_pointer_alignment (loc)));
  set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
  MEM_VOLATILE_P (mem) = 1;

  return mem;
}

/* Make sure an argument is in the right mode.
   EXP is the tree argument.
   MODE is the mode it should be in.  */

static rtx
expand_expr_force_mode (tree exp, machine_mode mode)
{
  rtx val;
  machine_mode old_mode;

  val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
  /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
     of CONST_INTs, where we know the old_mode only from the call argument.  */

  old_mode = GET_MODE (val);
  if (old_mode == VOIDmode)
    old_mode = TYPE_MODE (TREE_TYPE (exp));
  val = convert_modes (mode, old_mode, val, 1);
  return val;
}
/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
   EXP is the CALL_EXPR.  CODE is the rtx code
   that corresponds to the arithmetic or logical operation from the name;
   an exception here is that NOT actually means NAND.  TARGET is an optional
   place for us to store the results; AFTER is true if this is the
   fetch_and_xxx form.  */

static rtx
expand_builtin_sync_operation (machine_mode mode, tree exp,
			       enum rtx_code code, bool after,
			       rtx target)
{
  rtx val, mem;
  location_t loc = EXPR_LOCATION (exp);

  if (code == NOT && warn_sync_nand)
    {
      tree fndecl = get_callee_fndecl (exp);
      enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

      static bool warned_f_a_n, warned_n_a_f;

      switch (fcode)
	{
	case BUILT_IN_SYNC_FETCH_AND_NAND_1:
	case BUILT_IN_SYNC_FETCH_AND_NAND_2:
	case BUILT_IN_SYNC_FETCH_AND_NAND_4:
	case BUILT_IN_SYNC_FETCH_AND_NAND_8:
	case BUILT_IN_SYNC_FETCH_AND_NAND_16:
	  if (warned_f_a_n)
	    break;

	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_f_a_n = true;
	  break;

	case BUILT_IN_SYNC_NAND_AND_FETCH_1:
	case BUILT_IN_SYNC_NAND_AND_FETCH_2:
	case BUILT_IN_SYNC_NAND_AND_FETCH_4:
	case BUILT_IN_SYNC_NAND_AND_FETCH_8:
	case BUILT_IN_SYNC_NAND_AND_FETCH_16:
	  if (warned_n_a_f)
	    break;

	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_n_a_f = true;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
				 after);
}
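
/* Usage note (illustrative, not part of GCC): since GCC 4.4,

     __sync_fetch_and_nand (p, v);

   implements *p = ~(*p & v) (a true NAND), which is why the
   BUILT_IN_SYNC_*_NAND_* cases above emit the changed-semantics note;
   older releases computed *p = ~*p & v instead.  */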
/* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
   intrinsics.  EXP is the CALL_EXPR.  IS_BOOL is
   true if this is the boolean form.  TARGET is a place for us to store the
   results; this is NOT optional if IS_BOOL is true.  */

static rtx
expand_builtin_compare_and_swap (machine_mode mode, tree exp,
				 bool is_bool, rtx target)
{
  rtx old_val, new_val, mem;
  rtx *pbool, *poval;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
  new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);

  pbool = poval = NULL;
  if (target != const0_rtx)
    {
      if (is_bool)
	pbool = &target;
      else
	poval = &target;
    }
  if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
				       false, MEMMODEL_SYNC_SEQ_CST,
				       MEMMODEL_SYNC_SEQ_CST))
    return NULL_RTX;

  return target;
}

/* Expand the __sync_lock_test_and_set intrinsic.  Note that the most
   general form is actually an atomic exchange, and some targets only
   support a reduced form with the second argument being a constant 1.
   EXP is the CALL_EXPR; TARGET is an optional place for us to store
   the results.  */

static rtx
expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
				       rtx target)
{
  rtx val, mem;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_sync_lock_test_and_set (target, mem, val);
}

/* Expand the __sync_lock_release intrinsic.  EXP is the CALL_EXPR.  */

static void
expand_builtin_sync_lock_release (machine_mode mode, tree exp)
{
  rtx mem;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
}
/* Given an integer representing an ``enum memmodel'', verify its
   correctness and return the memory model enum.  */

static enum memmodel
get_memmodel (tree exp)
{
  rtx op;
  unsigned HOST_WIDE_INT val;

  /* If the parameter is not a constant, it's a run time value so we'll just
     convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking.  */
  if (TREE_CODE (exp) != INTEGER_CST)
    return MEMMODEL_SEQ_CST;

  op = expand_normal (exp);

  val = INTVAL (op);
  if (targetm.memmodel_check)
    val = targetm.memmodel_check (val);
  else if (val & ~MEMMODEL_MASK)
    {
      warning (OPT_Winvalid_memory_model,
	       "Unknown architecture specifier in memory model to builtin.");
      return MEMMODEL_SEQ_CST;
    }

  /* Should never see a user explicit SYNC memmodel, so >= LAST works.  */
  if (memmodel_base (val) >= MEMMODEL_LAST)
    {
      warning (OPT_Winvalid_memory_model,
	       "invalid memory model argument to builtin");
      return MEMMODEL_SEQ_CST;
    }

  /* Workaround for Bugzilla 59448.  GCC doesn't track consume properly, so
     be conservative and promote consume to acquire.  */
  if (val == MEMMODEL_CONSUME)
    val = MEMMODEL_ACQUIRE;

  return (enum memmodel) val;
}
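
/* For illustration (not part of GCC): the argument checked above is the
   __ATOMIC_* constant in a call such as

     int v = __atomic_load_n (p, __ATOMIC_CONSUME);

   which, per the Bugzilla 59448 workaround, is treated exactly like
   __ATOMIC_ACQUIRE.  */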
/* Expand the __atomic_exchange intrinsic:
   	TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
{
  rtx val, mem;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_exchange (target, mem, val, model);
}
/* Expand the __atomic_compare_exchange intrinsic:
   	bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
					TYPE desired, BOOL weak,
					enum memmodel success,
					enum memmodel failure)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
					rtx target)
{
  rtx expect, desired, mem, oldval;
  rtx_code_label *label;
  enum memmodel success, failure;
  tree weak;
  bool is_weak;

  success = get_memmodel (CALL_EXPR_ARG (exp, 4));
  failure = get_memmodel (CALL_EXPR_ARG (exp, 5));

  if (failure > success)
    {
      warning (OPT_Winvalid_memory_model,
	       "failure memory model cannot be stronger than success memory "
	       "model for %<__atomic_compare_exchange%>");
      success = MEMMODEL_SEQ_CST;
    }

  if (is_mm_release (failure) || is_mm_acq_rel (failure))
    {
      warning (OPT_Winvalid_memory_model,
	       "invalid failure memory model for "
	       "%<__atomic_compare_exchange%>");
      failure = MEMMODEL_SEQ_CST;
      success = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  expect = expand_normal (CALL_EXPR_ARG (exp, 1));
  expect = convert_memory_address (Pmode, expect);
  expect = gen_rtx_MEM (mode, expect);
  desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);

  weak = CALL_EXPR_ARG (exp, 3);
  is_weak = false;
  if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
    is_weak = true;

  if (target == const0_rtx)
    target = NULL;

  /* Lest the rtl backend create a race condition with an improper store
     to memory, always create a new pseudo for OLDVAL.  */
  oldval = NULL;

  if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
				       is_weak, success, failure))
    return NULL_RTX;

  /* Conditionally store back to EXPECT, lest we create a race condition
     with an improper store to memory.  */
  /* ??? With a rearrangement of atomics at the gimple level, we can handle
     the normal case where EXPECT is totally private, i.e. a register.  At
     which point the store can be unconditional.  */
  label = gen_label_rtx ();
  emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
			   GET_MODE (target), 1, label);
  emit_move_insn (expect, oldval);
  emit_label (label);

  return target;
}
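
/* Usage note (illustrative, not part of GCC): the conditional store back
   to EXPECT implements the C11-style interface

     int expected = old;
     if (!__atomic_compare_exchange_n (p, &expected, new_val, 0,
				       __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST))
       retry_with (expected);

   where, on failure, expected is updated with the value observed in *p
   so the caller can retry.  */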
/* Expand the __atomic_load intrinsic:
   	TYPE __atomic_load (TYPE *object, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
{
  rtx mem;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 1));
  if (is_mm_release (model) || is_mm_acq_rel (model))
    {
      warning (OPT_Winvalid_memory_model,
	       "invalid memory model for %<__atomic_load%>");
      model = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operand.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  return expand_atomic_load (target, mem, model);
}

/* Expand the __atomic_store intrinsic:
   	void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_store (machine_mode mode, tree exp)
{
  rtx mem, val;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));
  if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
	|| is_mm_release (model)))
    {
      warning (OPT_Winvalid_memory_model,
	       "invalid memory model for %<__atomic_store%>");
      model = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_store (mem, val, model, false);
}
/* Expand the __atomic_fetch_XXX intrinsic:
   	TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.
   CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
   FETCH_AFTER is true if returning the result of the operation.
   FETCH_AFTER is false if returning the value before the operation.
   IGNORE is true if the result is not used.
   EXT_CALL is the correct builtin for an external call if this cannot be
   resolved to an instruction sequence.  */

static rtx
expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
				enum rtx_code code, bool fetch_after,
				bool ignore, enum built_in_function ext_call)
{
  rtx val, mem, ret;
  enum memmodel model;
  tree fndecl;
  tree addr;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  /* Only try generating instructions if inlining is turned on.  */
  if (flag_inline_atomics)
    {
      ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
      if (ret)
	return ret;
    }

  /* Return if a different routine isn't needed for the library call.  */
  if (ext_call == BUILT_IN_NONE)
    return NULL_RTX;

  /* Change the call to the specified function.  */
  fndecl = get_callee_fndecl (exp);
  addr = CALL_EXPR_FN (exp);
  STRIP_NOPS (addr);

  gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
  TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);

  /* Expand the call here so we can emit trailing code.  */
  ret = expand_call (exp, target, ignore);

  /* Replace the original function just in case it matters.  */
  TREE_OPERAND (addr, 0) = fndecl;

  /* Then issue the arithmetic correction to return the right result.  */
  if (!ignore)
    {
      if (code == NOT)
	{
	  ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
				     OPTAB_LIB_WIDEN);
	  ret = expand_simple_unop (mode, NOT, ret, target, true);
	}
      else
	ret = expand_simple_binop (mode, code, ret, val, target, true,
				   OPTAB_LIB_WIDEN);
    }
  return ret;
}
#ifndef HAVE_atomic_clear
# define HAVE_atomic_clear 0
# define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
#endif

/* Expand an atomic clear operation.
	void _atomic_clear (BOOL *obj, enum memmodel)
   EXP is the call expression.  */

static rtx
expand_builtin_atomic_clear (tree exp)
{
  machine_mode mode;
  rtx mem, ret;
  enum memmodel model;

  mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  model = get_memmodel (CALL_EXPR_ARG (exp, 1));

  if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
    {
      warning (OPT_Winvalid_memory_model,
	       "invalid memory model for %<__atomic_store%>");
      model = MEMMODEL_SEQ_CST;
    }

  if (HAVE_atomic_clear)
    {
      emit_insn (gen_atomic_clear (mem, model));
      return const0_rtx;
    }

  /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
     Failing that, a store is issued by __atomic_store.  The only way this can
     fail is if the bool type is larger than a word size.  Unlikely, but
     handle it anyway for completeness.  Assume a single threaded model since
     there is no atomic support in this case, and no barriers are required.  */
  ret = expand_atomic_store (mem, const0_rtx, model, true);
  if (!ret)
    emit_move_insn (mem, const0_rtx);
  return const0_rtx;
}

/* Expand an atomic test_and_set operation.
	bool _atomic_test_and_set (BOOL *obj, enum memmodel)
   EXP is the call expression.  */

static rtx
expand_builtin_atomic_test_and_set (tree exp, rtx target)
{
  rtx mem;
  enum memmodel model;
  machine_mode mode;

  mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  model = get_memmodel (CALL_EXPR_ARG (exp, 1));

  return expand_atomic_test_and_set (target, mem, model);
}
/* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
   this architecture.  If ARG1 is NULL, use typical alignment for size ARG0.  */

static tree
fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
{
  int size;
  machine_mode mode;
  unsigned int mode_align, type_align;

  if (TREE_CODE (arg0) != INTEGER_CST)
    return NULL_TREE;

  size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
  mode = mode_for_size (size, MODE_INT, 0);
  mode_align = GET_MODE_ALIGNMENT (mode);

  if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
    type_align = mode_align;
  else
    {
      tree ttype = TREE_TYPE (arg1);

      /* This function is usually invoked and folded immediately by the front
	 end before anything else has a chance to look at it.  The pointer
	 parameter at this point is usually cast to a void *, so check for that
	 and look past the cast.  */
      if (CONVERT_EXPR_P (arg1) && POINTER_TYPE_P (ttype)
	  && VOID_TYPE_P (TREE_TYPE (ttype)))
	arg1 = TREE_OPERAND (arg1, 0);

      ttype = TREE_TYPE (arg1);
      gcc_assert (POINTER_TYPE_P (ttype));

      /* Get the underlying type of the object.  */
      ttype = TREE_TYPE (ttype);
      type_align = TYPE_ALIGN (ttype);
    }

  /* If the object has smaller alignment, the lock free routines cannot
     be used.  */
  if (type_align < mode_align)
    return boolean_false_node;

  /* Check if a compare_and_swap pattern exists for the mode which represents
     the required size.  The pattern is not allowed to fail, so the existence
     of the pattern indicates support is present.  */
  if (can_compare_and_swap_p (mode, true))
    return boolean_true_node;
  else
    return boolean_false_node;
}
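
/* Worked example (illustrative, not part of GCC): on a typical x86-64
   target,

     __atomic_always_lock_free (8, 0)

   folds to true because a compare-and-swap pattern exists for the
   naturally aligned 8-byte mode, whereas a pointer to an object whose
   type alignment is below GET_MODE_ALIGNMENT for that mode makes the
   fold return false.  */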
5707 /* Return true if the parameters to call EXP represent an object which will
5708 always generate lock free instructions. The first argument represents the
5709 size of the object, and the second parameter is a pointer to the object
5710 itself. If NULL is passed for the object, then the result is based on
5711 typical alignment for an object of the specified size. Otherwise return
5715 expand_builtin_atomic_always_lock_free (tree exp
)
5718 tree arg0
= CALL_EXPR_ARG (exp
, 0);
5719 tree arg1
= CALL_EXPR_ARG (exp
, 1);
5721 if (TREE_CODE (arg0
) != INTEGER_CST
)
5723 error ("non-constant argument 1 to __atomic_always_lock_free");
5727 size
= fold_builtin_atomic_always_lock_free (arg0
, arg1
);
5728 if (size
== boolean_true_node
)
5733 /* Return a one or zero if it can be determined that object ARG1 of size ARG
5734 is lock free on this architecture. */
5737 fold_builtin_atomic_is_lock_free (tree arg0
, tree arg1
)
5739 if (!flag_inline_atomics
)
5742 /* If it isn't always lock free, don't generate a result. */
5743 if (fold_builtin_atomic_always_lock_free (arg0
, arg1
) == boolean_true_node
)
5744 return boolean_true_node
;
/* Return true if the parameters to call EXP represent an object which will
   always generate lock free instructions.  The first argument represents the
   size of the object, and the second parameter is a pointer to the object
   itself.  If NULL is passed for the object, then the result is based on
   typical alignment for an object of the specified size.  Otherwise return
   NULL.  */

static rtx
expand_builtin_atomic_is_lock_free (tree exp)
{
  tree size;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);

  if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
    {
      error ("non-integer argument 1 to __atomic_is_lock_free");
      return NULL_RTX;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* If the value is known at compile time, return the RTX for it.  */
  size = fold_builtin_atomic_is_lock_free (arg0, arg1);
  if (size == boolean_true_node)
    return const1_rtx;

  return NULL_RTX;
}
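
/* Illustrative note (not from the original source): unlike the "always"
   variant, __atomic_is_lock_free may defer to a runtime query:

     if (__atomic_is_lock_free (sizeof (x), &x))  // const1_rtx if provably
       fast_path ();				  // lock-free at compile time
     else
       locked_path ();				  // else the library decides

   When fold_builtin_atomic_is_lock_free returns NULL_TREE, the call is
   left to be resolved by the atomic support library at run time.  */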
/* Expand the __atomic_thread_fence intrinsic:
   	void __atomic_thread_fence (enum memmodel)
   EXP is the CALL_EXPR.  */

static void
expand_builtin_atomic_thread_fence (tree exp)
{
  enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
  expand_mem_thread_fence (model);
}

/* Expand the __atomic_signal_fence intrinsic:
   	void __atomic_signal_fence (enum memmodel)
   EXP is the CALL_EXPR.  */

static void
expand_builtin_atomic_signal_fence (tree exp)
{
  enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
  expand_mem_signal_fence (model);
}

/* Expand the __sync_synchronize intrinsic.  */

static void
expand_builtin_sync_synchronize (void)
{
  expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
}
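
/* Illustrative note (not from the original source): the three fence
   builtins differ only in the barrier they request, e.g.

     __atomic_thread_fence (__ATOMIC_RELEASE);  // inter-thread fence
     __atomic_signal_fence (__ATOMIC_SEQ_CST);  // compiler barrier only
     __sync_synchronize ();			 // full seq-cst fence

   A signal fence typically emits no instructions; it merely stops the
   compiler from reordering memory accesses across it.  */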
static rtx
expand_builtin_thread_pointer (tree exp, rtx target)
{
  enum insn_code icode;
  if (!validate_arglist (exp, VOID_TYPE))
    return const0_rtx;
  icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand op;
      /* If the target is not suitable then create a new target.  */
      if (target == NULL_RTX
	  || !REG_P (target)
	  || GET_MODE (target) != Pmode)
	target = gen_reg_rtx (Pmode);
      create_output_operand (&op, target, Pmode);
      expand_insn (icode, 1, &op);
      return target;
    }
  error ("__builtin_thread_pointer is not supported on this target");
  return const0_rtx;
}
static void
expand_builtin_set_thread_pointer (tree exp)
{
  enum insn_code icode;
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return;
  icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand op;
      rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
			     Pmode, EXPAND_NORMAL);
      create_input_operand (&op, val, Pmode);
      expand_insn (icode, 1, &op);
      return;
    }
  error ("__builtin_set_thread_pointer is not supported on this target");
}
/* Emit code to restore the current value of stack.  */

static void
expand_stack_restore (tree var)
{
  rtx_insn *prev;
  rtx sa = expand_normal (var);

  sa = convert_memory_address (Pmode, sa);

  prev = get_last_insn ();
  emit_stack_restore (SAVE_BLOCK, sa);

  record_new_stack_level ();

  fixup_args_size_notes (prev, get_last_insn (), 0);
}

/* Emit code to save the current value of stack.  */

static rtx
expand_stack_save (void)
{
  rtx ret = NULL_RTX;

  emit_stack_save (SAVE_BLOCK, &ret);
  return ret;
}
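
/* Illustrative note (not from the original source): these two expanders
   implement the implicit stack bookkeeping around variable-length
   arrays, conceptually

     {
       void *sp = __builtin_stack_save ();
       char vla[n];			// alloca-like allocation
       // ...
       __builtin_stack_restore (sp);	// pops the VLA on scope exit
     }

   which is why a VLA's storage disappears when its block is left.  */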
/* Expand OpenACC acc_on_device.

   This has to happen late (that is, not in early folding; expand_builtin_*,
   rather than fold_builtin_*), as we have to act differently for host and
   acceleration device (ACCEL_COMPILER conditional).  */

static rtx
expand_builtin_acc_on_device (tree exp ATTRIBUTE_UNUSED,
			      rtx target ATTRIBUTE_UNUSED)
{
#ifdef ACCEL_COMPILER
  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree arg = CALL_EXPR_ARG (exp, 0);

  /* Return (arg == v1 || arg == v2) ? 1 : 0.  */
  machine_mode v_mode = TYPE_MODE (TREE_TYPE (arg));
  rtx v = expand_normal (arg), v1, v2;
  v1 = GEN_INT (GOMP_DEVICE_NOT_HOST);
  v2 = GEN_INT (ACCEL_COMPILER_acc_device);
  machine_mode target_mode = TYPE_MODE (integer_type_node);
  if (!target || !register_operand (target, target_mode))
    target = gen_reg_rtx (target_mode);
  emit_move_insn (target, const1_rtx);
  rtx_code_label *done_label = gen_label_rtx ();
  do_compare_rtx_and_jump (v, v1, EQ, false, v_mode, NULL_RTX,
			   NULL, done_label, PROB_EVEN);
  do_compare_rtx_and_jump (v, v2, EQ, false, v_mode, NULL_RTX,
			   NULL, done_label, PROB_EVEN);
  emit_move_insn (target, const0_rtx);
  emit_label (done_label);

  return target;
#else
  return NULL_RTX;
#endif
}
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

rtx
expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
		int ignore)
{
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
  int flags;

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return targetm.expand_builtin (exp, target, subtarget, mode, ignore);

  /* When ASan is enabled, we don't want to expand some memory/string
     builtins and rely on libsanitizer's hooks.  This allows us to avoid
     redundant checks and be sure that possible overflow will be detected
     by ASan.  */

  if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
    return expand_call (exp, target, ignore);

  /* When not optimizing, generate calls to library functions for a certain
     set of builtins.  */
  if (!optimize
      && !called_as_built_in (fndecl)
      && fcode != BUILT_IN_FORK
      && fcode != BUILT_IN_EXECL
      && fcode != BUILT_IN_EXECV
      && fcode != BUILT_IN_EXECLP
      && fcode != BUILT_IN_EXECLE
      && fcode != BUILT_IN_EXECVP
      && fcode != BUILT_IN_EXECVE
      && fcode != BUILT_IN_ALLOCA
      && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
      && fcode != BUILT_IN_FREE
      && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
      && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
      && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
      && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
      && fcode != BUILT_IN_CHKP_BNDRET)
    return expand_call (exp, target, ignore);

  /* The built-in function expanders test for target == const0_rtx
     to determine whether the function's result will be ignored.  */
  if (ignore)
    target = const0_rtx;

  /* If the result of a pure or const built-in function is ignored, and
     none of its arguments are volatile, we can avoid expanding the
     built-in call and just evaluate the arguments for side-effects.  */
  if (target == const0_rtx
      && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
      && !(flags & ECF_LOOPING_CONST_OR_PURE))
    {
      bool volatilep = false;
      tree arg;
      call_expr_arg_iterator iter;

      FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
	if (TREE_THIS_VOLATILE (arg))
	  {
	    volatilep = true;
	    break;
	  }

      if (!volatilep)
	{
	  FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
	    expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return const0_rtx;
	}
    }

  /* expand_builtin_with_bounds is supposed to be used for
     instrumented builtin calls.  */
  gcc_assert (!CALL_WITH_BOUNDS_P (exp));
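
  /* Illustrative note (not from the original source): the pure/const
     shortcut above means a call like

       (void) __builtin_ffs (i++);	// result unused

     is not expanded at all; only the argument's side effect (i++) is
     evaluated, since a const builtin with an ignored result is dead.  */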
  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_FABS):
    case BUILT_IN_FABSD32:
    case BUILT_IN_FABSD64:
    case BUILT_IN_FABSD128:
      target = expand_builtin_fabs (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_COPYSIGN):
      target = expand_builtin_copysign (exp, target, subtarget);
      if (target)
	return target;
      break;

      /* Just do a normal library call if we were unable to fold
	 the values.  */
    CASE_FLT_FN (BUILT_IN_CABS):
      break;

    CASE_FLT_FN (BUILT_IN_EXP):
    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
    CASE_FLT_FN (BUILT_IN_EXP2):
    CASE_FLT_FN (BUILT_IN_EXPM1):
    CASE_FLT_FN (BUILT_IN_LOGB):
    CASE_FLT_FN (BUILT_IN_LOG):
    CASE_FLT_FN (BUILT_IN_LOG10):
    CASE_FLT_FN (BUILT_IN_LOG2):
    CASE_FLT_FN (BUILT_IN_LOG1P):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ACOS):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
      /* Treat these like sqrt only if unsafe math optimizations are allowed,
	 because of possible accuracy problems.  */
      if (! flag_unsafe_math_optimizations)
	break;

    CASE_FLT_FN (BUILT_IN_SQRT):
    CASE_FLT_FN (BUILT_IN_FLOOR):
    CASE_FLT_FN (BUILT_IN_CEIL):
    CASE_FLT_FN (BUILT_IN_TRUNC):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      target = expand_builtin_mathfn (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_FMA):
      target = expand_builtin_mathfn_ternary (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_ILOGB):
      if (! flag_unsafe_math_optimizations)
	break;
    CASE_FLT_FN (BUILT_IN_ISINF):
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_ISFINITE:
    case BUILT_IN_ISNORMAL:
      target = expand_builtin_interclass_mathfn (exp, target);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      target = expand_builtin_int_roundingfn (exp, target);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_IRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_IROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      target = expand_builtin_int_roundingfn_2 (exp, target);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_POWI):
      target = expand_builtin_powi (exp, target);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_ATAN2):
    CASE_FLT_FN (BUILT_IN_LDEXP):
    CASE_FLT_FN (BUILT_IN_SCALB):
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      if (! flag_unsafe_math_optimizations)
	break;

    CASE_FLT_FN (BUILT_IN_FMOD):
    CASE_FLT_FN (BUILT_IN_REMAINDER):
    CASE_FLT_FN (BUILT_IN_DREM):
    CASE_FLT_FN (BUILT_IN_POW):
      target = expand_builtin_mathfn_2 (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_CEXPI):
      target = expand_builtin_cexpi (exp, target);
      gcc_assert (target);
      return target;

    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      if (! flag_unsafe_math_optimizations)
	break;
      target = expand_builtin_mathfn_3 (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_SINCOS):
      if (! flag_unsafe_math_optimizations)
	break;
      target = expand_builtin_sincos (exp);
      if (target)
	return target;
      break;

    case BUILT_IN_APPLY_ARGS:
      return expand_builtin_apply_args ();
      /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
	 FUNCTION with a copy of the parameters described by
	 ARGUMENTS, and ARGSIZE.  It returns a block of memory
	 allocated on the stack into which is stored all the registers
	 that might possibly be used for returning the result of a
	 function.  ARGUMENTS is the value returned by
	 __builtin_apply_args.  ARGSIZE is the number of bytes of
	 arguments that must be copied.  ??? How should this value be
	 computed?  We'll also need a safe worst case value for varargs
	 functions.  */
    case BUILT_IN_APPLY:
      if (!validate_arglist (exp, POINTER_TYPE,
			     POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
	  && !validate_arglist (exp, REFERENCE_TYPE,
				POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
	return const0_rtx;
      else
	{
	  rtx ops[3];

	  ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
	  ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
	  ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));

	  return expand_builtin_apply (ops[0], ops[1], ops[2]);
	}

      /* __builtin_return (RESULT) causes the function to return the
	 value described by RESULT.  RESULT is address of the block of
	 memory returned by __builtin_apply.  */
    case BUILT_IN_RETURN:
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
	expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
      return const0_rtx;

    case BUILT_IN_SAVEREGS:
      return expand_builtin_saveregs ();

    case BUILT_IN_VA_ARG_PACK:
      /* All valid uses of __builtin_va_arg_pack () are removed during
	 inlining.  */
      error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
      return const0_rtx;

    case BUILT_IN_VA_ARG_PACK_LEN:
      /* All valid uses of __builtin_va_arg_pack_len () are removed during
	 inlining.  */
      error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
      return const0_rtx;

      /* Return the address of the first anonymous stack arg.  */
    case BUILT_IN_NEXT_ARG:
      if (fold_builtin_next_arg (exp, false))
	return const0_rtx;
      return expand_builtin_next_arg ();

    case BUILT_IN_CLEAR_CACHE:
      target = expand_builtin___clear_cache (exp);
      if (target)
	return target;
      break;

    case BUILT_IN_CLASSIFY_TYPE:
      return expand_builtin_classify_type (exp);
    case BUILT_IN_CONSTANT_P:
      return const0_rtx;

    case BUILT_IN_FRAME_ADDRESS:
    case BUILT_IN_RETURN_ADDRESS:
      return expand_builtin_frame_address (fndecl, exp);

      /* Returns the address of the area where the structure is returned.
	 0 otherwise.  */
    case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      if (call_expr_nargs (exp) != 0
	  || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
	  || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
	return const0_rtx;
      else
	return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);

    case BUILT_IN_ALLOCA:
    case BUILT_IN_ALLOCA_WITH_ALIGN:
      /* If the allocation stems from the declaration of a variable-sized
	 object, it cannot accumulate.  */
      target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
      if (target)
	return target;
      break;

    case BUILT_IN_STACK_SAVE:
      return expand_stack_save ();

    case BUILT_IN_STACK_RESTORE:
      expand_stack_restore (CALL_EXPR_ARG (exp, 0));
      return const0_rtx;
    case BUILT_IN_BSWAP16:
    case BUILT_IN_BSWAP32:
    case BUILT_IN_BSWAP64:
      target = expand_builtin_bswap (target_mode, exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_FFS):
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, ffs_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_CLZ):
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, clz_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_CTZ):
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, ctz_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_CLRSB):
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, clrsb_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_POPCOUNT):
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, popcount_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_PARITY):
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, parity_optab);
      if (target)
	return target;
      break;

    case BUILT_IN_STRLEN:
      target = expand_builtin_strlen (exp, target, target_mode);
      if (target)
	return target;
      break;

    case BUILT_IN_STRCPY:
      target = expand_builtin_strcpy (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_STRNCPY:
      target = expand_builtin_strncpy (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_STPCPY:
      target = expand_builtin_stpcpy (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMCPY:
      target = expand_builtin_memcpy (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMPCPY:
      target = expand_builtin_mempcpy (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMSET:
      target = expand_builtin_memset (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_BZERO:
      target = expand_builtin_bzero (exp);
      if (target)
	return target;
      break;

    case BUILT_IN_STRCMP:
      target = expand_builtin_strcmp (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_STRNCMP:
      target = expand_builtin_strncmp (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_BCMP:
    case BUILT_IN_MEMCMP:
      target = expand_builtin_memcmp (exp, target, mode);
      if (target)
	return target;
      break;
    case BUILT_IN_SETJMP:
      /* This should have been lowered to the builtins below.  */
      gcc_unreachable ();

    case BUILT_IN_SETJMP_SETUP:
      /* __builtin_setjmp_setup is passed a pointer to an array of five words
	 and the receiver label.  */
      if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
	{
	  rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
				      VOIDmode, EXPAND_NORMAL);
	  tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
	  rtx_insn *label_r = label_rtx (label);

	  /* This is copied from the handling of non-local gotos.  */
	  expand_builtin_setjmp_setup (buf_addr, label_r);
	  nonlocal_goto_handler_labels
	    = gen_rtx_INSN_LIST (VOIDmode, label_r,
				 nonlocal_goto_handler_labels);
	  /* ??? Do not let expand_label treat us as such since we would
	     not want to be both on the list of non-local labels and on
	     the list of forced labels.  */
	  FORCED_LABEL (label) = 0;
	  return const0_rtx;
	}
      break;

    case BUILT_IN_SETJMP_RECEIVER:
      /* __builtin_setjmp_receiver is passed the receiver label.  */
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
	{
	  tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
	  rtx_insn *label_r = label_rtx (label);

	  expand_builtin_setjmp_receiver (label_r);
	  return const0_rtx;
	}
      break;

      /* __builtin_longjmp is passed a pointer to an array of five words.
	 It's similar to the C library longjmp function but works with
	 __builtin_setjmp above.  */
    case BUILT_IN_LONGJMP:
      if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
	{
	  rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
				      VOIDmode, EXPAND_NORMAL);
	  rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));

	  if (value != const1_rtx)
	    {
	      error ("%<__builtin_longjmp%> second argument must be 1");
	      return const0_rtx;
	    }

	  expand_builtin_longjmp (buf_addr, value);
	  return const0_rtx;
	}
      break;

    case BUILT_IN_NONLOCAL_GOTO:
      target = expand_builtin_nonlocal_goto (exp);
      if (target)
	return target;
      break;

      /* This updates the setjmp buffer that is its argument with the value
	 of the current stack pointer.  */
    case BUILT_IN_UPDATE_SETJMP_BUF:
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
	{
	  rtx buf_addr
	    = expand_normal (CALL_EXPR_ARG (exp, 0));

	  expand_builtin_update_setjmp_buf (buf_addr);
	  return const0_rtx;
	}
      break;

    case BUILT_IN_TRAP:
      expand_builtin_trap ();
      return const0_rtx;
    case BUILT_IN_UNREACHABLE:
      expand_builtin_unreachable ();
      return const0_rtx;

    CASE_FLT_FN (BUILT_IN_SIGNBIT):
    case BUILT_IN_SIGNBITD32:
    case BUILT_IN_SIGNBITD64:
    case BUILT_IN_SIGNBITD128:
      target = expand_builtin_signbit (exp, target);
      if (target)
	return target;
      break;

      /* Various hooks for the DWARF 2 __throw routine.  */
    case BUILT_IN_UNWIND_INIT:
      expand_builtin_unwind_init ();
      return const0_rtx;
    case BUILT_IN_DWARF_CFA:
      return virtual_cfa_rtx;
#ifdef DWARF2_UNWIND_INFO
    case BUILT_IN_DWARF_SP_COLUMN:
      return expand_builtin_dwarf_sp_column ();
    case BUILT_IN_INIT_DWARF_REG_SIZES:
      expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
      return const0_rtx;
#endif
    case BUILT_IN_FROB_RETURN_ADDR:
      return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
    case BUILT_IN_EXTRACT_RETURN_ADDR:
      return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
    case BUILT_IN_EH_RETURN:
      expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
				CALL_EXPR_ARG (exp, 1));
      return const0_rtx;
    case BUILT_IN_EH_RETURN_DATA_REGNO:
      return expand_builtin_eh_return_data_regno (exp);
    case BUILT_IN_EXTEND_POINTER:
      return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
    case BUILT_IN_EH_POINTER:
      return expand_builtin_eh_pointer (exp);
    case BUILT_IN_EH_FILTER:
      return expand_builtin_eh_filter (exp);
    case BUILT_IN_EH_COPY_VALUES:
      return expand_builtin_eh_copy_values (exp);

    case BUILT_IN_VA_START:
      return expand_builtin_va_start (exp);
    case BUILT_IN_VA_END:
      return expand_builtin_va_end (exp);
    case BUILT_IN_VA_COPY:
      return expand_builtin_va_copy (exp);
    case BUILT_IN_EXPECT:
      return expand_builtin_expect (exp, target);
    case BUILT_IN_ASSUME_ALIGNED:
      return expand_builtin_assume_aligned (exp, target);
    case BUILT_IN_PREFETCH:
      expand_builtin_prefetch (exp);
      return const0_rtx;

    case BUILT_IN_INIT_TRAMPOLINE:
      return expand_builtin_init_trampoline (exp, true);
    case BUILT_IN_INIT_HEAP_TRAMPOLINE:
      return expand_builtin_init_trampoline (exp, false);
    case BUILT_IN_ADJUST_TRAMPOLINE:
      return expand_builtin_adjust_trampoline (exp);

    case BUILT_IN_FORK:
    case BUILT_IN_EXECL:
    case BUILT_IN_EXECV:
    case BUILT_IN_EXECLP:
    case BUILT_IN_EXECLE:
    case BUILT_IN_EXECVP:
    case BUILT_IN_EXECVE:
      target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
      if (target)
	return target;
      break;
    case BUILT_IN_SYNC_FETCH_AND_ADD_1:
    case BUILT_IN_SYNC_FETCH_AND_ADD_2:
    case BUILT_IN_SYNC_FETCH_AND_ADD_4:
    case BUILT_IN_SYNC_FETCH_AND_ADD_8:
    case BUILT_IN_SYNC_FETCH_AND_ADD_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
      target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_SUB_1:
    case BUILT_IN_SYNC_FETCH_AND_SUB_2:
    case BUILT_IN_SYNC_FETCH_AND_SUB_4:
    case BUILT_IN_SYNC_FETCH_AND_SUB_8:
    case BUILT_IN_SYNC_FETCH_AND_SUB_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
      target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_OR_1:
    case BUILT_IN_SYNC_FETCH_AND_OR_2:
    case BUILT_IN_SYNC_FETCH_AND_OR_4:
    case BUILT_IN_SYNC_FETCH_AND_OR_8:
    case BUILT_IN_SYNC_FETCH_AND_OR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
      target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_AND_1:
    case BUILT_IN_SYNC_FETCH_AND_AND_2:
    case BUILT_IN_SYNC_FETCH_AND_AND_4:
    case BUILT_IN_SYNC_FETCH_AND_AND_8:
    case BUILT_IN_SYNC_FETCH_AND_AND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
      target = expand_builtin_sync_operation (mode, exp, AND, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_XOR_1:
    case BUILT_IN_SYNC_FETCH_AND_XOR_2:
    case BUILT_IN_SYNC_FETCH_AND_XOR_4:
    case BUILT_IN_SYNC_FETCH_AND_XOR_8:
    case BUILT_IN_SYNC_FETCH_AND_XOR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
      target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_NAND_1:
    case BUILT_IN_SYNC_FETCH_AND_NAND_2:
    case BUILT_IN_SYNC_FETCH_AND_NAND_4:
    case BUILT_IN_SYNC_FETCH_AND_NAND_8:
    case BUILT_IN_SYNC_FETCH_AND_NAND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
      target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_ADD_AND_FETCH_1:
    case BUILT_IN_SYNC_ADD_AND_FETCH_2:
    case BUILT_IN_SYNC_ADD_AND_FETCH_4:
    case BUILT_IN_SYNC_ADD_AND_FETCH_8:
    case BUILT_IN_SYNC_ADD_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_SUB_AND_FETCH_1:
    case BUILT_IN_SYNC_SUB_AND_FETCH_2:
    case BUILT_IN_SYNC_SUB_AND_FETCH_4:
    case BUILT_IN_SYNC_SUB_AND_FETCH_8:
    case BUILT_IN_SYNC_SUB_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_OR_AND_FETCH_1:
    case BUILT_IN_SYNC_OR_AND_FETCH_2:
    case BUILT_IN_SYNC_OR_AND_FETCH_4:
    case BUILT_IN_SYNC_OR_AND_FETCH_8:
    case BUILT_IN_SYNC_OR_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_AND_AND_FETCH_1:
    case BUILT_IN_SYNC_AND_AND_FETCH_2:
    case BUILT_IN_SYNC_AND_AND_FETCH_4:
    case BUILT_IN_SYNC_AND_AND_FETCH_8:
    case BUILT_IN_SYNC_AND_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, AND, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_XOR_AND_FETCH_1:
    case BUILT_IN_SYNC_XOR_AND_FETCH_2:
    case BUILT_IN_SYNC_XOR_AND_FETCH_4:
    case BUILT_IN_SYNC_XOR_AND_FETCH_8:
    case BUILT_IN_SYNC_XOR_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_NAND_AND_FETCH_1:
    case BUILT_IN_SYNC_NAND_AND_FETCH_2:
    case BUILT_IN_SYNC_NAND_AND_FETCH_4:
    case BUILT_IN_SYNC_NAND_AND_FETCH_8:
    case BUILT_IN_SYNC_NAND_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
      if (mode == VOIDmode)
	mode = TYPE_MODE (boolean_type_node);
      if (!target || !register_operand (target, mode))
	target = gen_reg_rtx (mode);

      mode = get_builtin_sync_mode
	       (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
      target = expand_builtin_compare_and_swap (mode, exp, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
      mode = get_builtin_sync_mode
	       (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
      target = expand_builtin_compare_and_swap (mode, exp, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
      target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_LOCK_RELEASE_1:
    case BUILT_IN_SYNC_LOCK_RELEASE_2:
    case BUILT_IN_SYNC_LOCK_RELEASE_4:
    case BUILT_IN_SYNC_LOCK_RELEASE_8:
    case BUILT_IN_SYNC_LOCK_RELEASE_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
      expand_builtin_sync_lock_release (mode, exp);
      return const0_rtx;

    case BUILT_IN_SYNC_SYNCHRONIZE:
      expand_builtin_sync_synchronize ();
      return const0_rtx;
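
      /* Illustrative note (not from the original source): the __sync cases
	 above all funnel into expand_builtin_sync_operation; e.g.

	   int old = __sync_fetch_and_add (&counter, 1);  // after == false
	   int new_ = __sync_add_and_fetch (&counter, 1); // after == true

	 differ only in the AFTER flag, i.e. whether the value before or
	 after the operation is produced.  */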
    case BUILT_IN_ATOMIC_EXCHANGE_1:
    case BUILT_IN_ATOMIC_EXCHANGE_2:
    case BUILT_IN_ATOMIC_EXCHANGE_4:
    case BUILT_IN_ATOMIC_EXCHANGE_8:
    case BUILT_IN_ATOMIC_EXCHANGE_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
      target = expand_builtin_atomic_exchange (mode, exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
      {
	unsigned int nargs, z;
	vec<tree, va_gc> *vec;

	mode =
	    get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
	target = expand_builtin_atomic_compare_exchange (mode, exp, target);
	if (target)
	  return target;

	/* If this is turned into an external library call, the weak parameter
	   must be dropped to match the expected parameter list.  */
	nargs = call_expr_nargs (exp);
	vec_alloc (vec, nargs - 1);
	for (z = 0; z < 3; z++)
	  vec->quick_push (CALL_EXPR_ARG (exp, z));
	/* Skip the boolean weak parameter.  */
	for (z = 4; z < 6; z++)
	  vec->quick_push (CALL_EXPR_ARG (exp, z));
	exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
	break;
      }

    case BUILT_IN_ATOMIC_LOAD_1:
    case BUILT_IN_ATOMIC_LOAD_2:
    case BUILT_IN_ATOMIC_LOAD_4:
    case BUILT_IN_ATOMIC_LOAD_8:
    case BUILT_IN_ATOMIC_LOAD_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
      target = expand_builtin_atomic_load (mode, exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_STORE_1:
    case BUILT_IN_ATOMIC_STORE_2:
    case BUILT_IN_ATOMIC_STORE_4:
    case BUILT_IN_ATOMIC_STORE_8:
    case BUILT_IN_ATOMIC_STORE_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
      target = expand_builtin_atomic_store (mode, exp);
      if (target)
	return const0_rtx;
      break;
    case BUILT_IN_ATOMIC_ADD_FETCH_1:
    case BUILT_IN_ATOMIC_ADD_FETCH_2:
    case BUILT_IN_ATOMIC_ADD_FETCH_4:
    case BUILT_IN_ATOMIC_ADD_FETCH_8:
    case BUILT_IN_ATOMIC_ADD_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
				       (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }

    case BUILT_IN_ATOMIC_SUB_FETCH_1:
    case BUILT_IN_ATOMIC_SUB_FETCH_2:
    case BUILT_IN_ATOMIC_SUB_FETCH_4:
    case BUILT_IN_ATOMIC_SUB_FETCH_8:
    case BUILT_IN_ATOMIC_SUB_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
				       (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }

    case BUILT_IN_ATOMIC_AND_FETCH_1:
    case BUILT_IN_ATOMIC_AND_FETCH_2:
    case BUILT_IN_ATOMIC_AND_FETCH_4:
    case BUILT_IN_ATOMIC_AND_FETCH_8:
    case BUILT_IN_ATOMIC_AND_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
				       (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }

    case BUILT_IN_ATOMIC_NAND_FETCH_1:
    case BUILT_IN_ATOMIC_NAND_FETCH_2:
    case BUILT_IN_ATOMIC_NAND_FETCH_4:
    case BUILT_IN_ATOMIC_NAND_FETCH_8:
    case BUILT_IN_ATOMIC_NAND_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
				       (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }

    case BUILT_IN_ATOMIC_XOR_FETCH_1:
    case BUILT_IN_ATOMIC_XOR_FETCH_2:
    case BUILT_IN_ATOMIC_XOR_FETCH_4:
    case BUILT_IN_ATOMIC_XOR_FETCH_8:
    case BUILT_IN_ATOMIC_XOR_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
				       (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }

    case BUILT_IN_ATOMIC_OR_FETCH_1:
    case BUILT_IN_ATOMIC_OR_FETCH_2:
    case BUILT_IN_ATOMIC_OR_FETCH_4:
    case BUILT_IN_ATOMIC_OR_FETCH_8:
    case BUILT_IN_ATOMIC_OR_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
				       (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }

    case BUILT_IN_ATOMIC_FETCH_ADD_1:
    case BUILT_IN_ATOMIC_FETCH_ADD_2:
    case BUILT_IN_ATOMIC_FETCH_ADD_4:
    case BUILT_IN_ATOMIC_FETCH_ADD_8:
    case BUILT_IN_ATOMIC_FETCH_ADD_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_SUB_1:
    case BUILT_IN_ATOMIC_FETCH_SUB_2:
    case BUILT_IN_ATOMIC_FETCH_SUB_4:
    case BUILT_IN_ATOMIC_FETCH_SUB_8:
    case BUILT_IN_ATOMIC_FETCH_SUB_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_AND_1:
    case BUILT_IN_ATOMIC_FETCH_AND_2:
    case BUILT_IN_ATOMIC_FETCH_AND_4:
    case BUILT_IN_ATOMIC_FETCH_AND_8:
    case BUILT_IN_ATOMIC_FETCH_AND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_NAND_1:
    case BUILT_IN_ATOMIC_FETCH_NAND_2:
    case BUILT_IN_ATOMIC_FETCH_NAND_4:
    case BUILT_IN_ATOMIC_FETCH_NAND_8:
    case BUILT_IN_ATOMIC_FETCH_NAND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_XOR_1:
    case BUILT_IN_ATOMIC_FETCH_XOR_2:
    case BUILT_IN_ATOMIC_FETCH_XOR_4:
    case BUILT_IN_ATOMIC_FETCH_XOR_8:
    case BUILT_IN_ATOMIC_FETCH_XOR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_OR_1:
    case BUILT_IN_ATOMIC_FETCH_OR_2:
    case BUILT_IN_ATOMIC_FETCH_OR_4:
    case BUILT_IN_ATOMIC_FETCH_OR_8:
    case BUILT_IN_ATOMIC_FETCH_OR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;
    case BUILT_IN_ATOMIC_TEST_AND_SET:
      return expand_builtin_atomic_test_and_set (exp, target);

    case BUILT_IN_ATOMIC_CLEAR:
      return expand_builtin_atomic_clear (exp);

    case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
      return expand_builtin_atomic_always_lock_free (exp);

    case BUILT_IN_ATOMIC_IS_LOCK_FREE:
      target = expand_builtin_atomic_is_lock_free (exp);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_THREAD_FENCE:
      expand_builtin_atomic_thread_fence (exp);
      return const0_rtx;

    case BUILT_IN_ATOMIC_SIGNAL_FENCE:
      expand_builtin_atomic_signal_fence (exp);
      return const0_rtx;

    case BUILT_IN_OBJECT_SIZE:
      return expand_builtin_object_size (exp);

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      target = expand_builtin_memory_chk (exp, target, mode, fcode);
      if (target)
	return target;
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
    case BUILT_IN_STRCAT_CHK:
    case BUILT_IN_STRNCAT_CHK:
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      maybe_emit_chk_warning (exp, fcode);
      break;

    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      maybe_emit_sprintf_chk_warning (exp, fcode);
      break;

    case BUILT_IN_FREE:
      if (warn_free_nonheap_object)
	maybe_emit_free_warning (exp);
      break;

    case BUILT_IN_THREAD_POINTER:
      return expand_builtin_thread_pointer (exp, target);

    case BUILT_IN_SET_THREAD_POINTER:
      expand_builtin_set_thread_pointer (exp);
      return const0_rtx;

    case BUILT_IN_CILK_DETACH:
      expand_builtin_cilk_detach (exp);
      return const0_rtx;

    case BUILT_IN_CILK_POP_FRAME:
      expand_builtin_cilk_pop_frame (exp);
      return const0_rtx;

    case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
    case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
    case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
    case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
    case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
    case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
    case BUILT_IN_CHKP_SET_PTR_BOUNDS:
    case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
    case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
    case BUILT_IN_CHKP_GET_PTR_LBOUND:
    case BUILT_IN_CHKP_GET_PTR_UBOUND:
      /* We allow user CHKP builtins if Pointer Bounds
	 Checker is off.  */
      if (!chkp_function_instrumented_p (current_function_decl))
	{
	  if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
	      || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
	      || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
	      || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
	      || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
	    return expand_normal (CALL_EXPR_ARG (exp, 0));
	  else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
	    return expand_normal (size_zero_node);
	  else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
	    return expand_normal (size_int (-1));
	  else
	    return const0_rtx;
	}
      /* FALLTHROUGH */

    case BUILT_IN_CHKP_BNDMK:
    case BUILT_IN_CHKP_BNDSTX:
    case BUILT_IN_CHKP_BNDCL:
    case BUILT_IN_CHKP_BNDCU:
    case BUILT_IN_CHKP_BNDLDX:
    case BUILT_IN_CHKP_BNDRET:
    case BUILT_IN_CHKP_INTERSECT:
    case BUILT_IN_CHKP_NARROW:
    case BUILT_IN_CHKP_EXTRACT_LOWER:
    case BUILT_IN_CHKP_EXTRACT_UPPER:
      /* Software implementation of Pointer Bounds Checker is NYI.
	 Target support is required.  */
      error ("Your target platform does not support -fcheck-pointer-bounds");
      break;

    case BUILT_IN_ACC_ON_DEVICE:
      target = expand_builtin_acc_on_device (exp, target);
      if (target)
	return target;
      break;

    default:	/* just do library call, if unknown builtin */
      break;
    }

  /* The switch statement above can drop through to cause the function
     to be called normally.  */
  return expand_call (exp, target, ignore);
}
/* Similar to expand_builtin but is used for instrumented calls.  */

rtx
expand_builtin_with_bounds (tree exp, rtx target,
			    rtx subtarget ATTRIBUTE_UNUSED,
			    machine_mode mode, int ignore)
{
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  gcc_assert (CALL_WITH_BOUNDS_P (exp));

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return targetm.expand_builtin (exp, target, subtarget, mode, ignore);

  gcc_assert (fcode > BEGIN_CHKP_BUILTINS
	      && fcode < END_CHKP_BUILTINS);

  switch (fcode)
    {
    case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
      target = expand_builtin_memcpy_with_bounds (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
      target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
      target = expand_builtin_memset_with_bounds (exp, target, mode);
      if (target)
	return target;
      break;

    default:
      break;
    }

  /* The switch statement above can drop through to cause the function
     to be called normally.  */
  return expand_call (exp, target, ignore);
}
/* Determine whether a tree node represents a call to a built-in
   function.  If the tree T is a call to a built-in function with
   the right number of arguments of the appropriate types, return
   the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
   Otherwise the return value is END_BUILTINS.  */

enum built_in_function
builtin_mathfn_code (const_tree t)
{
  const_tree fndecl, arg, parmlist;
  const_tree argtype, parmtype;
  const_call_expr_arg_iterator iter;

  if (TREE_CODE (t) != CALL_EXPR
      || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
    return END_BUILTINS;

  fndecl = get_callee_fndecl (t);
  if (fndecl == NULL_TREE
      || TREE_CODE (fndecl) != FUNCTION_DECL
      || ! DECL_BUILT_IN (fndecl)
      || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return END_BUILTINS;

  parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  init_const_call_expr_arg_iterator (t, &iter);
  for (; parmlist; parmlist = TREE_CHAIN (parmlist))
    {
      /* If a function doesn't take a variable number of arguments,
	 the last element in the list will have type `void'.  */
      parmtype = TREE_VALUE (parmlist);
      if (VOID_TYPE_P (parmtype))
	{
	  if (more_const_call_expr_args_p (&iter))
	    return END_BUILTINS;
	  return DECL_FUNCTION_CODE (fndecl);
	}

      if (! more_const_call_expr_args_p (&iter))
	return END_BUILTINS;

      arg = next_const_call_expr_arg (&iter);
      argtype = TREE_TYPE (arg);

      if (SCALAR_FLOAT_TYPE_P (parmtype))
	{
	  if (! SCALAR_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (COMPLEX_FLOAT_TYPE_P (parmtype))
	{
	  if (! COMPLEX_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (POINTER_TYPE_P (parmtype))
	{
	  if (! POINTER_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (INTEGRAL_TYPE_P (parmtype))
	{
	  if (! INTEGRAL_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else
	return END_BUILTINS;
    }

  /* Variable-length argument list.  */
  return DECL_FUNCTION_CODE (fndecl);
}
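
/* Illustrative sketch (not from the original source) of how callers use
   builtin_mathfn_code to dispatch on a call tree T:

     switch (builtin_mathfn_code (t))
       {
       CASE_FLT_FN (BUILT_IN_SQRT):	// sqrtf/sqrt/sqrtl
	 // ... simplify ...
	 break;
       default:
	 break;				// END_BUILTINS falls through
       }

   The argument/parameter type walk above guarantees a match is only
   returned for a correctly-typed call.  */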
/* Fold a call to __builtin_constant_p, if we know its argument ARG will
   evaluate to a constant.  */

static tree
fold_builtin_constant_p (tree arg)
{
  /* We return 1 for a numeric type that's known to be a constant
     value at compile-time or for an aggregate type that's a
     literal constant.  */
  STRIP_NOPS (arg);

  /* If we know this is a constant, emit the constant of one.  */
  if (CONSTANT_CLASS_P (arg)
      || (TREE_CODE (arg) == CONSTRUCTOR
	  && TREE_CONSTANT (arg)))
    return integer_one_node;
  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (arg, 0);
      if (TREE_CODE (op) == STRING_CST
	  || (TREE_CODE (op) == ARRAY_REF
	      && integer_zerop (TREE_OPERAND (op, 1))
	      && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
	return integer_one_node;
    }

  /* If this expression has side effects, show we don't know it to be a
     constant.  Likewise if it's a pointer or aggregate type since in
     those case we only want literals, since those are only optimized
     when generating RTL, not later.
     And finally, if we are compiling an initializer, not code, we
     need to return a definite result now; there's not going to be any
     more optimization done.  */
  if (TREE_SIDE_EFFECTS (arg)
      || AGGREGATE_TYPE_P (TREE_TYPE (arg))
      || POINTER_TYPE_P (TREE_TYPE (arg))
      || cfun == 0
      || folding_initializer
      || force_folding_builtin_constant_p)
    return integer_zero_node;

  return NULL_TREE;
}
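
/* Illustrative note (not from the original source):

     __builtin_constant_p (42)		// folds to 1 here
     __builtin_constant_p ("abc")	// ADDR_EXPR of STRING_CST -> 1
     __builtin_constant_p (x)		// may stay unfolded until RTL,
					// unless we must answer now

   Returning NULL_TREE (rather than 0) keeps the call alive so later
   passes can still prove the argument constant.  */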
/* Create builtin_expect with PRED and EXPECTED as its arguments and
   return it as a truthvalue.  */

static tree
build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
				tree predictor)
{
  tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;

  fn = builtin_decl_explicit (BUILT_IN_EXPECT);
  arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
  ret_type = TREE_TYPE (TREE_TYPE (fn));
  pred_type = TREE_VALUE (arg_types);
  expected_type = TREE_VALUE (TREE_CHAIN (arg_types));

  pred = fold_convert_loc (loc, pred_type, pred);
  expected = fold_convert_loc (loc, expected_type, expected);
  call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
				   predictor);

  return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
		 build_int_cst (ret_type, 0));
}
/* Fold a call to builtin_expect with arguments ARG0 and ARG1.  Return
   NULL_TREE if no simplification is possible.  */

tree
fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
{
  tree inner, fndecl, inner_arg0;
  enum tree_code code;

  /* Distribute the expected value over short-circuiting operators.
     See through the cast from truthvalue_type_node to long.  */
  inner_arg0 = arg0;
  while (CONVERT_EXPR_P (inner_arg0)
	 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
    inner_arg0 = TREE_OPERAND (inner_arg0, 0);

  /* If this is a builtin_expect within a builtin_expect keep the
     inner one.  See through a comparison against a constant.  It
     might have been added to create a truthvalue.  */
  inner = inner_arg0;

  if (COMPARISON_CLASS_P (inner)
      && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
    inner = TREE_OPERAND (inner, 0);

  if (TREE_CODE (inner) == CALL_EXPR
      && (fndecl = get_callee_fndecl (inner))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
    return arg0;

  inner = inner_arg0;
  code = TREE_CODE (inner);
  if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
    {
      tree op0 = TREE_OPERAND (inner, 0);
      tree op1 = TREE_OPERAND (inner, 1);

      op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
      op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
      inner = build2 (code, TREE_TYPE (inner), op0, op1);

      return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
    }

  /* If the argument isn't invariant then there's nothing else we can do.  */
  if (!TREE_CONSTANT (inner_arg0))
    return NULL_TREE;

  /* If we expect that a comparison against the argument will fold to
     a constant return the constant.  In practice, this means a true
     constant or the address of a non-weak symbol.  */
  inner = inner_arg0;
  STRIP_NOPS (inner);
  if (TREE_CODE (inner) == ADDR_EXPR)
    {
      do
	{
	  inner = TREE_OPERAND (inner, 0);
	}
      while (TREE_CODE (inner) == COMPONENT_REF
	     || TREE_CODE (inner) == ARRAY_REF);
      if ((TREE_CODE (inner) == VAR_DECL
	   || TREE_CODE (inner) == FUNCTION_DECL)
	  && DECL_WEAK (inner))
	return NULL_TREE;
    }

  /* Otherwise, ARG0 already has the proper type for the return value.  */
  return arg0;
}
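
/* Illustrative note (not from the original source): the distribution
   above rewrites

     __builtin_expect (a && b, 1)

   into roughly

     (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   so each short-circuit arm carries its own prediction.  */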
/* Fold a call to __builtin_classify_type with argument ARG.  */

static tree
fold_builtin_classify_type (tree arg)
{
  if (arg == 0)
    return build_int_cst (integer_type_node, no_type_class);

  return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
}

/* Fold a call to __builtin_strlen with argument ARG.  */

static tree
fold_builtin_strlen (location_t loc, tree type, tree arg)
{
  if (!validate_arg (arg, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree len = c_strlen (arg, 0);

      if (len)
	return fold_convert_loc (loc, type, len);

      return NULL_TREE;
    }
}
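
/* Illustrative note (not from the original source):

     __builtin_strlen ("hello")		// c_strlen folds this to 5

   c_strlen returns NULL_TREE for a non-constant argument, in which case
   the call survives to expansion (and possibly a real strlen call).  */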
/* Fold a call to __builtin_inf or __builtin_huge_val.  */

static tree
fold_builtin_inf (location_t loc, tree type, int warn)
{
  REAL_VALUE_TYPE real;

  /* __builtin_inff is intended to be usable to define INFINITY on all
     targets.  If an infinity is not available, INFINITY expands "to a
     positive constant of type float that overflows at translation
     time", footnote "In this case, using INFINITY will violate the
     constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
     Thus we pedwarn to ensure this constraint violation is
     diagnosed.  */
  if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
    pedwarn (loc, 0, "target format does not support infinity");

  real_inf (&real);
  return build_real (type, real);
}

/* Fold a call to __builtin_nan or __builtin_nans with argument ARG.  */

static tree
fold_builtin_nan (tree arg, tree type, int quiet)
{
  REAL_VALUE_TYPE real;
  const char *str;

  if (!validate_arg (arg, POINTER_TYPE))
    return NULL_TREE;
  str = c_getstr (arg);
  if (!str)
    return NULL_TREE;

  if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
    return NULL_TREE;

  return build_real (type, real);
}
/* Return true if the floating point expression T has an integer value.
   We also allow +Inf, -Inf and NaN to be considered integer values.  */

static bool
integer_valued_real_p (tree t)
{
  switch (TREE_CODE (t))
    {
    case FLOAT_EXPR:
      return true;

    case ABS_EXPR:
    case SAVE_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 0));

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 1));

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 0))
	     && integer_valued_real_p (TREE_OPERAND (t, 1));

    case COND_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 1))
	     && integer_valued_real_p (TREE_OPERAND (t, 2));

    case REAL_CST:
      return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));

    CASE_CONVERT:
      {
	tree type = TREE_TYPE (TREE_OPERAND (t, 0));
	if (TREE_CODE (type) == INTEGER_TYPE)
	  return true;
	if (TREE_CODE (type) == REAL_TYPE)
	  return integer_valued_real_p (TREE_OPERAND (t, 0));
	break;
      }

    case CALL_EXPR:
      switch (builtin_mathfn_code (t))
	{
	CASE_FLT_FN (BUILT_IN_CEIL):
	CASE_FLT_FN (BUILT_IN_FLOOR):
	CASE_FLT_FN (BUILT_IN_NEARBYINT):
	CASE_FLT_FN (BUILT_IN_RINT):
	CASE_FLT_FN (BUILT_IN_ROUND):
	CASE_FLT_FN (BUILT_IN_TRUNC):
	  return true;

	CASE_FLT_FN (BUILT_IN_FMIN):
	CASE_FLT_FN (BUILT_IN_FMAX):
	  return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
		 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));

	default:
	  break;
	}
      break;

    default:
      break;
    }
  return false;
}
/* FNDECL is assumed to be a builtin where truncation can be propagated
   across (for instance floor((double)f) == (double)floorf (f)).
   Do the transformation for a call with argument ARG.  */

static tree
fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Integer rounding functions are idempotent.  */
  if (fcode == builtin_mathfn_code (arg))
    return arg;

  /* If argument is already integer valued, and we don't need to worry
     about setting errno, there's no need to perform rounding.  */
  if (! flag_errno_math && integer_valued_real_p (arg))
    return arg;

  if (optimize)
    {
      tree arg0 = strip_float_extensions (arg);
      tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
      tree newtype = TREE_TYPE (arg0);
      tree decl;

      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
	  && (decl = mathfn_built_in (newtype, fcode)))
	return fold_convert_loc (loc, ftype,
				 build_call_expr_loc (loc, decl, 1,
						      fold_convert_loc (loc,
									newtype,
									arg0)));
    }
  return NULL_TREE;
}
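
/* Illustrative note (not from the original source): for a float F,

     floor ((double) f)			// fold_trunc_transparent_mathfn
       => (double) floorf (f)		// narrows to the float variant

   which is safe because rounding to an integer commutes with the
   widening conversion.  */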
/* FNDECL is assumed to be builtin which can narrow the FP type of
   the argument, for instance lround((double)f) -> lroundf (f).
   Do the transformation for a call with argument ARG.  */

static tree
fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* If argument is already integer valued, and we don't need to worry
     about setting errno, there's no need to perform rounding.  */
  if (! flag_errno_math && integer_valued_real_p (arg))
    return fold_build1_loc (loc, FIX_TRUNC_EXPR,
			    TREE_TYPE (TREE_TYPE (fndecl)), arg);

  if (optimize)
    {
      tree ftype = TREE_TYPE (arg);
      tree arg0 = strip_float_extensions (arg);
      tree newtype = TREE_TYPE (arg0);
      tree decl;

      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
	  && (decl = mathfn_built_in (newtype, fcode)))
	return build_call_expr_loc (loc, decl, 1,
				    fold_convert_loc (loc, newtype, arg0));
    }

  /* Canonicalize iround (x) to lround (x) on ILP32 targets where
     sizeof (int) == sizeof (long).  */
  if (TYPE_PRECISION (integer_type_node)
      == TYPE_PRECISION (long_integer_type_node))
    {
      tree newfn = NULL_TREE;
      switch (fcode)
	{
	CASE_FLT_FN (BUILT_IN_ICEIL):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
	  break;

	CASE_FLT_FN (BUILT_IN_IFLOOR):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
	  break;

	CASE_FLT_FN (BUILT_IN_IROUND):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
	  break;

	CASE_FLT_FN (BUILT_IN_IRINT):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
	  break;

	default:
	  break;
	}

      if (newfn)
	{
	  tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
	  return fold_convert_loc (loc,
				   TREE_TYPE (TREE_TYPE (fndecl)), newcall);
	}
    }

  /* Canonicalize llround (x) to lround (x) on LP64 targets where
     sizeof (long long) == sizeof (long).  */
  if (TYPE_PRECISION (long_long_integer_type_node)
      == TYPE_PRECISION (long_integer_type_node))
    {
      tree newfn = NULL_TREE;
      switch (fcode)
	{
	CASE_FLT_FN (BUILT_IN_LLCEIL):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
	  break;

	CASE_FLT_FN (BUILT_IN_LLFLOOR):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
	  break;

	CASE_FLT_FN (BUILT_IN_LLROUND):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
	  break;

	CASE_FLT_FN (BUILT_IN_LLRINT):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
	  break;

	default:
	  break;
	}

      if (newfn)
	{
	  tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
	  return fold_convert_loc (loc,
				   TREE_TYPE (TREE_TYPE (fndecl)), newcall);
	}
    }

  return NULL_TREE;
}
/* Fold call to builtin cabs, cabsf or cabsl with argument ARG.  TYPE is the
   return type.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
{
  tree res;

  if (!validate_arg (arg, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if (TREE_CODE (arg) == COMPLEX_CST
      && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
			      type, mpfr_hypot)))
    return res;

  if (TREE_CODE (arg) == COMPLEX_EXPR)
    {
      tree real = TREE_OPERAND (arg, 0);
      tree imag = TREE_OPERAND (arg, 1);

      /* If either part is zero, cabs is fabs of the other.  */
      if (real_zerop (real))
	return fold_build1_loc (loc, ABS_EXPR, type, imag);
      if (real_zerop (imag))
	return fold_build1_loc (loc, ABS_EXPR, type, real);

      /* cabs(x+xi) -> fabs(x)*sqrt(2).  */
      if (flag_unsafe_math_optimizations
	  && operand_equal_p (real, imag, OEP_PURE_SAME))
	{
	  const REAL_VALUE_TYPE sqrt2_trunc
	    = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());

	  return fold_build2_loc (loc, MULT_EXPR, type,
				  fold_build1_loc (loc, ABS_EXPR, type, real),
				  build_real (type, sqrt2_trunc));
	}
    }

  /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z).  */
  if (TREE_CODE (arg) == NEGATE_EXPR
      || TREE_CODE (arg) == CONJ_EXPR)
    return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));

  /* Don't do this when optimizing for size.  */
  if (flag_unsafe_math_optimizations
      && optimize && optimize_function_for_speed_p (cfun))
    {
      tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

      if (sqrtfn != NULL_TREE)
	{
	  tree rpart, ipart, result;

	  arg = builtin_save_expr (arg);

	  rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
	  ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);

	  rpart = builtin_save_expr (rpart);
	  ipart = builtin_save_expr (ipart);

	  result = fold_build2_loc (loc, PLUS_EXPR, type,
				    fold_build2_loc (loc, MULT_EXPR, type,
						     rpart, rpart),
				    fold_build2_loc (loc, MULT_EXPR, type,
						     ipart, ipart));

	  return build_call_expr_loc (loc, sqrtfn, 1, result);
	}
    }

  return NULL_TREE;
}
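
/* Illustrative note (not from the original source): the simplifications
   above give, for real x and y,

     cabs (x + 0i)	=> fabs (x)
     cabs (x + xi)	=> fabs (x) * sqrt (2)	// unsafe-math only
     cabs (conj (z))	=> cabs (z)
     cabs (z)		=> sqrt (x*x + y*y)	// inline expansion

   the last one only when optimizing for speed.  */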
/* Build a complex (inf +- 0i) for the result of cproj.  TYPE is the
   complex tree type of the result.  If NEG is true, the imaginary
   zero is negative.  */

static tree
build_complex_cproj (tree type, bool neg)
{
  REAL_VALUE_TYPE rinf, rzero = dconst0;

  real_inf (&rinf);
  rzero.sign = neg;
  return build_complex (type, build_real (TREE_TYPE (type), rinf),
			build_real (TREE_TYPE (type), rzero));
}

/* Fold call to builtin cproj, cprojf or cprojl with argument ARG.  TYPE is the
   return type.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cproj (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
    return NULL_TREE;

  /* If there are no infinities, return arg.  */
  if (! HONOR_INFINITIES (type))
    return non_lvalue_loc (loc, arg);

  /* Calculate the result when the argument is a constant.  */
  if (TREE_CODE (arg) == COMPLEX_CST)
    {
      const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      if (real_isinf (real) || real_isinf (imag))
	return build_complex_cproj (type, imag->sign);
      else
	return arg;
    }
  else if (TREE_CODE (arg) == COMPLEX_EXPR)
    {
      tree real = TREE_OPERAND (arg, 0);
      tree imag = TREE_OPERAND (arg, 1);

      STRIP_NOPS (real);
      STRIP_NOPS (imag);

      /* If the real part is inf and the imag part is known to be
	 nonnegative, return (inf + 0i).  Remember side-effects are
	 possible in the imag part.  */
      if (TREE_CODE (real) == REAL_CST
	  && real_isinf (TREE_REAL_CST_PTR (real))
	  && tree_expr_nonnegative_p (imag))
	return omit_one_operand_loc (loc, type,
				     build_complex_cproj (type, false),
				     arg);

      /* If the imag part is inf, return (inf+I*copysign(0,imag)).
	 Remember side-effects are possible in the real part.  */
      if (TREE_CODE (imag) == REAL_CST
	  && real_isinf (TREE_REAL_CST_PTR (imag)))
	return
	  omit_one_operand_loc (loc, type,
				build_complex_cproj (type, TREE_REAL_CST_PTR
						     (imag)->sign), arg);
    }

  return NULL_TREE;
}
/* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_sqrt (location_t loc, tree arg, tree type)
{
  enum built_in_function fcode;
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
    return res;

  /* Optimize sqrt(expN(x)) = expN(x*0.5).  */
  fcode = builtin_mathfn_code (arg);
  if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
    {
      tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      arg = fold_build2_loc (loc, MULT_EXPR, type,
			     CALL_EXPR_ARG (arg, 0),
			     build_real (type, dconsthalf));
      return build_call_expr_loc (loc, expfn, 1, arg);
    }

  /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)).  */
  if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
    {
      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

      if (powfn)
	{
	  tree arg0 = CALL_EXPR_ARG (arg, 0);
	  tree tree_root;
	  /* The inner root was either sqrt or cbrt.  */
	  /* This was a conditional expression but it triggered a bug
	     in Sun C 5.5.  */
	  REAL_VALUE_TYPE dconstroot;
	  if (BUILTIN_SQRT_P (fcode))
	    dconstroot = dconsthalf;
	  else
	    dconstroot = dconst_third ();

	  /* Adjust for the outer root.  */
	  SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
	  dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
	  tree_root = build_real (type, dconstroot);
	  return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
	}
    }

  /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5).  */
  if (flag_unsafe_math_optimizations
      && (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL))
    {
      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      tree arg0 = CALL_EXPR_ARG (arg, 0);
      tree arg1 = CALL_EXPR_ARG (arg, 1);
      tree narg1;
      if (!tree_expr_nonnegative_p (arg0))
	arg0 = build1 (ABS_EXPR, type, arg0);
      narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
			       build_real (type, dconsthalf));
      return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
    }

  return NULL_TREE;
}
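
/* Examples of the unsafe-math folds above (added commentary, not part
   of the FSF sources): with -funsafe-math-optimizations,

     sqrt (exp (x))     =>  exp (x * 0.5)
     sqrt (cbrt (x))    =>  pow (x, 1.0/6.0)
     sqrt (pow (x, y))  =>  pow (fabs (x), y * 0.5)

   so for instance

     double f (double x) { return __builtin_sqrt (__builtin_exp (x)); }

   compiles down to a single call to exp.  */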
/* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cbrt (location_t loc, tree arg, tree type)
{
  const enum built_in_function fcode = builtin_mathfn_code (arg);
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
    return res;

  if (flag_unsafe_math_optimizations)
    {
      /* Optimize cbrt(expN(x)) -> expN(x/3).  */
      if (BUILTIN_EXPONENT_P (fcode))
	{
	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
	  const REAL_VALUE_TYPE third_trunc =
	    real_value_truncate (TYPE_MODE (type), dconst_third ());
	  arg = fold_build2_loc (loc, MULT_EXPR, type,
				 CALL_EXPR_ARG (arg, 0),
				 build_real (type, third_trunc));
	  return build_call_expr_loc (loc, expfn, 1, arg);
	}

      /* Optimize cbrt(sqrt(x)) -> pow(x,1/6).  */
      if (BUILTIN_SQRT_P (fcode))
	{
	  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

	  if (powfn)
	    {
	      tree arg0 = CALL_EXPR_ARG (arg, 0);
	      tree tree_root;
	      REAL_VALUE_TYPE dconstroot = dconst_third ();

	      SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
	      dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
	      tree_root = build_real (type, dconstroot);
	      return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
	    }
	}

      /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
	{
	  tree arg0 = CALL_EXPR_ARG (arg, 0);
	  if (tree_expr_nonnegative_p (arg0))
	    {
	      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

	      if (powfn)
		{
		  tree tree_root;
		  REAL_VALUE_TYPE dconstroot;

		  real_arithmetic (&dconstroot, MULT_EXPR,
				   dconst_third_ptr (), dconst_third_ptr ());
		  dconstroot = real_value_truncate (TYPE_MODE (type),
						    dconstroot);
		  tree_root = build_real (type, dconstroot);
		  return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
		}
	    }
	}

      /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL)
	{
	  tree arg00 = CALL_EXPR_ARG (arg, 0);
	  tree arg01 = CALL_EXPR_ARG (arg, 1);
	  if (tree_expr_nonnegative_p (arg00))
	    {
	      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
	      const REAL_VALUE_TYPE dconstroot
		= real_value_truncate (TYPE_MODE (type), dconst_third ());
	      tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
					     build_real (type, dconstroot));
	      return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
	    }
	}
    }

  return NULL_TREE;
}
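
/* Example (added commentary): under -funsafe-math-optimizations the
   folds above compose nested roots through pow, e.g.

     cbrt (sqrt (x))  =>  pow (x, 1.0/6.0)
     cbrt (cbrt (x))  =>  pow (x, 1.0/9.0)   iff x is known nonnegative

   with the exponent constants truncated to the argument type's mode.  */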
/* Fold function call to builtin cos, cosf, or cosl with argument ARG.
   TYPE is the type of the return value.  Return NULL_TREE if no
   simplification can be made.  */

static tree
fold_builtin_cos (location_t loc,
		  tree arg, tree type, tree fndecl)
{
  tree res, narg;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
    return res;

  /* Optimize cos(-x) into cos (x).  */
  if ((narg = fold_strip_sign_ops (arg)))
    return build_call_expr_loc (loc, fndecl, 1, narg);

  return NULL_TREE;
}

/* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree res, narg;

      /* Calculate the result when the argument is a constant.  */
      if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
	return res;

      /* Optimize cosh(-x) into cosh (x).  */
      if ((narg = fold_strip_sign_ops (arg)))
	return build_call_expr_loc (loc, fndecl, 1, narg);
    }

  return NULL_TREE;
}

/* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
   argument ARG.  TYPE is the type of the return value.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
		   bool hyper)
{
  if (validate_arg (arg, COMPLEX_TYPE)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
    {
      tree tmp;

      /* Calculate the result when the argument is a constant.  */
      if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
	return tmp;

      /* Optimize fn(-x) into fn(x).  */
      if ((tmp = fold_strip_sign_ops (arg)))
	return build_call_expr_loc (loc, fndecl, 1, tmp);
    }

  return NULL_TREE;
}

/* Fold function call to builtin tan, tanf, or tanl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_tan (tree arg, tree type)
{
  enum built_in_function fcode;
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
    return res;

  /* Optimize tan(atan(x)) = x.  */
  fcode = builtin_mathfn_code (arg);
  if (flag_unsafe_math_optimizations
      && (fcode == BUILT_IN_ATAN
	  || fcode == BUILT_IN_ATANF
	  || fcode == BUILT_IN_ATANL))
    return CALL_EXPR_ARG (arg, 0);

  return NULL_TREE;
}

/* Fold function call to builtin sincos, sincosf, or sincosl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_sincos (location_t loc,
		     tree arg0, tree arg1, tree arg2)
{
  tree type;
  tree res, fn, call;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  type = TREE_TYPE (arg0);

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
    return res;

  /* Canonicalize sincos to cexpi.  */
  if (!targetm.libc_has_function (function_c99_math_complex))
    return NULL_TREE;
  fn = mathfn_built_in (type, BUILT_IN_CEXPI);
  if (!fn)
    return NULL_TREE;

  call = build_call_expr_loc (loc, fn, 1, arg0);
  call = builtin_save_expr (call);

  return build2 (COMPOUND_EXPR, void_type_node,
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg1),
			 build1 (IMAGPART_EXPR, type, call)),
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg2),
			 build1 (REALPART_EXPR, type, call)));
}
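
/* Example (added commentary): on targets whose C library provides the
   C99 complex functions, a call such as

     sincos (x, &s, &c);

   is canonicalized to the equivalent of

     __complex__ double t = cexpi (x);
     s = __imag__ t;
     c = __real__ t;

   which later passes can expand back into a single library call.  */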
/* Fold function call to builtin cexp, cexpf, or cexpl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cexp (location_t loc, tree arg0, tree type)
{
  tree rtype;
  tree realp, imagp, ifn;
  tree res;

  if (!validate_arg (arg0, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
    return res;

  rtype = TREE_TYPE (TREE_TYPE (arg0));

  /* In case we can figure out the real part of arg0 and it is constant zero
     fold to cexpi.  */
  if (!targetm.libc_has_function (function_c99_math_complex))
    return NULL_TREE;
  ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
  if (!ifn)
    return NULL_TREE;

  if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
      && real_zerop (realp))
    {
      tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
      return build_call_expr_loc (loc, ifn, 1, narg);
    }

  /* In case we can easily decompose real and imaginary parts split cexp
     to exp (r) * cexpi (i).  */
  if (flag_unsafe_math_optimizations
      && realp)
    {
      tree rfn, rcall, icall;

      rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
      if (!rfn)
	return NULL_TREE;

      imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
      if (!imagp)
	return NULL_TREE;

      icall = build_call_expr_loc (loc, ifn, 1, imagp);
      icall = builtin_save_expr (icall);
      rcall = build_call_expr_loc (loc, rfn, 1, realp);
      rcall = builtin_save_expr (rcall);
      return fold_build2_loc (loc, COMPLEX_EXPR, type,
			      fold_build2_loc (loc, MULT_EXPR, rtype,
					       rcall,
					       fold_build1_loc (loc, REALPART_EXPR,
								rtype, icall)),
			      fold_build2_loc (loc, MULT_EXPR, rtype,
					       rcall,
					       fold_build1_loc (loc, IMAGPART_EXPR,
								rtype, icall)));
    }

  return NULL_TREE;
}
/* Fold function call to builtin trunc, truncf or truncl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize trunc of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE r, x;
      tree type = TREE_TYPE (TREE_TYPE (fndecl));

      x = TREE_REAL_CST (arg);
      real_trunc (&r, TYPE_MODE (type), &x);
      return build_real (type, r);
    }

  return fold_trunc_transparent_mathfn (loc, fndecl, arg);
}

/* Fold function call to builtin floor, floorf or floorl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_floor (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize floor of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE x;

      x = TREE_REAL_CST (arg);
      if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
	{
	  tree type = TREE_TYPE (TREE_TYPE (fndecl));
	  REAL_VALUE_TYPE r;

	  real_floor (&r, TYPE_MODE (type), &x);
	  return build_real (type, r);
	}
    }

  /* Fold floor (x) where x is nonnegative to trunc (x).  */
  if (tree_expr_nonnegative_p (arg))
    {
      tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
      if (truncfn)
	return build_call_expr_loc (loc, truncfn, 1, arg);
    }

  return fold_trunc_transparent_mathfn (loc, fndecl, arg);
}

/* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize ceil of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE x;

      x = TREE_REAL_CST (arg);
      if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
	{
	  tree type = TREE_TYPE (TREE_TYPE (fndecl));
	  REAL_VALUE_TYPE r;

	  real_ceil (&r, TYPE_MODE (type), &x);
	  return build_real (type, r);
	}
    }

  return fold_trunc_transparent_mathfn (loc, fndecl, arg);
}

/* Fold function call to builtin round, roundf or roundl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_round (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize round of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE x;

      x = TREE_REAL_CST (arg);
      if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
	{
	  tree type = TREE_TYPE (TREE_TYPE (fndecl));
	  REAL_VALUE_TYPE r;

	  real_round (&r, TYPE_MODE (type), &x);
	  return build_real (type, r);
	}
    }

  return fold_trunc_transparent_mathfn (loc, fndecl, arg);
}

/* Fold function call to builtin lround, lroundf or lroundl (or the
   corresponding long long versions) and other rounding functions.  ARG
   is the argument to the call.  Return NULL_TREE if no simplification
   can be made.  */

static tree
fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize lround of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);

      if (real_isfinite (&x))
	{
	  tree itype = TREE_TYPE (TREE_TYPE (fndecl));
	  tree ftype = TREE_TYPE (arg);
	  REAL_VALUE_TYPE r;
	  bool fail = false;

	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_IFLOOR):
	    CASE_FLT_FN (BUILT_IN_LFLOOR):
	    CASE_FLT_FN (BUILT_IN_LLFLOOR):
	      real_floor (&r, TYPE_MODE (ftype), &x);
	      break;

	    CASE_FLT_FN (BUILT_IN_ICEIL):
	    CASE_FLT_FN (BUILT_IN_LCEIL):
	    CASE_FLT_FN (BUILT_IN_LLCEIL):
	      real_ceil (&r, TYPE_MODE (ftype), &x);
	      break;

	    CASE_FLT_FN (BUILT_IN_IROUND):
	    CASE_FLT_FN (BUILT_IN_LROUND):
	    CASE_FLT_FN (BUILT_IN_LLROUND):
	      real_round (&r, TYPE_MODE (ftype), &x);
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  wide_int val = real_to_integer (&r, &fail, TYPE_PRECISION (itype));
	  if (!fail)
	    return wide_int_to_tree (itype, val);
	}
    }

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x).  */
      if (tree_expr_nonnegative_p (arg))
	return fold_build1_loc (loc, FIX_TRUNC_EXPR,
				TREE_TYPE (TREE_TYPE (fndecl)), arg);
      break;
    default:;
    }

  return fold_fixed_mathfn (loc, fndecl, arg);
}
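
/* Example (added commentary): given a finite constant argument the
   switch above folds the call at compile time, e.g.

     __builtin_lround (2.5)   =>  3L
     __builtin_llceil (-0.5)  =>  0LL

   and, independent of constants, lfloor (x) with x known nonnegative
   becomes a plain FIX_TRUNC_EXPR conversion.  */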
/* Fold function call to builtin ffs, clz, ctz, popcount and parity
   and their long and long long variants (i.e. ffsl and ffsll).  ARG is
   the argument to the call.  Return NULL_TREE if no simplification can
   be made.  */

static tree
fold_builtin_bitop (tree fndecl, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize for constant argument.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      tree type = TREE_TYPE (arg);
      int result;

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	CASE_INT_FN (BUILT_IN_FFS):
	  result = wi::ffs (arg);
	  break;

	CASE_INT_FN (BUILT_IN_CLZ):
	  if (wi::ne_p (arg, 0))
	    result = wi::clz (arg);
	  else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
	    result = TYPE_PRECISION (type);
	  break;

	CASE_INT_FN (BUILT_IN_CTZ):
	  if (wi::ne_p (arg, 0))
	    result = wi::ctz (arg);
	  else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
	    result = TYPE_PRECISION (type);
	  break;

	CASE_INT_FN (BUILT_IN_CLRSB):
	  result = wi::clrsb (arg);
	  break;

	CASE_INT_FN (BUILT_IN_POPCOUNT):
	  result = wi::popcount (arg);
	  break;

	CASE_INT_FN (BUILT_IN_PARITY):
	  result = wi::parity (arg);
	  break;

	default:
	  gcc_unreachable ();
	}

      return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
    }

  return NULL_TREE;
}
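
/* Example (added commentary): for constant operands the wide-int
   helpers above give compile-time answers, e.g.

     __builtin_popcount (0xff)  =>  8
     __builtin_ffs (0)          =>  0
     __builtin_clz (0)          =>  the target's CLZ_DEFINED_VALUE_AT_ZERO
                                    value when it provides one, otherwise
                                    TYPE_PRECISION of the argument type.  */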
/* Fold function call to builtin_bswap and the short, long and long long
   variants.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_bswap (tree fndecl, tree arg)
{
  if (! validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize constant value.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	  case BUILT_IN_BSWAP16:
	  case BUILT_IN_BSWAP32:
	  case BUILT_IN_BSWAP64:
	    {
	      signop sgn = TYPE_SIGN (type);
	      tree result =
		wide_int_to_tree (type,
				  wide_int::from (arg, TYPE_PRECISION (type),
						  sgn).bswap ());
	      return result;
	    }
	default:
	  gcc_unreachable ();
	}
    }

  return NULL_TREE;
}
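
/* Example (added commentary): the constant fold above relies on
   wide_int::bswap, so e.g.

     __builtin_bswap32 (0x12345678)  =>  0x78563412

   with no byte-swap code emitted at run time.  */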
/* Fold a builtin function call to hypot, hypotf, or hypotl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_hypot (location_t loc, tree fndecl,
		    tree arg0, tree arg1, tree type)
{
  tree res, narg0, narg1;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
    return res;

  /* If either argument to hypot has a negate or abs, strip that off.
     E.g. hypot(-x,fabs(y)) -> hypot(x,y).  */
  narg0 = fold_strip_sign_ops (arg0);
  narg1 = fold_strip_sign_ops (arg1);
  if (narg0 || narg1)
    {
      return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
				  narg1 ? narg1 : arg1);
    }

  /* If either argument is zero, hypot is fabs of the other.  */
  if (real_zerop (arg0))
    return fold_build1_loc (loc, ABS_EXPR, type, arg1);
  else if (real_zerop (arg1))
    return fold_build1_loc (loc, ABS_EXPR, type, arg0);

  /* hypot(x,x) -> fabs(x)*sqrt(2).  */
  if (flag_unsafe_math_optimizations
      && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
    {
      const REAL_VALUE_TYPE sqrt2_trunc
	= real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
      return fold_build2_loc (loc, MULT_EXPR, type,
			      fold_build1_loc (loc, ABS_EXPR, type, arg0),
			      build_real (type, sqrt2_trunc));
    }

  return NULL_TREE;
}
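
/* Example (added commentary): the sign-stripping fold above rewrites

     hypot (-x, fabs (y))  =>  hypot (x, y)

   and with -funsafe-math-optimizations

     hypot (x, x)  =>  fabs (x) * sqrt (2)

   using a sqrt(2) constant truncated to the argument type.  */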
/* Fold a builtin function call to pow, powf, or powl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
{
  tree res;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
    return res;

  /* Optimize pow(1.0,y) = 1.0.  */
  if (real_onep (arg0))
    return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);

  if (TREE_CODE (arg1) == REAL_CST
      && !TREE_OVERFLOW (arg1))
    {
      REAL_VALUE_TYPE cint;
      REAL_VALUE_TYPE c;
      HOST_WIDE_INT n;

      c = TREE_REAL_CST (arg1);

      /* Optimize pow(x,0.0) = 1.0.  */
      if (REAL_VALUES_EQUAL (c, dconst0))
	return omit_one_operand_loc (loc, type, build_real (type, dconst1),
				     arg0);

      /* Optimize pow(x,1.0) = x.  */
      if (REAL_VALUES_EQUAL (c, dconst1))
	return arg0;

      /* Optimize pow(x,-1.0) = 1.0/x.  */
      if (REAL_VALUES_EQUAL (c, dconstm1))
	return fold_build2_loc (loc, RDIV_EXPR, type,
				build_real (type, dconst1), arg0);

      /* Optimize pow(x,0.5) = sqrt(x).  */
      if (flag_unsafe_math_optimizations
	  && REAL_VALUES_EQUAL (c, dconsthalf))
	{
	  tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

	  if (sqrtfn != NULL_TREE)
	    return build_call_expr_loc (loc, sqrtfn, 1, arg0);
	}

      /* Optimize pow(x,1.0/3.0) = cbrt(x).  */
      if (flag_unsafe_math_optimizations)
	{
	  const REAL_VALUE_TYPE dconstroot
	    = real_value_truncate (TYPE_MODE (type), dconst_third ());

	  if (REAL_VALUES_EQUAL (c, dconstroot))
	    {
	      tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
	      if (cbrtfn != NULL_TREE)
		return build_call_expr_loc (loc, cbrtfn, 1, arg0);
	    }
	}

      /* Check for an integer exponent.  */
      n = real_to_integer (&c);
      real_from_integer (&cint, VOIDmode, n, SIGNED);
      if (real_identical (&c, &cint))
	{
	  /* Attempt to evaluate pow at compile-time, unless this should
	     raise an exception.  */
	  if (TREE_CODE (arg0) == REAL_CST
	      && !TREE_OVERFLOW (arg0)
	      && (n > 0
		  || (!flag_trapping_math && !flag_errno_math)
		  || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
	    {
	      REAL_VALUE_TYPE x;
	      bool inexact;

	      x = TREE_REAL_CST (arg0);
	      inexact = real_powi (&x, TYPE_MODE (type), &x, n);
	      if (flag_unsafe_math_optimizations || !inexact)
		return build_real (type, x);
	    }

	  /* Strip sign ops from even integer powers.  */
	  if ((n & 1) == 0 && flag_unsafe_math_optimizations)
	    {
	      tree narg0 = fold_strip_sign_ops (arg0);
	      if (narg0)
		return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
	    }
	}
    }

  if (flag_unsafe_math_optimizations)
    {
      const enum built_in_function fcode = builtin_mathfn_code (arg0);

      /* Optimize pow(expN(x),y) = expN(x*y).  */
      if (BUILTIN_EXPONENT_P (fcode))
	{
	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
	  return build_call_expr_loc (loc, expfn, 1, arg);
	}

      /* Optimize pow(sqrt(x),y) = pow(x,y*0.5).  */
      if (BUILTIN_SQRT_P (fcode))
	{
	  tree narg0 = CALL_EXPR_ARG (arg0, 0);
	  tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
					build_real (type, dconsthalf));
	  return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
	}

      /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
	{
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  if (tree_expr_nonnegative_p (arg))
	    {
	      const REAL_VALUE_TYPE dconstroot
		= real_value_truncate (TYPE_MODE (type), dconst_third ());
	      tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
					    build_real (type, dconstroot));
	      return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
	    }
	}

      /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL)
	{
	  tree arg00 = CALL_EXPR_ARG (arg0, 0);
	  if (tree_expr_nonnegative_p (arg00))
	    {
	      tree arg01 = CALL_EXPR_ARG (arg0, 1);
	      tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
	      return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
	    }
	}
    }

  return NULL_TREE;
}
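
/* Examples (added commentary) of the pow folds above:

     pow (x, 0.5)         =>  sqrt (x)        [-funsafe-math-optimizations]
     pow (exp (x), y)     =>  exp (x * y)     [-funsafe-math-optimizations]
     pow (pow (x, y), z)  =>  pow (x, y * z)  iff x is known nonnegative

   Constant integer exponents are evaluated via real_powi, and even
   integer powers also permit sign stripping, e.g.
   pow (-x, 4.0) => pow (x, 4.0).  */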
/* Fold a builtin function call to powi, powif, or powil with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
		   tree arg0, tree arg1, tree type)
{
  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize pow(1.0,y) = 1.0.  */
  if (real_onep (arg0))
    return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);

  if (tree_fits_shwi_p (arg1))
    {
      HOST_WIDE_INT c = tree_to_shwi (arg1);

      /* Evaluate powi at compile-time.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && !TREE_OVERFLOW (arg0))
	{
	  REAL_VALUE_TYPE x;
	  x = TREE_REAL_CST (arg0);
	  real_powi (&x, TYPE_MODE (type), &x, c);
	  return build_real (type, x);
	}

      /* Optimize pow(x,0) = 1.0.  */
      if (c == 0)
	return omit_one_operand_loc (loc, type, build_real (type, dconst1),
				     arg0);

      /* Optimize pow(x,1) = x.  */
      if (c == 1)
	return arg0;

      /* Optimize pow(x,-1) = 1.0/x.  */
      if (c == -1)
	return fold_build2_loc (loc, RDIV_EXPR, type,
				build_real (type, dconst1), arg0);
    }

  return NULL_TREE;
}

/* A subroutine of fold_builtin to fold the various exponent
   functions.  Return NULL_TREE if no simplification can be made.
   FUNC is the corresponding MPFR exponent function.  */

static tree
fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
		       int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));
      tree res;

      /* Calculate the result when the argument is a constant.  */
      if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
	return res;

      /* Optimize expN(logN(x)) = x.  */
      if (flag_unsafe_math_optimizations)
	{
	  const enum built_in_function fcode = builtin_mathfn_code (arg);

	  if ((func == mpfr_exp
	       && (fcode == BUILT_IN_LOG
		   || fcode == BUILT_IN_LOGF
		   || fcode == BUILT_IN_LOGL))
	      || (func == mpfr_exp2
		  && (fcode == BUILT_IN_LOG2
		      || fcode == BUILT_IN_LOG2F
		      || fcode == BUILT_IN_LOG2L))
	      || (func == mpfr_exp10
		  && (fcode == BUILT_IN_LOG10
		      || fcode == BUILT_IN_LOG10F
		      || fcode == BUILT_IN_LOG10L)))
	    return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
	}
    }

  return NULL_TREE;
}

/* Fold function call to builtin memchr.  ARG1, ARG2 and LEN are the
   arguments to the call, and TYPE is its return type.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
{
  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, INTEGER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1;

      if (TREE_CODE (arg2) != INTEGER_CST
	  || !tree_fits_uhwi_p (len))
	return NULL_TREE;

      p1 = c_getstr (arg1);
      if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
	{
	  char c;
	  const char *r;
	  tree tem;

	  if (target_char_cast (arg2, &c))
	    return NULL_TREE;

	  r = (const char *) memchr (p1, c, tree_to_uhwi (len));

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (arg1), 0);

	  tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
	  return fold_convert_loc (loc, type, tem);
	}
      return NULL_TREE;
    }
}
/* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
				  arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* If all arguments are constant, and the value of len is not greater
     than the lengths of arg1 and arg2, evaluate at compile-time.  */
  if (tree_fits_uhwi_p (len) && p1 && p2
      && compare_tree_int (len, strlen (p1) + 1) <= 0
      && compare_tree_int (len, strlen (p2) + 1) <= 0)
    {
      const int r = memcmp (p1, p2, tree_to_uhwi (len));

      if (r > 0)
	return integer_one_node;
      else if (r < 0)
	return integer_minus_one_node;
      else
	return integer_zero_node;
    }

  /* If the len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
  if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg1)));
      tree ind2
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}

/* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return integer_zero_node;

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  if (p1 && p2)
    {
      const int i = strcmp (p1, p2);
      if (i < 0)
	return integer_minus_one_node;
      else if (i > 0)
	return integer_one_node;
      else
	return integer_zero_node;
    }

  /* If the second arg is "", return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0')
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      return fold_convert_loc (loc, integer_type_node,
			       build1 (INDIRECT_REF, cst_uchar_node,
				       fold_convert_loc (loc,
							 cst_uchar_ptr_node,
							 arg1)));
    }

  /* If the first arg is "", return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0')
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree temp
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg2)));
      return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
    }

  return NULL_TREE;
}

/* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
				  arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  if (tree_fits_uhwi_p (len) && p1 && p2)
    {
      const int i = strncmp (p1, p2, tree_to_uhwi (len));
      if (i > 0)
	return integer_one_node;
      else if (i < 0)
	return integer_minus_one_node;
      else
	return integer_zero_node;
    }

  /* If the second arg is "", and the length is greater than zero,
     return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      return fold_convert_loc (loc, integer_type_node,
			       build1 (INDIRECT_REF, cst_uchar_node,
				       fold_convert_loc (loc,
							 cst_uchar_ptr_node,
							 arg1)));
    }

  /* If the first arg is "", and the length is greater than zero,
     return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree temp = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg2)));
      return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
    }

  /* If the len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
  if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1 = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg1)));
      tree ind2 = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
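
/* Examples (added commentary) of the string-compare folds above:

     memcmp (p, p, n)         =>  0  (still evaluating n for side effects)
     strcmp (s, "")           =>  *(const unsigned char *) s
     strncmp ("ab", "ac", 2)  =>  -1  at compile time
     strncmp (s, t, 1)        =>  *(const unsigned char *) s
                                  - *(const unsigned char *) t  */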
/* Fold function call to builtin signbit, signbitf or signbitl with argument
   ARG.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_signbit (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* If ARG is a compile-time constant, determine the result.  */
  if (TREE_CODE (arg) == REAL_CST
      && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE c;

      c = TREE_REAL_CST (arg);
      return (REAL_VALUE_NEGATIVE (c)
	      ? build_one_cst (type)
	      : build_zero_cst (type));
    }

  /* If ARG is non-negative, the result is always zero.  */
  if (tree_expr_nonnegative_p (arg))
    return omit_one_operand_loc (loc, type, integer_zero_node, arg);

  /* If ARG's format doesn't have signed zeros, return "arg < 0.0".  */
  if (!HONOR_SIGNED_ZEROS (arg))
    return fold_convert (type,
			 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
					  build_real (TREE_TYPE (arg), dconst0)));

  return NULL_TREE;
}

/* Fold function call to builtin copysign, copysignf or copysignl with
   arguments ARG1 and ARG2.  Return NULL_TREE if no simplification can
   be made.  */

static tree
fold_builtin_copysign (location_t loc, tree fndecl,
		       tree arg1, tree arg2, tree type)
{
  tree tem;

  if (!validate_arg (arg1, REAL_TYPE)
      || !validate_arg (arg2, REAL_TYPE))
    return NULL_TREE;

  /* copysign(X,X) is X.  */
  if (operand_equal_p (arg1, arg2, 0))
    return fold_convert_loc (loc, type, arg1);

  /* If ARG1 and ARG2 are compile-time constants, determine the result.  */
  if (TREE_CODE (arg1) == REAL_CST
      && TREE_CODE (arg2) == REAL_CST
      && !TREE_OVERFLOW (arg1)
      && !TREE_OVERFLOW (arg2))
    {
      REAL_VALUE_TYPE c1, c2;

      c1 = TREE_REAL_CST (arg1);
      c2 = TREE_REAL_CST (arg2);
      /* c1.sign := c2.sign.  */
      real_copysign (&c1, &c2);
      return build_real (type, c1);
    }

  /* copysign(X, Y) is fabs(X) when Y is always non-negative.
     Remember to evaluate Y for side-effects.  */
  if (tree_expr_nonnegative_p (arg2))
    return omit_one_operand_loc (loc, type,
				 fold_build1_loc (loc, ABS_EXPR, type, arg1),
				 arg2);

  /* Strip sign changing operations for the first argument.  */
  tem = fold_strip_sign_ops (arg1);
  if (tem)
    return build_call_expr_loc (loc, fndecl, 2, tem, arg2);

  return NULL_TREE;
}
/* Fold a call to builtin isascii with argument ARG.  */

static tree
fold_builtin_isascii (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      /* Transform isascii(c) -> ((c & ~0x7f) == 0).  */
      arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
			 build_int_cst (integer_type_node,
					~ (unsigned HOST_WIDE_INT) 0x7f));
      return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
			      arg, integer_zero_node);
    }
}

/* Fold a call to builtin toascii with argument ARG.  */

static tree
fold_builtin_toascii (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Transform toascii(c) -> (c & 0x7f).  */
  return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
			  build_int_cst (integer_type_node, 0x7f));
}

/* Fold a call to builtin isdigit with argument ARG.  */

static tree
fold_builtin_isdigit (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9.  */
      /* According to the C standard, isdigit is unaffected by locale.
	 However, it definitely is affected by the target character set.  */
      unsigned HOST_WIDE_INT target_digit0
	= lang_hooks.to_target_charset ('0');

      if (target_digit0 == 0)
	return NULL_TREE;

      arg = fold_convert_loc (loc, unsigned_type_node, arg);
      arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
			 build_int_cst (unsigned_type_node, target_digit0));
      return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
			      build_int_cst (unsigned_type_node, 9));
    }
}
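
/* Example (added commentary): the character-class folds above avoid
   libc calls entirely:

     isascii (c)  =>  ((c & ~0x7f) == 0)
     toascii (c)  =>  (c & 0x7f)
     isdigit (c)  =>  ((unsigned) c - '0' <= 9)

   where '0' is taken from the target character set via the language
   hook, so cross compilers to non-ASCII targets stay correct.  */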
/* Fold a call to fabs, fabsf or fabsl with argument ARG.  */

static tree
fold_builtin_fabs (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  arg = fold_convert_loc (loc, type, arg);
  if (TREE_CODE (arg) == REAL_CST)
    return fold_abs_const (arg, type);
  return fold_build1_loc (loc, ABS_EXPR, type, arg);
}

/* Fold a call to abs, labs, llabs or imaxabs with argument ARG.  */

static tree
fold_builtin_abs (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  arg = fold_convert_loc (loc, type, arg);
  if (TREE_CODE (arg) == INTEGER_CST)
    return fold_abs_const (arg, type);
  return fold_build1_loc (loc, ABS_EXPR, type, arg);
}

/* Fold a fma operation with arguments ARG[012].  */

tree
fold_fma (location_t loc ATTRIBUTE_UNUSED,
	  tree type, tree arg0, tree arg1, tree arg2)
{
  if (TREE_CODE (arg0) == REAL_CST
      && TREE_CODE (arg1) == REAL_CST
      && TREE_CODE (arg2) == REAL_CST)
    return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);

  return NULL_TREE;
}

/* Fold a call to fma, fmaf, or fmal with arguments ARG[012].  */

static tree
fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
{
  if (validate_arg (arg0, REAL_TYPE)
      && validate_arg (arg1, REAL_TYPE)
      && validate_arg (arg2, REAL_TYPE))
    {
      tree tem = fold_fma (loc, type, arg0, arg1, arg2);
      if (tem)
	return tem;

      /* ??? Only expand to FMA_EXPR if it's directly supported.  */
      if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
	return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
    }
  return NULL_TREE;
}

/* Fold a call to builtin fmin or fmax.  */

static tree
fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
			tree type, bool max)
{
  if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
    {
      /* Calculate the result when the argument is a constant.  */
      tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));

      if (res)
	return res;

      /* If either argument is NaN, return the other one.  Avoid the
	 transformation if we get (and honor) a signalling NaN.  Using
	 omit_one_operand() ensures we create a non-lvalue.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && real_isnan (&TREE_REAL_CST (arg0))
	  && (! HONOR_SNANS (arg0)
	      || ! TREE_REAL_CST (arg0).signalling))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      if (TREE_CODE (arg1) == REAL_CST
	  && real_isnan (&TREE_REAL_CST (arg1))
	  && (! HONOR_SNANS (arg1)
	      || ! TREE_REAL_CST (arg1).signalling))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Transform fmin/fmax(x,x) -> x.  */
      if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR.  C99 requires these
	 functions to return the numeric arg if the other one is NaN.
	 These tree codes don't honor that, so only transform if
	 -ffinite-math-only is set.  C99 doesn't require -0.0 to be
	 handled, so we don't have to worry about it either.  */
      if (flag_finite_math_only)
	return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type, arg1));
    }
  return NULL_TREE;
}

/* Fold a call to builtin carg(a+bi) -> atan2(b,a).  */

static tree
fold_builtin_carg (location_t loc, tree arg, tree type)
{
  if (validate_arg (arg, COMPLEX_TYPE)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
    {
      tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);

      if (atan2_fn)
	{
	  tree new_arg = builtin_save_expr (arg);
	  tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
	  tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
	  return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
	}
    }

  return NULL_TREE;
}

/* Fold a call to builtin logb/ilogb.  */

static tree
fold_builtin_logb (location_t loc, tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
	{
	case rvc_nan:
	case rvc_inf:
	  /* If arg is Inf or NaN and we're logb, return it.  */
	  if (TREE_CODE (rettype) == REAL_TYPE)
	    {
	      /* For logb(-Inf) we have to return +Inf.  */
	      if (real_isinf (value) && real_isneg (value))
		{
		  REAL_VALUE_TYPE tem;
		  real_inf (&tem);
		  return build_real (rettype, tem);
		}
	      return fold_convert_loc (loc, rettype, arg);
	    }
	  /* Fall through... */
	case rvc_zero:
	  /* Zero may set errno and/or raise an exception for logb, also
	     for ilogb we don't know FP_ILOGB0.  */
	  return NULL_TREE;
	case rvc_normal:
	  /* For normal numbers, proceed iff radix == 2.  In GCC,
	     normalized significands are in the range [0.5, 1.0).  We
	     want the exponent as if they were [1.0, 2.0) so get the
	     exponent and subtract 1.  */
	  if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
	    return fold_convert_loc (loc, rettype,
				     build_int_cst (integer_type_node,
						    REAL_EXP (value)-1));
	  break;
	}
    }

  return NULL_TREE;
}

/* Fold a call to builtin significand, if radix == 2.  */

static tree
fold_builtin_significand (location_t loc, tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
	{
	case rvc_zero:
	case rvc_nan:
	case rvc_inf:
	  /* If arg is +-0, +-Inf or +-NaN, then return it.  */
	  return fold_convert_loc (loc, rettype, arg);
	case rvc_normal:
	  /* For normal numbers, proceed iff radix == 2.  */
	  if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
	    {
	      REAL_VALUE_TYPE result = *value;
	      /* In GCC, normalized significands are in the range [0.5,
		 1.0).  We want them to be [1.0, 2.0) so set the
		 exponent to 1.  */
	      SET_REAL_EXP (&result, 1);
	      return build_real (rettype, result);
	    }
	  break;
	}
    }

  return NULL_TREE;
}

/* Fold a call to builtin frexp, we can assume the base is 2.  */

static tree
fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      tree frac, exp;

      switch (value->cl)
	{
	case rvc_zero:
	  /* For +-0, return (*exp = 0, +-0).  */
	  exp = integer_zero_node;
	  frac = arg0;
	  break;
	case rvc_nan:
	case rvc_inf:
	  /* For +-NaN or +-Inf, *exp is unspecified, return arg0.  */
	  return omit_one_operand_loc (loc, rettype, arg0, arg1);
	case rvc_normal:
	  {
	    /* Since the frexp function always expects base 2, and in
	       GCC normalized significands are already in the range
	       [0.5, 1.0), we have exactly what frexp wants.  */
	    REAL_VALUE_TYPE frac_rvt = *value;
	    SET_REAL_EXP (&frac_rvt, 0);
	    frac = build_real (rettype, frac_rvt);
	    exp = build_int_cst (integer_type_node, REAL_EXP (value));
	  }
	  break;
	default:
	  gcc_unreachable ();
	}

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
    }

  return NULL_TREE;
}
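
/* Example (added commentary): because GCC keeps normalized
   significands in [0.5, 1.0), the constant fold above is exact, e.g.

     frexp (12.0, &e)  =>  (*e = 4, 0.75)

   emitted as a COMPOUND_EXPR that stores the exponent and yields the
   fraction.  */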
/* Fold a call to builtin ldexp or scalbn/scalbln.  If LDEXP is true
   then we can assume the base is two.  If it's false, then we have to
   check the mode of the TYPE parameter in certain cases.  */

static tree
fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
			    tree type, bool ldexp)
{
  if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0.  */
      if (real_zerop (arg0) || integer_zerop (arg1)
	  || (TREE_CODE (arg0) == REAL_CST
	      && !real_isfinite (&TREE_REAL_CST (arg0))))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* If both arguments are constant, then try to evaluate it.  */
      if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
	  && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
	  && tree_fits_shwi_p (arg1))
	{
	  /* Bound the maximum adjustment to twice the range of the
	     mode's valid exponents.  Use abs to ensure the range is
	     positive as a sanity check.  */
	  const long max_exp_adj = 2 *
	    labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
		  - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);

	  /* Get the user-requested adjustment.  */
	  const HOST_WIDE_INT req_exp_adj = tree_to_shwi (arg1);

	  /* The requested adjustment must be inside this range.  This
	     is a preliminary cap to avoid things like overflow, we
	     may still fail to compute the result for other reasons.  */
	  if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
	    {
	      REAL_VALUE_TYPE initial_result;

	      real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);

	      /* Ensure we didn't overflow.  */
	      if (! real_isinf (&initial_result))
		{
		  const REAL_VALUE_TYPE trunc_result
		    = real_value_truncate (TYPE_MODE (type), initial_result);

		  /* Only proceed if the target mode can hold the
		     resulting value.  */
		  if (REAL_VALUES_EQUAL (initial_result, trunc_result))
		    return build_real (type, trunc_result);
		}
	    }
	}
    }

  return NULL_TREE;
}

/* Fold a call to builtin modf.  */

static tree
fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      REAL_VALUE_TYPE trunc, frac;

      switch (value->cl)
	{
	case rvc_nan:
	case rvc_zero:
	  /* For +-NaN or +-0, return (*arg1 = arg0, arg0).  */
	  trunc = frac = *value;
	  break;
	case rvc_inf:
	  /* For +-Inf, return (*arg1 = arg0, +-0).  */
	  frac = dconst0;
	  frac.sign = value->sign;
	  trunc = *value;
	  break;
	case rvc_normal:
	  /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)).  */
	  real_trunc (&trunc, VOIDmode, value);
	  real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
	  /* If the original number was negative and already
	     integral, then the fractional part is -0.0.  */
	  if (value->sign && frac.cl == rvc_zero)
	    frac.sign = value->sign;
	  break;
	}

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
			      build_real (rettype, trunc));
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
			      build_real (rettype, frac));
    }

  return NULL_TREE;
}
/* Given a location LOC, an interclass builtin function decl FNDECL
   and its single argument ARG, return a folded expression computing
   the same, or NULL_TREE if we either couldn't or didn't want to fold
   (the latter happens if there's an RTL instruction available).  */

static tree
fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
{
  machine_mode mode;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
    return NULL_TREE;

  mode = TYPE_MODE (TREE_TYPE (arg));

  /* If there is no optab, try generic code.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
      tree result;

    CASE_FLT_FN (BUILT_IN_ISINF):
      {
	/* isinf(x) -> isgreater(fabs(x),DBL_MAX).  */
	tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&r, buf);
	result = build_call_expr (isgr_fn, 2,
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	return result;
      }
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_ISFINITE:
      {
	/* isfinite(x) -> islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&r, buf);
	result = build_call_expr (isle_fn, 2,
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	/*result = fold_build2_loc (loc, UNGT_EXPR,
				  TREE_TYPE (TREE_TYPE (fndecl)),
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
				  TREE_TYPE (TREE_TYPE (fndecl)),
				  result);*/
	return result;
      }
    case BUILT_IN_ISNORMAL:
      {
	/* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
	   islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
	tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE rmax, rmin;
	char buf[128];

	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&rmax, buf);
	sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
	real_from_string (&rmin, buf);
	arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
	result = build_call_expr (isle_fn, 2, arg,
				  build_real (type, rmax));
	result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
			      build_call_expr (isge_fn, 2, arg,
					       build_real (type, rmin)));
	return result;
      }
    default:
      break;
    }

  return NULL_TREE;
}
/* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
   ARG is the argument for the call.  */

static tree
fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  REAL_VALUE_TYPE r;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  switch (builtin_index)
    {
    case BUILT_IN_ISINF:
      if (!HONOR_INFINITIES (arg))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  if (real_isinf (&r))
	    return real_compare (GT_EXPR, &r, &dconst0)
		   ? integer_one_node : integer_minus_one_node;
	  else
	    return integer_zero_node;
	}

      return NULL_TREE;

    case BUILT_IN_ISINF_SIGN:
      {
	/* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
	/* In a boolean context, GCC will fold the inner COND_EXPR to
	   1.  So e.g. "if (isinf_sign(x))" would be folded to just
	   "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
	tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg),
					     BUILT_IN_SIGNBIT, 0);
	tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
	tree tmp = NULL_TREE;

	arg = builtin_save_expr (arg);

	if (signbit_fn && isinf_fn)
	  {
	    tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
	    tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);

	    signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
					    signbit_call, integer_zero_node);
	    isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
					  isinf_call, integer_zero_node);

	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
				   signbit_call, integer_minus_one_node,
				   integer_one_node);
	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
				   isinf_call, tmp,
				   integer_zero_node);
	  }

	return tmp;
      }

    case BUILT_IN_ISFINITE:
      if (!HONOR_NANS (arg)
	  && !HONOR_INFINITIES (arg))
	return omit_one_operand_loc (loc, type, integer_one_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  return real_isfinite (&r) ? integer_one_node : integer_zero_node;
	}

      return NULL_TREE;

    case BUILT_IN_ISNAN:
      if (!HONOR_NANS (arg))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  return real_isnan (&r) ? integer_one_node : integer_zero_node;
	}

      arg = builtin_save_expr (arg);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);

    default:
      gcc_unreachable ();
    }
}
/* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
   This builtin will generate code to return the appropriate floating
   point classification depending on the value of the floating point
   number passed in.  The possible return values must be supplied as
   int arguments to the call in the following order: FP_NAN, FP_INFINITE,
   FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipsis is for exactly
   one floating point argument which is "type generic".  */

static tree
fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
{
  tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
    arg, type, res, tmp;
  machine_mode mode;
  REAL_VALUE_TYPE r;
  char buf[128];

  /* Verify the required arguments in the original call.  */
  if (nargs != 6
      || !validate_arg (args[0], INTEGER_TYPE)
      || !validate_arg (args[1], INTEGER_TYPE)
      || !validate_arg (args[2], INTEGER_TYPE)
      || !validate_arg (args[3], INTEGER_TYPE)
      || !validate_arg (args[4], INTEGER_TYPE)
      || !validate_arg (args[5], REAL_TYPE))
    return NULL_TREE;

  fp_nan = args[0];
  fp_infinite = args[1];
  fp_normal = args[2];
  fp_subnormal = args[3];
  fp_zero = args[4];
  arg = args[5];
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (type);
  arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));

  /* fpclassify(x) ->
       isnan(x) ? FP_NAN :
	 (fabs(x) == Inf ? FP_INFINITE :
	   (fabs(x) >= DBL_MIN ? FP_NORMAL :
	     (x == 0 ? FP_ZERO : FP_SUBNORMAL))).  */

  tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
			 build_real (type, dconst0));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
			 tmp, fp_zero, fp_subnormal);

  sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
  real_from_string (&r, buf);
  tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
			 arg, build_real (type, r));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);

  if (HONOR_INFINITIES (mode))
    {
      real_inf (&r);
      tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
			     build_real (type, r));
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
			     fp_infinite, res);
    }

  if (HONOR_NANS (mode))
    {
      tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
    }

  return res;
}
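
/* Example (added commentary): for

     __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL,
			   FP_SUBNORMAL, FP_ZERO, x)

   the code above builds, innermost first, the equivalent of

     !(x == x)           ? FP_NAN :
     fabs (x) == Inf     ? FP_INFINITE :
     fabs (x) >= DBL_MIN ? FP_NORMAL :
     x == 0              ? FP_ZERO : FP_SUBNORMAL

   with the Inf and NaN legs dropped when the mode honors neither.  */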
/* Fold a call to an unordered comparison function such as
   __builtin_isgreater().  FNDECL is the FUNCTION_DECL for the function
   being called and ARG0 and ARG1 are the arguments for the call.
   UNORDERED_CODE and ORDERED_CODE are comparison codes that give
   the opposite of the desired result.  UNORDERED_CODE is used
   for modes that can hold NaNs and ORDERED_CODE is used for
   the rest.  */

static tree
fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
			    enum tree_code unordered_code,
			    enum tree_code ordered_code)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum tree_code code;
  tree type0, type1;
  enum tree_code code0, code1;
  tree cmp_type = NULL_TREE;

  type0 = TREE_TYPE (arg0);
  type1 = TREE_TYPE (arg1);

  code0 = TREE_CODE (type0);
  code1 = TREE_CODE (type1);

  if (code0 == REAL_TYPE && code1 == REAL_TYPE)
    /* Choose the wider of two real types.  */
    cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
      ? type0 : type1;
  else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
    cmp_type = type0;
  else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
    cmp_type = type1;

  arg0 = fold_convert_loc (loc, cmp_type, arg0);
  arg1 = fold_convert_loc (loc, cmp_type, arg1);

  if (unordered_code == UNORDERED_EXPR)
    {
      if (!HONOR_NANS (arg0))
	return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
    }

  code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
  return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
			  fold_build2_loc (loc, code, type, arg0, arg1));
}

/* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
   arithmetic if it can never overflow, or into internal functions that
   return both the result of the arithmetic and an overflowed boolean
   flag in a complex integer result, or some other check for overflow.  */

static tree
fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
			     tree arg0, tree arg1, tree arg2)
{
  enum internal_fn ifn = IFN_LAST;
  tree type = TREE_TYPE (TREE_TYPE (arg2));
  tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
  switch (fcode)
    {
    case BUILT_IN_ADD_OVERFLOW:
    case BUILT_IN_SADD_OVERFLOW:
    case BUILT_IN_SADDL_OVERFLOW:
    case BUILT_IN_SADDLL_OVERFLOW:
    case BUILT_IN_UADD_OVERFLOW:
    case BUILT_IN_UADDL_OVERFLOW:
    case BUILT_IN_UADDLL_OVERFLOW:
      ifn = IFN_ADD_OVERFLOW;
      break;
    case BUILT_IN_SUB_OVERFLOW:
    case BUILT_IN_SSUB_OVERFLOW:
    case BUILT_IN_SSUBL_OVERFLOW:
    case BUILT_IN_SSUBLL_OVERFLOW:
    case BUILT_IN_USUB_OVERFLOW:
    case BUILT_IN_USUBL_OVERFLOW:
    case BUILT_IN_USUBLL_OVERFLOW:
      ifn = IFN_SUB_OVERFLOW;
      break;
    case BUILT_IN_MUL_OVERFLOW:
    case BUILT_IN_SMUL_OVERFLOW:
    case BUILT_IN_SMULL_OVERFLOW:
    case BUILT_IN_SMULLL_OVERFLOW:
    case BUILT_IN_UMUL_OVERFLOW:
    case BUILT_IN_UMULL_OVERFLOW:
    case BUILT_IN_UMULLL_OVERFLOW:
      ifn = IFN_MUL_OVERFLOW;
      break;
    default:
      gcc_unreachable ();
    }
  tree ctype = build_complex_type (type);
  tree call = build_call_expr_internal_loc (loc, ifn, ctype,
					    2, arg0, arg1);
  tree tgt = save_expr (call);
  tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
  tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
  ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
  tree store
    = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
  return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
}
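
/* Example (added commentary): after the fold above,

     if (__builtin_add_overflow (a, b, &r)) ...

   becomes the equivalent of

     t = .ADD_OVERFLOW (a, b);   // complex-integer-valued internal fn
     r = REALPART (t);
     if ((_Bool) IMAGPART (t)) ...

   i.e. one internal call yields both the wrapped result and the
   overflow flag.  */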
9861 /* Fold a call to built-in function FNDECL with 0 arguments.
9862 This function returns NULL_TREE if no simplification was possible. */
9865 fold_builtin_0 (location_t loc
, tree fndecl
)
9867 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9868 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
9871 CASE_FLT_FN (BUILT_IN_INF
):
9872 case BUILT_IN_INFD32
:
9873 case BUILT_IN_INFD64
:
9874 case BUILT_IN_INFD128
:
9875 return fold_builtin_inf (loc
, type
, true);
9877 CASE_FLT_FN (BUILT_IN_HUGE_VAL
):
9878 return fold_builtin_inf (loc
, type
, false);
9880 case BUILT_IN_CLASSIFY_TYPE
:
9881 return fold_builtin_classify_type (NULL_TREE
);
/* Fold a call to built-in function FNDECL with 1 argument, ARG0.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    case BUILT_IN_CONSTANT_P:
      {
	tree val = fold_builtin_constant_p (arg0);

	/* Gimplification will pull the CALL_EXPR for the builtin out of
	   an if condition.  When not optimizing, we'll not CSE it back.
	   To avoid link error types of regressions, return false now.  */
	if (!val && !optimize)
	  val = integer_zero_node;

	return val;
      }

    case BUILT_IN_CLASSIFY_TYPE:
      return fold_builtin_classify_type (arg0);

    case BUILT_IN_STRLEN:
      return fold_builtin_strlen (loc, type, arg0);

    CASE_FLT_FN (BUILT_IN_FABS):
    case BUILT_IN_FABSD32:
    case BUILT_IN_FABSD64:
    case BUILT_IN_FABSD128:
      return fold_builtin_fabs (loc, arg0, type);

    case BUILT_IN_ABS:
    case BUILT_IN_LABS:
    case BUILT_IN_LLABS:
    case BUILT_IN_IMAXABS:
      return fold_builtin_abs (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_CONJ):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
      break;

    CASE_FLT_FN (BUILT_IN_CREAL):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return non_lvalue_loc (loc,
			       fold_build1_loc (loc, REALPART_EXPR, type, arg0));
      break;

    CASE_FLT_FN (BUILT_IN_CIMAG):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return non_lvalue_loc (loc,
			       fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
      break;

    CASE_FLT_FN (BUILT_IN_CCOS):
      return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);

    CASE_FLT_FN (BUILT_IN_CCOSH):
      return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);

    CASE_FLT_FN (BUILT_IN_CPROJ):
      return fold_builtin_cproj (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_CSIN):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_sin);
      break;

    CASE_FLT_FN (BUILT_IN_CSINH):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_sinh);
      break;

    CASE_FLT_FN (BUILT_IN_CTAN):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_tan);
      break;

    CASE_FLT_FN (BUILT_IN_CTANH):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_tanh);
      break;

    CASE_FLT_FN (BUILT_IN_CLOG):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_log);
      break;

    CASE_FLT_FN (BUILT_IN_CSQRT):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_sqrt);
      break;

    CASE_FLT_FN (BUILT_IN_CASIN):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_asin);
      break;

    CASE_FLT_FN (BUILT_IN_CACOS):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_acos);
      break;

    CASE_FLT_FN (BUILT_IN_CATAN):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_atan);
      break;

    CASE_FLT_FN (BUILT_IN_CASINH):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_asinh);
      break;

    CASE_FLT_FN (BUILT_IN_CACOSH):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_acosh);
      break;

    CASE_FLT_FN (BUILT_IN_CATANH):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_atanh);
      break;

    CASE_FLT_FN (BUILT_IN_CABS):
      return fold_builtin_cabs (loc, arg0, type, fndecl);

    CASE_FLT_FN (BUILT_IN_CARG):
      return fold_builtin_carg (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_SQRT):
      return fold_builtin_sqrt (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_CBRT):
      return fold_builtin_cbrt (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_ASIN):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_asin,
			     &dconstm1, &dconst1, true);
      break;

    CASE_FLT_FN (BUILT_IN_ACOS):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_acos,
			     &dconstm1, &dconst1, true);
      break;

    CASE_FLT_FN (BUILT_IN_ATAN):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_ASINH):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_ACOSH):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_acosh,
			     &dconst1, NULL, true);
      break;

    CASE_FLT_FN (BUILT_IN_ATANH):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_atanh,
			     &dconstm1, &dconst1, false);
      break;

    CASE_FLT_FN (BUILT_IN_SIN):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_COS):
      return fold_builtin_cos (loc, arg0, type, fndecl);

    CASE_FLT_FN (BUILT_IN_TAN):
      return fold_builtin_tan (arg0, type);

    CASE_FLT_FN (BUILT_IN_CEXP):
      return fold_builtin_cexp (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_CEXPI):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
      break;

    CASE_FLT_FN (BUILT_IN_SINH):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_COSH):
      return fold_builtin_cosh (loc, arg0, type, fndecl);

    CASE_FLT_FN (BUILT_IN_TANH):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_ERF):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_ERFC):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_TGAMMA):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_EXP):
      return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);

    CASE_FLT_FN (BUILT_IN_EXP2):
      return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);

    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
      return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);

    CASE_FLT_FN (BUILT_IN_EXPM1):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_LOG):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_log, &dconst0, NULL, false);
      break;

    CASE_FLT_FN (BUILT_IN_LOG2):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_log2, &dconst0, NULL, false);
      break;

    CASE_FLT_FN (BUILT_IN_LOG10):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_log10, &dconst0, NULL, false);
      break;

    CASE_FLT_FN (BUILT_IN_LOG1P):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_log1p,
			     &dconstm1, NULL, false);
      break;

    CASE_FLT_FN (BUILT_IN_J0):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_j0,
			     NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_J1):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_j1,
			     NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_Y0):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_y0,
			     &dconst0, NULL, false);
      break;

    CASE_FLT_FN (BUILT_IN_Y1):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_y1,
			     &dconst0, NULL, false);
      break;

    CASE_FLT_FN (BUILT_IN_NAN):
    case BUILT_IN_NAND32:
    case BUILT_IN_NAND64:
    case BUILT_IN_NAND128:
      return fold_builtin_nan (arg0, type, true);

    CASE_FLT_FN (BUILT_IN_NANS):
      return fold_builtin_nan (arg0, type, false);

    CASE_FLT_FN (BUILT_IN_FLOOR):
      return fold_builtin_floor (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_CEIL):
      return fold_builtin_ceil (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_TRUNC):
      return fold_builtin_trunc (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_ROUND):
      return fold_builtin_round (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return fold_trunc_transparent_mathfn (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
    CASE_FLT_FN (BUILT_IN_IROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      return fold_builtin_int_roundingfn (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_IRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      return fold_fixed_mathfn (loc, fndecl, arg0);

    case BUILT_IN_BSWAP16:
    case BUILT_IN_BSWAP32:
    case BUILT_IN_BSWAP64:
      return fold_builtin_bswap (fndecl, arg0);

    CASE_INT_FN (BUILT_IN_FFS):
    CASE_INT_FN (BUILT_IN_CLZ):
    CASE_INT_FN (BUILT_IN_CTZ):
    CASE_INT_FN (BUILT_IN_CLRSB):
    CASE_INT_FN (BUILT_IN_POPCOUNT):
    CASE_INT_FN (BUILT_IN_PARITY):
      return fold_builtin_bitop (fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_SIGNBIT):
      return fold_builtin_signbit (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
      return fold_builtin_significand (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_ILOGB):
    CASE_FLT_FN (BUILT_IN_LOGB):
      return fold_builtin_logb (loc, arg0, type);

    case BUILT_IN_ISASCII:
      return fold_builtin_isascii (loc, arg0);

    case BUILT_IN_TOASCII:
      return fold_builtin_toascii (loc, arg0);

    case BUILT_IN_ISDIGIT:
      return fold_builtin_isdigit (loc, arg0);

    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISFINITE:
      {
	tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
	if (ret)
	  return ret;
	return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
      }

    CASE_FLT_FN (BUILT_IN_ISINF):
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      {
	tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
	if (ret)
	  return ret;
	return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
      }

    case BUILT_IN_ISNORMAL:
      return fold_builtin_interclass_mathfn (loc, fndecl, arg0);

    case BUILT_IN_ISINF_SIGN:
      return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);

    CASE_FLT_FN (BUILT_IN_ISNAN):
    case BUILT_IN_ISNAND32:
    case BUILT_IN_ISNAND64:
    case BUILT_IN_ISNAND128:
      return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);

    case BUILT_IN_FREE:
      if (integer_zerop (arg0))
	return build_empty_stmt (loc);
      break;

    default:
      break;
    }

  return NULL_TREE;
}

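/* Illustrative sketch (not part of the GCC sources): with constant
   arguments the one-argument folds above complete at compile time,
   e.g.

     __builtin_strlen ("abc")  -> 3
     __builtin_fabs (-2.5)     -> 2.5
     free (0)                  -> empty statement

   each assuming the argument validates against the expected type.  */
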
/* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_JN):
      if (validate_arg (arg0, INTEGER_TYPE)
	  && validate_arg (arg1, REAL_TYPE))
	return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_YN):
      if (validate_arg (arg0, INTEGER_TYPE)
	  && validate_arg (arg1, REAL_TYPE))
	return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
				 &dconst0, false);
      break;

    CASE_FLT_FN (BUILT_IN_DREM):
    CASE_FLT_FN (BUILT_IN_REMAINDER):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, REAL_TYPE))
	return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
      break;

    CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
    CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, POINTER_TYPE))
	return do_mpfr_lgamma_r (arg0, arg1, type);
      break;

    CASE_FLT_FN (BUILT_IN_ATAN2):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, REAL_TYPE))
	return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
      break;

    CASE_FLT_FN (BUILT_IN_FDIM):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, REAL_TYPE))
	return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
      break;

    CASE_FLT_FN (BUILT_IN_HYPOT):
      return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_CPOW):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
	  && validate_arg (arg1, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
	return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
      break;

    CASE_FLT_FN (BUILT_IN_LDEXP):
      return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      return fold_builtin_load_exponent (loc, arg0, arg1,
					 type, /*ldexp=*/false);

    CASE_FLT_FN (BUILT_IN_FREXP):
      return fold_builtin_frexp (loc, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_MODF):
      return fold_builtin_modf (loc, arg0, arg1, type);

    case BUILT_IN_STRSTR:
      return fold_builtin_strstr (loc, arg0, arg1, type);

    case BUILT_IN_STRSPN:
      return fold_builtin_strspn (loc, arg0, arg1);

    case BUILT_IN_STRCSPN:
      return fold_builtin_strcspn (loc, arg0, arg1);

    case BUILT_IN_STRCHR:
    case BUILT_IN_INDEX:
      return fold_builtin_strchr (loc, arg0, arg1, type);

    case BUILT_IN_STRRCHR:
    case BUILT_IN_RINDEX:
      return fold_builtin_strrchr (loc, arg0, arg1, type);

    case BUILT_IN_STRCMP:
      return fold_builtin_strcmp (loc, arg0, arg1);

    case BUILT_IN_STRPBRK:
      return fold_builtin_strpbrk (loc, arg0, arg1, type);

    case BUILT_IN_EXPECT:
      return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);

    CASE_FLT_FN (BUILT_IN_POW):
      return fold_builtin_pow (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_POWI):
      return fold_builtin_powi (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_COPYSIGN):
      return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_FMIN):
      return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);

    CASE_FLT_FN (BUILT_IN_FMAX):
      return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);

    case BUILT_IN_ISGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNLE_EXPR, LE_EXPR);
    case BUILT_IN_ISGREATEREQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNLT_EXPR, LT_EXPR);
    case BUILT_IN_ISLESS:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNGE_EXPR, GE_EXPR);
    case BUILT_IN_ISLESSEQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNGT_EXPR, GT_EXPR);
    case BUILT_IN_ISLESSGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
    case BUILT_IN_ISUNORDERED:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNORDERED_EXPR,
					 NOP_EXPR);

    /* We do the folding for va_start in the expander.  */
    case BUILT_IN_VA_START:
      break;

    case BUILT_IN_OBJECT_SIZE:
      return fold_builtin_object_size (arg0, arg1);

    case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
      return fold_builtin_atomic_always_lock_free (arg0, arg1);

    case BUILT_IN_ATOMIC_IS_LOCK_FREE:
      return fold_builtin_atomic_is_lock_free (arg0, arg1);

    default:
      break;
    }

  return NULL_TREE;
}

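/* Illustrative sketch (not part of the GCC sources): the unordered
   comparison builtins above are lowered through
   fold_builtin_unordered_cmp, e.g.

     isgreater (x, y) -> !(x <= y)          when NaNs need not be honored
     isgreater (x, y) -> !UNLE_EXPR (x, y)  otherwise

   so no library call is emitted for them.  */
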
/* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
   and ARG2.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_3 (location_t loc, tree fndecl,
		tree arg0, tree arg1, tree arg2)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_SINCOS):
      return fold_builtin_sincos (loc, arg0, arg1, arg2);

    CASE_FLT_FN (BUILT_IN_FMA):
      return fold_builtin_fma (loc, arg0, arg1, arg2, type);

    CASE_FLT_FN (BUILT_IN_REMQUO):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, REAL_TYPE)
	  && validate_arg (arg2, POINTER_TYPE))
	return do_mpfr_remquo (arg0, arg1, arg2);
      break;

    case BUILT_IN_STRNCMP:
      return fold_builtin_strncmp (loc, arg0, arg1, arg2);

    case BUILT_IN_MEMCHR:
      return fold_builtin_memchr (loc, arg0, arg1, arg2, type);

    case BUILT_IN_BCMP:
    case BUILT_IN_MEMCMP:
      return fold_builtin_memcmp (loc, arg0, arg1, arg2);

    case BUILT_IN_EXPECT:
      return fold_builtin_expect (loc, arg0, arg1, arg2);

    case BUILT_IN_ADD_OVERFLOW:
    case BUILT_IN_SUB_OVERFLOW:
    case BUILT_IN_MUL_OVERFLOW:
    case BUILT_IN_SADD_OVERFLOW:
    case BUILT_IN_SADDL_OVERFLOW:
    case BUILT_IN_SADDLL_OVERFLOW:
    case BUILT_IN_SSUB_OVERFLOW:
    case BUILT_IN_SSUBL_OVERFLOW:
    case BUILT_IN_SSUBLL_OVERFLOW:
    case BUILT_IN_SMUL_OVERFLOW:
    case BUILT_IN_SMULL_OVERFLOW:
    case BUILT_IN_SMULLL_OVERFLOW:
    case BUILT_IN_UADD_OVERFLOW:
    case BUILT_IN_UADDL_OVERFLOW:
    case BUILT_IN_UADDLL_OVERFLOW:
    case BUILT_IN_USUB_OVERFLOW:
    case BUILT_IN_USUBL_OVERFLOW:
    case BUILT_IN_USUBLL_OVERFLOW:
    case BUILT_IN_UMUL_OVERFLOW:
    case BUILT_IN_UMULL_OVERFLOW:
    case BUILT_IN_UMULLL_OVERFLOW:
      return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);

    default:
      break;
    }
  return NULL_TREE;
}

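/* Illustrative sketch (not part of the GCC sources): with all-constant
   arguments the three-argument folds finish at compile time, e.g.
   memcmp ("ab", "ab", 2) folds to 0, and strncmp ("abc", "abd", 2)
   folds to 0 as well, since only the first two characters are
   compared.  */
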
/* Fold a call to built-in function FNDECL.  ARGS is an array of NARGS
   arguments.  IGNORE is true if the result of the
   function call is ignored.  This function returns NULL_TREE if no
   simplification was possible.  */

tree
fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
{
  tree ret = NULL_TREE;

  switch (nargs)
    {
    case 0:
      ret = fold_builtin_0 (loc, fndecl);
      break;
    case 1:
      ret = fold_builtin_1 (loc, fndecl, args[0]);
      break;
    case 2:
      ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
      break;
    case 3:
      ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
      break;
    default:
      ret = fold_builtin_varargs (loc, fndecl, args, nargs);
      break;
    }
  if (ret)
    {
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}

/* Construct a new CALL_EXPR to FNDECL using the tail of the argument
   list ARGS along with N new arguments in NEWARGS.  SKIP is the number
   of arguments in ARGS to be omitted.  OLDNARGS is the number of
   elements in ARGS.  */

static tree
rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
			  int skip, tree fndecl, int n, va_list newargs)
{
  int nargs = oldnargs - skip + n;
  tree *buffer;

  if (n > 0)
    {
      int i, j;

      buffer = XALLOCAVEC (tree, nargs);
      for (i = 0; i < n; i++)
	buffer[i] = va_arg (newargs, tree);
      for (j = skip; j < oldnargs; j++, i++)
	buffer[i] = args[j];
    }
  else
    buffer = args + skip;

  return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
}

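/* Illustrative sketch (not part of the GCC sources): to rewrite
   foo (a, b, c) into bar (x, c), a caller would pass OLDNARGS == 3,
   SKIP == 2 (drop A and B) and N == 1 with X in NEWARGS; the two loops
   above then fill BUFFER with { x, c } before building the new
   CALL_EXPR.  */
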
/* Return true if FNDECL shouldn't be folded right now.
   If a built-in function has an inline attribute always_inline
   wrapper, defer folding it after always_inline functions have
   been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
   might not be performed.  */

bool
avoid_folding_inline_builtin (tree fndecl)
{
  return (DECL_DECLARED_INLINE_P (fndecl)
	  && DECL_DISREGARD_INLINE_LIMITS (fndecl)
	  && cfun
	  && !cfun->always_inline_functions_inlined
	  && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
}

/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_expr (location_t loc, tree exp, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = get_callee_fndecl (exp);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
	 yet.  Defer folding until we see all the arguments
	 (after inlining).  */
      && !CALL_EXPR_VA_ARG_PACK (exp))
    {
      int nargs = call_expr_nargs (exp);

      /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
	 instead last argument is __builtin_va_arg_pack ().  Defer folding
	 even in that case, until arguments are finalized.  */
      if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
	{
	  tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
	  if (fndecl2
	      && TREE_CODE (fndecl2) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
	    return NULL_TREE;
	}

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;

      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
				     CALL_EXPR_ARGP (exp), ignore);
      else
	{
	  tree *args = CALL_EXPR_ARGP (exp);
	  ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	  if (ret)
	    return ret;
	}
    }
  return NULL_TREE;
}

/* Fold a CALL_EXPR with type TYPE with FN as the function expression.
   N arguments are passed in the array ARGARRAY.  Return a folded
   expression or NULL_TREE if no simplification was possible.  */

tree
fold_builtin_call_array (location_t loc, tree,
			 tree fn,
			 int n,
			 tree *argarray)
{
  if (TREE_CODE (fn) != ADDR_EXPR)
    return NULL_TREE;

  tree fndecl = TREE_OPERAND (fn, 0);
  if (TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl))
    {
      /* If last argument is __builtin_va_arg_pack (), arguments to this
	 function are not finalized yet.  Defer folding until they are.  */
      if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
	{
	  tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
	  if (fndecl2
	      && TREE_CODE (fndecl2) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
	    return NULL_TREE;
	}
      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	return targetm.fold_builtin (fndecl, n, argarray, false);
      else
	return fold_builtin_n (loc, fndecl, argarray, n, false);
    }

  return NULL_TREE;
}

/* Construct a new CALL_EXPR using the tail of the argument list of EXP
   along with N new arguments specified as the "..." parameters.  SKIP
   is the number of arguments in EXP to be omitted.  This function is used
   to do varargs-to-varargs transformations.  */

static tree
rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
{
  va_list ap;
  tree t;

  va_start (ap, n);
  t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
				CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
  va_end (ap);

  return t;
}

/* Validate a single argument ARG against a tree code CODE representing
   a type.  */

static bool
validate_arg (const_tree arg, enum tree_code code)
{
  if (!arg)
    return false;
  else if (code == POINTER_TYPE)
    return POINTER_TYPE_P (TREE_TYPE (arg));
  else if (code == INTEGER_TYPE)
    return INTEGRAL_TYPE_P (TREE_TYPE (arg));
  return code == TREE_CODE (TREE_TYPE (arg));
}

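/* Illustrative sketch (not part of the GCC sources):
   validate_arg (arg, POINTER_TYPE) accepts any pointer-typed tree and
   validate_arg (arg, INTEGER_TYPE) any integral type (enums and
   booleans included); only other codes require an exact TREE_CODE
   match on the argument's type.  */
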
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.

   This is the GIMPLE version of validate_arglist.  Eventually we want to
   completely convert builtins.c to work from GIMPLEs and the tree based
   validate_arglist will then be removed.  */

bool
validate_gimple_arglist (const gcall *call, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_tree arg;
  size_t i;

  va_start (ap, call);
  i = 0;

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = (i == gimple_call_num_args (call));
	  goto end;
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = gimple_call_arg (call, i++);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}

/* Default target-specific builtin expander that does nothing.  */

rtx
default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
			rtx target ATTRIBUTE_UNUSED,
			rtx subtarget ATTRIBUTE_UNUSED,
			machine_mode mode ATTRIBUTE_UNUSED,
			int ignore ATTRIBUTE_UNUSED)
{
  return NULL_RTX;
}

/* Returns true if EXP represents data that would potentially reside
   in a readonly section.  */

bool
readonly_data_expr (tree exp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) != ADDR_EXPR)
    return false;

  exp = get_base_address (TREE_OPERAND (exp, 0));
  if (!exp)
    return false;

  /* Make sure we call decl_readonly_section only for trees it
     can handle (since it returns true for everything it doesn't
     understand).  */
  if (TREE_CODE (exp) == STRING_CST
      || TREE_CODE (exp) == CONSTRUCTOR
      || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
    return decl_readonly_section (exp, 0);
  else
    return false;
}

/* Simplify a call to the strstr builtin.  S1 and S2 are the arguments
   to the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1, *p2;

      p2 = c_getstr (s2);
      if (p2 == NULL)
	return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
	{
	  const char *r = strstr (p1, p2);
	  tree tem;

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (s1), 0);

	  /* Return an offset into the constant string argument.  */
	  tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
	  return fold_convert_loc (loc, type, tem);
	}

      /* The argument is const char *, and the result is char *, so we need
	 a type conversion here to avoid a warning.  */
      if (p2[0] == '\0')
	return fold_convert_loc (loc, type, s1);

      if (p2[1] != '\0')
	return NULL_TREE;

      fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (!fn)
	return NULL_TREE;

      /* New argument list transforming strstr(s1, s2) to
	 strchr(s1, s2[0]).  */
      return build_call_expr_loc (loc, fn, 2, s1,
				  build_int_cst (integer_type_node, p2[0]));
    }
}

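/* Illustrative sketch (not part of the GCC sources) of the strstr
   folds above:

     strstr ("hello", "lo") -> "hello" + 3       both strings constant
     strstr (s, "")         -> (char *) s        empty needle
     strstr (s, "l")        -> strchr (s, 'l')   one-character needle
*/
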
/* Simplify a call to the strchr builtin.  S1 and S2 are the arguments to
   the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1;

      if (TREE_CODE (s2) != INTEGER_CST)
	return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
	{
	  char c;
	  const char *r;
	  tree tem;

	  if (target_char_cast (s2, &c))
	    return NULL_TREE;

	  r = strchr (p1, c);

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (s1), 0);

	  /* Return an offset into the constant string argument.  */
	  tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
	  return fold_convert_loc (loc, type, tem);
	}
      return NULL_TREE;
    }
}

/* Simplify a call to the strrchr builtin.  S1 and S2 are the arguments to
   the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1;

      if (TREE_CODE (s2) != INTEGER_CST)
	return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
	{
	  char c;
	  const char *r;
	  tree tem;

	  if (target_char_cast (s2, &c))
	    return NULL_TREE;

	  r = strrchr (p1, c);

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (s1), 0);

	  /* Return an offset into the constant string argument.  */
	  tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
	  return fold_convert_loc (loc, type, tem);
	}

      if (! integer_zerop (s2))
	return NULL_TREE;

      fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (!fn)
	return NULL_TREE;

      /* Transform strrchr(s1, '\0') to strchr(s1, '\0').  */
      return build_call_expr_loc (loc, fn, 2, s1, s2);
    }
}

/* Simplify a call to the strpbrk builtin.  S1 and S2 are the arguments
   to the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1, *p2;

      p2 = c_getstr (s2);
      if (p2 == NULL)
	return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
	{
	  const char *r = strpbrk (p1, p2);
	  tree tem;

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (s1), 0);

	  /* Return an offset into the constant string argument.  */
	  tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
	  return fold_convert_loc (loc, type, tem);
	}

      if (p2[0] == '\0')
	/* strpbrk(x, "") == NULL.
	   Evaluate and ignore s1 in case it had side-effects.  */
	return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);

      if (p2[1] != '\0')
	return NULL_TREE;  /* Really call strpbrk.  */

      fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (!fn)
	return NULL_TREE;

      /* New argument list transforming strpbrk(s1, s2) to
	 strchr(s1, s2[0]).  */
      return build_call_expr_loc (loc, fn, 2, s1,
				  build_int_cst (integer_type_node, p2[0]));
    }
}

/* Simplify a call to the strspn builtin.  S1 and S2 are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strspn (location_t loc, tree s1, tree s2)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);

      /* If both arguments are constants, evaluate at compile-time.  */
      if (p1 && p2)
	{
	  const size_t r = strspn (p1, p2);
	  return build_int_cst (size_type_node, r);
	}

      /* If either argument is "", return NULL_TREE.  */
      if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
	/* Evaluate and ignore both arguments in case either one has
	   side-effects.  */
	return omit_two_operands_loc (loc, size_type_node, size_zero_node,
				      s1, s2);
      return NULL_TREE;
    }
}

/* Simplify a call to the strcspn builtin.  S1 and S2 are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strcspn (location_t loc, tree s1, tree s2)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);

      /* If both arguments are constants, evaluate at compile-time.  */
      if (p1 && p2)
	{
	  const size_t r = strcspn (p1, p2);
	  return build_int_cst (size_type_node, r);
	}

      /* If the first argument is "", return NULL_TREE.  */
      if (p1 && *p1 == '\0')
	{
	  /* Evaluate and ignore argument s2 in case it has
	     side-effects.  */
	  return omit_one_operand_loc (loc, size_type_node,
				       size_zero_node, s2);
	}

      /* If the second argument is "", return __builtin_strlen(s1).  */
      if (p2 && *p2 == '\0')
	{
	  tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);

	  /* If the replacement _DECL isn't initialized, don't do the
	     transformation.  */
	  if (!fn)
	    return NULL_TREE;

	  return build_call_expr_loc (loc, fn, 1, s1);
	}
      return NULL_TREE;
    }
}

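/* Illustrative sketch (not part of the GCC sources) of the strcspn
   folds above:

     strcspn ("abcde", "dx") -> 3                       both constant
     strcspn ("", s2)        -> 0   (s2 still evaluated for side-effects)
     strcspn (s1, "")        -> __builtin_strlen (s1)
*/
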
/* Fold the next_arg or va_start call EXP.  Returns true if there was an error
   produced.  False otherwise.  This is done so that we don't output the error
   or warning twice or three times.  */

bool
fold_builtin_next_arg (tree exp, bool va_start_p)
{
  tree fntype = TREE_TYPE (current_function_decl);
  int nargs = call_expr_nargs (exp);
  tree arg;
  /* There is good chance the current input_location points inside the
     definition of the va_start macro (perhaps on the token for
     builtin) in a system header, so warnings will not be emitted.
     Use the location in real source code.  */
  source_location current_location =
    linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
					      NULL);

  if (!stdarg_p (fntype))
    {
      error ("%<va_start%> used in function with fixed args");
      return true;
    }

  if (va_start_p)
    {
      if (va_start_p && (nargs != 2))
	{
	  error ("wrong number of arguments to function %<va_start%>");
	  return true;
	}
      arg = CALL_EXPR_ARG (exp, 1);
    }
  /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
     when we checked the arguments and if needed issued a warning.  */
  else
    {
      if (nargs == 0)
	{
	  /* Evidently an out of date version of <stdarg.h>; can't validate
	     va_start's second argument, but can still work as intended.  */
	  warning_at (current_location,
		      OPT_Wvarargs,
		      "%<__builtin_next_arg%> called without an argument");
	  return true;
	}
      else if (nargs > 1)
	{
	  error ("wrong number of arguments to function %<__builtin_next_arg%>");
	  return true;
	}
      arg = CALL_EXPR_ARG (exp, 0);
    }

  if (TREE_CODE (arg) == SSA_NAME)
    arg = SSA_NAME_VAR (arg);

  /* We destructively modify the call to be __builtin_va_start (ap, 0)
     or __builtin_next_arg (0) the first time we see it, after checking
     the arguments and if needed issuing a warning.  */
  if (!integer_zerop (arg))
    {
      tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));

      /* Strip off all nops for the sake of the comparison.  This
	 is not quite the same as STRIP_NOPS.  It does more.
	 We must also strip off INDIRECT_EXPR for C++ reference
	 parameters.  */
      while (CONVERT_EXPR_P (arg)
	     || TREE_CODE (arg) == INDIRECT_REF)
	arg = TREE_OPERAND (arg, 0);
      if (arg != last_parm)
	{
	  /* FIXME: Sometimes with the tree optimizers we can end up with
	     something that is not the last argument even though the user
	     used the last argument.  We just warn and set the arg to be
	     the last argument so that we will get wrong code because of
	     it.  */
	  warning_at (current_location,
		      OPT_Wvarargs,
		      "second parameter of %<va_start%> not last named argument");
	}

      /* Undefined by C99 7.15.1.4p4 (va_start):
	 "If the parameter parmN is declared with the register storage
	 class, with a function or array type, or with a type that is
	 not compatible with the type that results after application of
	 the default argument promotions, the behavior is undefined."  */
      else if (DECL_REGISTER (arg))
	{
	  warning_at (current_location,
		      OPT_Wvarargs,
		      "undefined behaviour when second parameter of "
		      "%<va_start%> is declared with %<register%> storage");
	}

      /* We want to verify the second parameter just once before the tree
	 optimizers are run and then avoid keeping it in the tree,
	 as otherwise we could warn even for correct code like:
	 void foo (int i, ...)
	 { va_list ap; i++; va_start (ap, i); va_end (ap); }  */
      if (va_start_p)
	CALL_EXPR_ARG (exp, 1) = integer_zero_node;
      else
	CALL_EXPR_ARG (exp, 0) = integer_zero_node;
    }
  return false;
}

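/* Illustrative sketch (not part of the GCC sources): in

     void f (int a, int b, ...)
     { va_list ap; va_start (ap, a); ... }

   the second argument of va_start is not the last named parameter, so
   the check above emits the -Wvarargs warning before the argument is
   replaced by zero.  */
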
/* Expand a call EXP to __builtin_object_size.  */

static rtx
expand_builtin_object_size (tree exp)
{
  tree ost;
  int object_size_type;
  tree fndecl = get_callee_fndecl (exp);

  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      error ("%Kfirst argument of %D must be a pointer, second integer constant",
	     exp, fndecl);
      expand_builtin_trap ();
      return const0_rtx;
    }

  ost = CALL_EXPR_ARG (exp, 1);
  STRIP_NOPS (ost);

  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    {
      error ("%Klast argument of %D is not integer constant between 0 and 3",
	     exp, fndecl);
      expand_builtin_trap ();
      return const0_rtx;
    }

  object_size_type = tree_to_shwi (ost);

  return object_size_type < 2 ? constm1_rtx : const0_rtx;
}

/* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   FCODE is the BUILT_IN_* to use.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
			   enum built_in_function fcode)
{
  tree dest, src, len, size;

  if (!validate_arglist (exp,
			 POINTER_TYPE,
			 fcode == BUILT_IN_MEMSET_CHK
			 ? INTEGER_TYPE : POINTER_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dest = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);
  len = CALL_EXPR_ARG (exp, 2);
  size = CALL_EXPR_ARG (exp, 3);

  if (! tree_fits_uhwi_p (size))
    return NULL_RTX;

  if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
    {
      tree fn;

      if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
	{
	  warning_at (tree_nonartificial_location (exp),
		      0, "%Kcall to %D will always overflow destination buffer",
		      exp, get_callee_fndecl (exp));
	  return NULL_RTX;
	}

      fn = NULL_TREE;
      /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
	 mem{cpy,pcpy,move,set} is available.  */
      switch (fcode)
	{
	case BUILT_IN_MEMCPY_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
	  break;
	case BUILT_IN_MEMPCPY_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
	  break;
	case BUILT_IN_MEMMOVE_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
	  break;
	case BUILT_IN_MEMSET_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMSET);
	  break;
	default:
	  break;
	}

      if (! fn)
	return NULL_RTX;

      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_expr (fn, target, mode, EXPAND_NORMAL);
    }
  else if (fcode == BUILT_IN_MEMSET_CHK)
    return NULL_RTX;
  else
    {
      unsigned int dest_align = get_pointer_alignment (dest);

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If SRC and DEST are the same (and not volatile), do nothing.  */
      if (operand_equal_p (src, dest, 0))
	{
	  tree expr;

	  if (fcode != BUILT_IN_MEMPCPY_CHK)
	    {
	      /* Evaluate and ignore LEN in case it has side-effects.  */
	      expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
	      return expand_expr (dest, target, mode, EXPAND_NORMAL);
	    }

	  expr = fold_build_pointer_plus (dest, len);
	  return expand_expr (expr, target, mode, EXPAND_NORMAL);
	}

      /* __memmove_chk special case.  */
      if (fcode == BUILT_IN_MEMMOVE_CHK)
	{
	  unsigned int src_align = get_pointer_alignment (src);

	  if (src_align == 0)
	    return NULL_RTX;

	  /* If src is categorized for a readonly section we can use
	     normal __memcpy_chk.  */
	  if (readonly_data_expr (src))
	    {
	      tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return NULL_RTX;
	      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
					  dest, src, len, size);
	      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
	      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
	      return expand_expr (fn, target, mode, EXPAND_NORMAL);
	    }
	}
      return NULL_RTX;
    }
}

/* Emit warning if a buffer overflow is detected at compile time.  */

static void
maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
{
  int is_strlen = 0;
  tree len, size;
  location_t loc = tree_nonartificial_location (exp);

  switch (fcode)
    {
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
    /* For __strcat_chk the warning will be emitted only if overflowing
       by at least strlen (dest) + 1 bytes.  */
    case BUILT_IN_STRCAT_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 2);
      is_strlen = 1;
      break;
    case BUILT_IN_STRNCAT_CHK:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      len = CALL_EXPR_ARG (exp, 2);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    default:
      gcc_unreachable ();
    }

  if (!len || !size)
    return;

  if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
    return;

  if (is_strlen)
    {
      len = c_strlen (len, 1);
      if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
	return;
    }
  else if (fcode == BUILT_IN_STRNCAT_CHK)
    {
      tree src = CALL_EXPR_ARG (exp, 1);
      if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
	return;
      src = c_strlen (src, 1);
      if (! src || ! tree_fits_uhwi_p (src))
	{
	  warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
		      exp, get_callee_fndecl (exp));
	  return;
	}
      else if (tree_int_cst_lt (src, size))
	return;
    }
  else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
    return;

  warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
	      exp, get_callee_fndecl (exp));
}

/* Emit warning if a buffer overflow is detected at compile time
   in __sprintf_chk/__vsprintf_chk calls.  */

static void
maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
{
  tree size, len, fmt;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return;
  size = CALL_EXPR_ARG (exp, 2);
  fmt = CALL_EXPR_ARG (exp, 3);

  if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
    return;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return;

  if (!init_target_chars ())
    return;

  /* If the format doesn't contain % args or %%, we know its size.  */
  if (strchr (fmt_str, target_percent) == 0)
    len = build_int_cstu (size_type_node, strlen (fmt_str));
  /* If the format is "%s" and first ... argument is a string literal,
     we know it too.  */
  else if (fcode == BUILT_IN_SPRINTF_CHK
	   && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree arg;

      if (nargs < 5)
	return;
      arg = CALL_EXPR_ARG (exp, 4);
      if (! POINTER_TYPE_P (TREE_TYPE (arg)))
	return;

      len = c_strlen (arg, 1);
      if (!len || ! tree_fits_uhwi_p (len))
	return;
    }
  else
    return;

  if (! tree_int_cst_lt (len, size))
    warning_at (tree_nonartificial_location (exp),
		0, "%Kcall to %D will always overflow destination buffer",
		exp, get_callee_fndecl (exp));
}

/* Emit warning if a free is called with address of a variable.  */

static void
maybe_emit_free_warning (tree exp)
{
  tree arg = CALL_EXPR_ARG (exp, 0);

  STRIP_NOPS (arg);
  if (TREE_CODE (arg) != ADDR_EXPR)
    return;

  arg = get_base_address (TREE_OPERAND (arg, 0));
  if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
    return;

  if (SSA_VAR_P (arg))
    warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
		"%Kattempt to free a non-heap object %qD", exp, arg);
  else
    warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
		"%Kattempt to free a non-heap object", exp);
}

/* Fold a call to __builtin_object_size with arguments PTR and OST,
   if possible.  */

static tree
fold_builtin_object_size (tree ptr, tree ost)
{
  unsigned HOST_WIDE_INT bytes;
  int object_size_type;

  if (!validate_arg (ptr, POINTER_TYPE)
      || !validate_arg (ost, INTEGER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (ost);

  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    return NULL_TREE;

  object_size_type = tree_to_shwi (ost);

  /* __builtin_object_size doesn't evaluate side-effects in its arguments;
     if there are any side-effects, it returns (size_t) -1 for types 0 and 1
     and (size_t) 0 for types 2 and 3.  */
  if (TREE_SIDE_EFFECTS (ptr))
    return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      bytes = compute_builtin_object_size (ptr, object_size_type);
      if (wi::fits_to_tree_p (bytes, size_type_node))
	return build_int_cstu (size_type_node, bytes);
    }
  else if (TREE_CODE (ptr) == SSA_NAME)
    {
      /* If object size is not known yet, delay folding until
	 later.  Maybe subsequent passes will help determining
	 it.  */
      bytes = compute_builtin_object_size (ptr, object_size_type);
      if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
	  && wi::fits_to_tree_p (bytes, size_type_node))
	return build_int_cstu (size_type_node, bytes);
    }

  return NULL_TREE;
}

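/* Illustrative sketch (not part of the GCC sources): given
     char buf[64];
   __builtin_object_size (&buf[16], 0) folds here to 48, while a
   pointer whose pointed-to size cannot be determined is left for
   expand_builtin_object_size, which yields (size_t) -1 for types 0
   and 1 and 0 for types 2 and 3.  */
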
/* Builtins with folding operations that operate on "..." arguments
   need special handling; we need to store the arguments in a convenient
   data structure before attempting any folding.  Fortunately there are
   only a few builtins that fall into this category.  FNDECL is the
   function, EXP is the CALL_EXPR for the call.  */

static tree
fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  tree ret = NULL_TREE;

  switch (fcode)
    {
    case BUILT_IN_FPCLASSIFY:
      ret = fold_builtin_fpclassify (loc, args, nargs);
      break;

    default:
      break;
    }
  if (ret)
    {
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}

/* Initialize format string characters in the target charset.  */

bool
init_target_chars (void)
{
  static bool init;
  if (!init)
    {
      target_newline = lang_hooks.to_target_charset ('\n');
      target_percent = lang_hooks.to_target_charset ('%');
      target_c = lang_hooks.to_target_charset ('c');
      target_s = lang_hooks.to_target_charset ('s');
      if (target_newline == 0 || target_percent == 0 || target_c == 0
	  || target_s == 0)
	return false;

      target_percent_c[0] = target_percent;
      target_percent_c[1] = target_c;
      target_percent_c[2] = '\0';

      target_percent_s[0] = target_percent;
      target_percent_s[1] = target_s;
      target_percent_s[2] = '\0';

      target_percent_s_newline[0] = target_percent;
      target_percent_s_newline[1] = target_s;
      target_percent_s_newline[2] = target_newline;
      target_percent_s_newline[3] = '\0';

      init = true;
    }
  return true;
}

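/* Illustrative sketch (not part of the GCC sources): after the
   initialization above, target_percent_s holds "%s" encoded in the
   *target* character set, so format-string folds such as
   printf ("%s\n", x) -> puts (x) still match when cross compiling to
   a target whose charset differs from the host's (e.g. EBCDIC).  */
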
/* Helper function for do_mpfr_arg*().  Ensure M is a normal number
   and no overflow/underflow occurred.  INEXACT is true if M was not
   exactly calculated.  TYPE is the tree type for the result.  This
   function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail.  */

static tree
do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
      && (!flag_rounding_math || !inexact))
    {
      REAL_VALUE_TYPE rr;

      real_from_mpfr (&rr, m, type, GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpfr_t is not, then we underflowed in the
	 conversion.  */
      if (real_isfinite (&rr)
	  && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
	{
	  REAL_VALUE_TYPE rmode;

	  real_convert (&rmode, TYPE_MODE (type), &rr);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (real_identical (&rmode, &rr))
	    return build_real (type, rmode);
	}
    }
  return NULL_TREE;
}

/* Helper function for do_mpc_arg*().  Ensure M is a normal complex
   number and no overflow/underflow occurred.  INEXACT is true if M
   was not exactly calculated.  TYPE is the tree type for the result.
   This function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail; if
   FORCE_CONVERT is true, bypass the checks.  */

static tree
do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (force_convert
      || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
	  && !mpfr_overflow_p () && !mpfr_underflow_p ()
	  && (!flag_rounding_math || !inexact)))
    {
      REAL_VALUE_TYPE re, im;

      real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
      real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpfr_t is not, then we underflowed in the
	 conversion.  */
      if (force_convert
	  || (real_isfinite (&re) && real_isfinite (&im)
	      && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
	      && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
	{
	  REAL_VALUE_TYPE re_mode, im_mode;

	  real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
	  real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (force_convert
	      || (real_identical (&re_mode, &re)
		  && real_identical (&im_mode, &im)))
	    return build_complex (type, build_real (TREE_TYPE (type), re_mode),
				  build_real (TREE_TYPE (type), im_mode));
	}
    }
  return NULL_TREE;
}

/* If argument ARG is a REAL_CST, call the one-argument mpfr function
   FUNC on it and return the resulting value as a tree with type TYPE.
   If MIN and/or MAX are not NULL, then the supplied ARG must be
   within those bounds.  If INCLUSIVE is true, then MIN/MAX are
   acceptable values, otherwise they are not.  The mpfr precision is
   set to the precision of TYPE.  We assume that function FUNC returns
   zero if the result could be calculated exactly within the requested
   precision.  */

static tree
do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
	      const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
	      bool inclusive)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      if (real_isfinite (ra)
	  && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min))
	  && (!max || real_compare (inclusive ? LE_EXPR : LT_EXPR, ra, max)))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  int inexact;
	  mpfr_t m;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = func (m, m, rnd);
	  result = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	}
    }

  return result;
}

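/* Illustrative sketch (not part of the GCC sources): a call such as
   sin (1.0) with a REAL_CST argument is evaluated above with MPFR at
   the precision of TYPE and replaced by the resulting constant,
   provided do_mpfr_ckconv accepts the result; domain bounds such as
   [-1, 1] for asin/acos are enforced through MIN/MAX.  */
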
/* If argument ARG is a REAL_CST, call the two-argument mpfr function
   FUNC on it and return the resulting value as a tree with type TYPE.
   The mpfr precision is set to the precision of TYPE.  We assume that
   function FUNC returns zero if the result could be calculated
   exactly within the requested precision.  */

static tree
do_mpfr_arg2 (tree arg1, tree arg2, tree type,
	      int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
    {
      const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
      const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);

      if (real_isfinite (ra1) && real_isfinite (ra2))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  int inexact;
	  mpfr_t m1, m2;

	  mpfr_inits2 (prec, m1, m2, NULL);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_from_real (m2, ra2, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = func (m1, m1, m2, rnd);
	  result = do_mpfr_ckconv (m1, type, inexact);
	  mpfr_clears (m1, m2, NULL);
	}
    }

  return result;
}

11883 FUNC on it and return the resulting value as a tree with type TYPE.
11884 The mpfr precision is set to the precision of TYPE. We assume that
11885 function FUNC returns zero if the result could be calculated
11886 exactly within the requested precision. */
11889 do_mpfr_arg3 (tree arg1
, tree arg2
, tree arg3
, tree type
,
11890 int (*func
)(mpfr_ptr
, mpfr_srcptr
, mpfr_srcptr
, mpfr_srcptr
, mp_rnd_t
))
11892 tree result
= NULL_TREE
;
11898 /* To proceed, MPFR must exactly represent the target floating point
11899 format, which only happens when the target base equals two. */
11900 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
11901 && TREE_CODE (arg1
) == REAL_CST
&& !TREE_OVERFLOW (arg1
)
11902 && TREE_CODE (arg2
) == REAL_CST
&& !TREE_OVERFLOW (arg2
)
11903 && TREE_CODE (arg3
) == REAL_CST
&& !TREE_OVERFLOW (arg3
))
11905 const REAL_VALUE_TYPE
*const ra1
= &TREE_REAL_CST (arg1
);
11906 const REAL_VALUE_TYPE
*const ra2
= &TREE_REAL_CST (arg2
);
11907 const REAL_VALUE_TYPE
*const ra3
= &TREE_REAL_CST (arg3
);
11909 if (real_isfinite (ra1
) && real_isfinite (ra2
) && real_isfinite (ra3
))
11911 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
11912 const int prec
= fmt
->p
;
11913 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
11917 mpfr_inits2 (prec
, m1
, m2
, m3
, NULL
);
11918 mpfr_from_real (m1
, ra1
, GMP_RNDN
);
11919 mpfr_from_real (m2
, ra2
, GMP_RNDN
);
11920 mpfr_from_real (m3
, ra3
, GMP_RNDN
);
11921 mpfr_clear_flags ();
11922 inexact
= func (m1
, m1
, m2
, m3
, rnd
);
11923 result
= do_mpfr_ckconv (m1
, type
, inexact
);
11924 mpfr_clears (m1
, m2
, m3
, NULL
);
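
/* Usage sketch (illustrative): fma is the canonical three-argument
   client, since mpfr_fma computes arg1 * arg2 + arg3 with a single
   rounding:

     return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);  */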
/* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
   the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
   If ARG_SINP and ARG_COSP are NULL then the result is returned
   as a complex value.
   The type is taken from the type of ARG and is used for setting the
   precision of the calculation and results.  */

static tree
do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
{
  tree const type = TREE_TYPE (arg);
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST
      && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      if (real_isfinite (ra))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          tree result_s, result_c;
          int inexact;
          mpfr_t m, ms, mc;

          mpfr_inits2 (prec, m, ms, mc, NULL);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = mpfr_sin_cos (ms, mc, m, rnd);
          result_s = do_mpfr_ckconv (ms, type, inexact);
          result_c = do_mpfr_ckconv (mc, type, inexact);
          mpfr_clears (m, ms, mc, NULL);
          if (result_s && result_c)
            {
              /* If we are to return in a complex value do so.  */
              if (!arg_sinp && !arg_cosp)
                return build_complex (build_complex_type (type),
                                      result_c, result_s);

              /* Dereference the sin/cos pointer arguments.  */
              arg_sinp = build_fold_indirect_ref (arg_sinp);
              arg_cosp = build_fold_indirect_ref (arg_cosp);
              /* Proceed iff valid pointer types were passed in.  */
              if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp))
                    == TYPE_MAIN_VARIANT (type)
                  && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp))
                       == TYPE_MAIN_VARIANT (type))
                {
                  /* Set the values.  */
                  result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
                                          result_s);
                  TREE_SIDE_EFFECTS (result_s) = 1;
                  result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
                                          result_c);
                  TREE_SIDE_EFFECTS (result_c) = 1;
                  /* Combine the assignments into a compound expr.  */
                  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                    result_s, result_c));
                }
            }
        }
    }

  return result;
}
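
/* Usage sketch (illustrative): a sincos folder passes its two
   out-pointer arguments, while a cexpi folder passes NULL_TREE for
   both and receives a COMPLEX_CST built from (cos, sin):

     return do_mpfr_sincos (arg0, arg1, arg2);
     return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);  */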
/* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
   two-argument mpfr order N Bessel function FUNC on them and return
   the resulting value as a tree with type TYPE.  The mpfr precision
   is set to the precision of TYPE.  We assume that function FUNC
   returns zero if the result could be calculated exactly within the
   requested precision.  */
static tree
do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
                  int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
                  const REAL_VALUE_TYPE *min, bool inclusive)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && tree_fits_shwi_p (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
    {
      const HOST_WIDE_INT n = tree_to_shwi (arg1);
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);

      if (n == (long) n
          && real_isfinite (ra)
          && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min)))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          int inexact;
          mpfr_t m;

          mpfr_init2 (m, prec);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = func (m, n, m, rnd);
          result = do_mpfr_ckconv (m, type, inexact);
          mpfr_clear (m);
        }
    }

  return result;
}
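
/* Usage sketch (illustrative): the Bessel folders differ only in the
   entry point and the domain bound; jn accepts any finite argument
   while yn requires a strictly positive one:

     return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
     return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
                              &dconst0, false);  */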
/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          tree result_rem;
          long integer_quo;
          mpfr_t m0, m1;

          mpfr_inits2 (prec, m0, m1, NULL);
          mpfr_from_real (m0, ra0, GMP_RNDN);
          mpfr_from_real (m1, ra1, GMP_RNDN);
          mpfr_clear_flags ();
          mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
          /* Remquo is independent of the rounding mode, so pass
             inexact=0 to do_mpfr_ckconv().  */
          result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
          mpfr_clears (m0, m1, NULL);
          if (result_rem)
            {
              /* MPFR calculates quo in the host's long so it may
                 return more bits in quo than the target int can hold
                 if sizeof(host long) > sizeof(target int).  This can
                 happen even for native compilers in LP64 mode.  In
                 these cases, modulo the quo value with the largest
                 number that the target int can hold while leaving one
                 bit for the sign.  */
              if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
                integer_quo %= (long) (1UL << (INT_TYPE_SIZE - 1));

              /* Dereference the quo pointer argument.  */
              arg_quo = build_fold_indirect_ref (arg_quo);
              /* Proceed iff a valid pointer type was passed in.  */
              if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
                {
                  /* Set the value.  */
                  tree result_quo
                    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
                                   build_int_cst (TREE_TYPE (arg_quo),
                                                  integer_quo));
                  TREE_SIDE_EFFECTS (result_quo) = 1;
                  /* Combine the quo assignment with the rem.  */
                  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                    result_quo, result_rem));
                }
            }
        }
    }
  return result;
}
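
/* Worked example (illustrative) of the clamp above: on an LP64 host
   targeting a 32-bit int, sizeof (long) * CHAR_BIT is 64 while
   INT_TYPE_SIZE is 32, so quo is reduced modulo 1UL << 31, keeping it
   representable in the target int with the sign bit spared.  */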
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg)))
           == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
         negative integer.  */
      if (real_isfinite (ra)
          && ra->cl != rvc_zero
          && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          int inexact, sg;
          mpfr_t m;
          tree result_lg;

          mpfr_init2 (m, prec);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = mpfr_lgamma (m, &sg, m, rnd);
          result_lg = do_mpfr_ckconv (m, type, inexact);
          mpfr_clear (m);
          if (result_lg)
            {
              tree result_sg;

              /* Dereference the arg_sg pointer argument.  */
              arg_sg = build_fold_indirect_ref (arg_sg);
              /* Assign the signgam value into *arg_sg.  */
              result_sg = fold_build2 (MODIFY_EXPR,
                                       TREE_TYPE (arg_sg), arg_sg,
                                       build_int_cst (TREE_TYPE (arg_sg), sg));
              TREE_SIDE_EFFECTS (result_sg) = 1;
              /* Combine the signgam assignment with the lgamma result.  */
              result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                result_sg, result_lg));
            }
        }
    }

  return result;
}
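
/* Usage sketch (illustrative): an lgamma_r folder forwards its value
   argument, its int-pointer argument and the result type directly:

     return do_mpfr_lgamma_r (arg0, arg1, type);  */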
/* If argument ARG is a COMPLEX_CST, call the one-argument mpc
   function FUNC on it and return the resulting value as a tree with
   type TYPE.  The mpfr precision is set to the precision of TYPE.  We
   assume that function FUNC returns zero if the result could be
   calculated exactly within the requested precision.  */

static tree
do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re
        = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *const im
        = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      if (real_isfinite (re) && real_isfinite (im))
        {
          const struct real_format *const fmt =
            REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          const mpc_rnd_t crnd = fmt->round_towards_zero
                                 ? MPC_RNDZZ : MPC_RNDNN;
          int inexact;
          mpc_t m;

          mpc_init2 (m, prec);
          mpfr_from_real (mpc_realref (m), re, rnd);
          mpfr_from_real (mpc_imagref (m), im, rnd);
          mpfr_clear_flags ();
          inexact = func (m, m, crnd);
          result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
          mpc_clear (m);
        }
    }

  return result;
}
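
/* Usage sketch (illustrative): one-argument complex folders such as
   the one for csin pass the matching MPC entry point:

     return do_mpc_arg1 (arg0, type, mpc_sin);

   mpc_sin matches the expected
   int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t) signature.  */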
/* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
   mpc function FUNC on them and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
             int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0
        = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0
        = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1
        = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1
        = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
          || (real_isfinite (re0) && real_isfinite (im0)
              && real_isfinite (re1) && real_isfinite (im1)))
        {
          const struct real_format *const fmt =
            REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          const mpc_rnd_t crnd = fmt->round_towards_zero
                                 ? MPC_RNDZZ : MPC_RNDNN;
          int inexact;
          mpc_t m0, m1;

          mpc_init2 (m0, prec);
          mpc_init2 (m1, prec);
          mpfr_from_real (mpc_realref (m0), re0, rnd);
          mpfr_from_real (mpc_imagref (m0), im0, rnd);
          mpfr_from_real (mpc_realref (m1), re1, rnd);
          mpfr_from_real (mpc_imagref (m1), im1, rnd);
          mpfr_clear_flags ();
          inexact = func (m0, m0, m1, crnd);
          result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
          mpc_clear (m0);
          mpc_clear (m1);
        }
    }

  return result;
}
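
/* Usage sketch (illustrative): a cpow folder can enable the nonfinite
   path only under -funsafe-math-optimizations, e.g.

     return do_mpc_arg2 (arg0, arg1, type,
                         flag_unsafe_math_optimizations, mpc_pow);

   where the fourth argument is DO_NONFINITE and mpc_pow matches the
   expected four-argument MPC signature.  */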
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
                    ? gimple_call_arg_ptr (stmt, 0)
                    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
        return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
        {
          return targetm.fold_builtin (fndecl, nargs, args, ignore);
        }
      else
        {
          ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
          if (ret)
            {
              /* Propagate location information from original call to
                 expansion of builtin.  Otherwise things like
                 maybe_emit_chk_warning, that operate on the expansion
                 of a builtin, will use the wrong location information.  */
              if (gimple_has_location (stmt))
                {
                  tree realret = ret;
                  if (TREE_CODE (ret) == NOP_EXPR)
                    realret = TREE_OPERAND (ret, 0);
                  if (CAN_HAVE_LOCATION_P (realret)
                      && !EXPR_HAS_LOCATION (realret))
                    SET_EXPR_LOCATION (realret, loc);
                  return realret;
                }
              return ret;
            }
        }
    }
  return NULL_TREE;
}
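
/* Caller sketch (illustrative): gimple-level folding reaches this
   wrapper roughly as

     tree result = fold_call_stmt (call_stmt, !use_lhs);
     if (result)
       ...replace the call with RESULT...

   where the second argument is IGNORE and the ellipsis stands for the
   caller's own statement-rewriting logic.  */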
/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  tree builtin;
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
              && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
              && asmspec != 0);

  builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);
  switch (DECL_FUNCTION_CODE (decl))
    {
    case BUILT_IN_MEMCPY:
      init_block_move_fn (asmspec);
      memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
      break;
    case BUILT_IN_MEMSET:
      init_block_clear_fn (asmspec);
      memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
      break;
    case BUILT_IN_MEMMOVE:
      memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
      break;
    case BUILT_IN_MEMCMP:
      memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
      break;
    case BUILT_IN_ABORT:
      abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
      break;
    case BUILT_IN_FFS:
      if (INT_TYPE_SIZE < BITS_PER_WORD)
        {
          set_user_assembler_libfunc ("ffs", asmspec);
          set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
                                                       MODE_INT, 0), "ffs");
        }
      break;
    default:
      break;
    }
}
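
/* Example (illustrative): this hook is what keeps block-move expansion
   and the libcall table consistent when a program renames a builtin at
   the assembler level, e.g.

     extern void *memcpy (void *, const void *, __SIZE_TYPE__)
       __asm__ ("my_memcpy");

   after which both direct memcpy expansions and open-coded block moves
   that fall back to a library call emit calls to my_memcpy.  */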
/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
        /* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
        /* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
        /* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
        return true;

      default:
        return false;
      }

  return false;
}
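
/* Example (illustrative): __builtin_expect qualifies because it folds
   to its first operand, so

     if (__builtin_expect (p != 0, 1))

   costs no more than the plain test once expanded.  */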
/* Return true if DECL is a builtin that is not expensive, i.e., one that is
   most probably expanded inline into reasonably simple code.  This is a
   superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_ACC_ON_DEVICE:
        return true;

      default:
        return is_simple_builtin (decl);
      }

  return false;
}
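
/* Note (illustrative): these two predicates are consumed by size
   heuristics outside this file; for instance, an inlining or
   loop-unrolling cost model can treat

     is_inexpensive_builtin (gimple_call_fndecl (call))

   returning true as license to count the call as roughly one simple
   statement.  */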