/* Expand builtin functions.
   Copyright (C) 1988-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "double-int.h"
#include "fold-const.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "tree-object-size.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "insn-config.h"
#include "statistics.h"
#include "fixed-value.h"
#include "insn-codes.h"
#include "typeclass.h"
#include "langhooks.h"
#include "tree-ssanames.h"
#include "value-prof.h"
#include "diagnostic-core.h"
#include "lto-streamer.h"
#include "tree-chkp.h"
#include "gomp-constants.h"
static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));

struct target_builtins default_target_builtins;
struct target_builtins *this_target_builtins = &default_target_builtins;
/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Setup an array of builtin_info_type, make sure each element decl is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;
static rtx c_readstr (const char *, machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
static rtx result_vector (int, rtx);
#endif
static void expand_builtin_update_setjmp_buf (rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static void expand_errno_check (tree, rtx);
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_memcmp (tree, rtx, machine_mode);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
					machine_mode, int, tree);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree, bool);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree fold_builtin_nan (tree, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static bool integer_valued_real_p (tree);
static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_sqrt (location_t, tree, tree);
static tree fold_builtin_cbrt (location_t, tree, tree);
static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
static tree fold_builtin_cos (location_t, tree, tree, tree);
static tree fold_builtin_cosh (location_t, tree, tree, tree);
static tree fold_builtin_tan (tree, tree);
static tree fold_builtin_trunc (location_t, tree, tree);
static tree fold_builtin_floor (location_t, tree, tree);
static tree fold_builtin_ceil (location_t, tree, tree);
static tree fold_builtin_round (location_t, tree, tree);
static tree fold_builtin_int_roundingfn (location_t, tree, tree);
static tree fold_builtin_bitop (tree, tree);
static tree fold_builtin_strchr (location_t, tree, tree, tree);
static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_strcmp (location_t, tree, tree);
static tree fold_builtin_strncmp (location_t, tree, tree, tree);
static tree fold_builtin_signbit (location_t, tree, tree);
static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree *, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strstr (location_t, tree, tree, tree);
static tree fold_builtin_strrchr (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
			  const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_arg2 (tree, tree, tree,
			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_arg3 (tree, tree, tree, tree,
			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_sincos (tree, tree, tree);
static tree do_mpfr_bessel_n (tree, tree, tree,
			      int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
			      const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);
/* Return true if NAME starts with __builtin_ or __sync_.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  if (flag_cilkplus
      && (!strcmp (name, "__cilkrts_detach")
	  || !strcmp (name, "__cilkrts_pop_frame")))
    return true;
  return false;
}


/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

static bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}
/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in alignp and N in
   *BITPOSP and return true.  Otherwise return false and store BITS_PER_UNIT to
   *alignp and any bit-offset to *bitposp.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
			     &mode, &unsignedp, &volatilep, true);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    {
      align = TYPE_ALIGN (TREE_TYPE (exp));
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = ptr_bitmask & -ptr_bitmask;
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, (step & -step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      if (!addr_p && !known_alignment
	  && TYPE_ALIGN (TREE_TYPE (exp)) > align)
	align = TYPE_ALIGN (TREE_TYPE (exp));
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}
/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be determined,
   store M in alignp and N in *BITPOSP and return true.  Otherwise return false
   and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}
/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);
  return align;
}
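
/* Illustrative sketch (not part of the original file): how a caller might
   use the contract documented above for get_object_alignment_1/2.  Assuming
   EXP is some memory reference tree built elsewhere:

     unsigned int align;
     unsigned HOST_WIDE_INT bitpos;
     if (get_object_alignment_1 (exp, &align, &bitpos))
       {
	 // Known exactly: the address of EXP equals BITPOS modulo ALIGN,
	 // both in bits.  E.g. align == 64 and bitpos == 16 means the
	 // object sits two bytes past a 64-bit boundary.
       }
     else
       {
	 // Only a conservative answer; ALIGN may be just BITS_PER_UNIT.
       }
*/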
/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in alignp and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* We cannot really tell whether this result is an approximation.  */
	  return true;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}
/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);

  return align;
}
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_LOC (src, input_location);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! tree_fits_shwi_p (offset_node))
    offset = -1;
  else
    offset = tree_to_shwi (offset_node);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset outside bounds of constant string");
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
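
/* Illustrative examples (not part of the original file) of what c_strlen
   computes, following the rules documented above.  The arguments are
   shorthand for the corresponding trees:

     c_strlen ("foobar", 0)                    -> ssize_int (6)
     c_strlen ("foobar" + 2, 0)                -> ssize_int (4)
     c_strlen (i++ ? "foo" : "bar", 1)         -> ssize_int (3)
     c_strlen (i++ ? "foo" : "bar", 0)         -> NULL_TREE (side effects)
     c_strlen ("foo\0bar" + variable_off, 0)   -> NULL_TREE (internal NUL)
*/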
/* Return a char pointer for a C string if it is a string constant
   or sum of string constant and integer constant.  */

static const char *
c_getstr (tree src)
{
  tree offset_node;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  if (offset_node == 0)
    return TREE_STRING_POINTER (src);
  else if (!tree_fits_uhwi_p (offset_node)
	   || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
    return 0;

  return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
}
/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, machine_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
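
/* Illustrative example (not part of the original file): on a 32-bit
   little-endian target, c_readstr ("abcd", SImode) places 'a' in the least
   significant byte, giving the constant 0x64636261, while a big-endian
   target yields 0x61626364 for the same call.  The index J computed above
   is what performs that byte swizzling.  */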
/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
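
/* Illustrative sketch (not part of the original file) of a typical caller
   pattern, e.g. when expanding a memset/memchr style builtin whose
   character argument must be a compile-time constant:

     char c;
     if (target_char_cast (arg2, &c))
       return NULL_RTX;   // not a constant, or does not fit a host char
     // otherwise C now holds the target character value
*/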
/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
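
/* Illustrative sketch (not part of the original file) of the generic
   fallback above: for __builtin_return_address (2) on a target without the
   special macros, COUNT is 2, so the loop effectively performs

     fp1 = *(void **) __builtin_frame_address (0);
     fp2 = *(void **) fp1;

   and the return address is then read from the word one pointer past FP2
   (offset GET_MODE_SIZE (Pmode)).  Targets override this layout through
   DYNAMIC_CHAIN_ADDRESS and RETURN_ADDR_RTX.  */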
/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode))),
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
#ifdef HAVE_builtin_setjmp_setup
  if (HAVE_builtin_setjmp_setup)
    emit_insn (gen_builtin_setjmp_setup (buf_addr));
#endif

  /* We have a nonlocal label.   */
  cfun->has_nonlocal_label = 1;
}
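
/* Layout sketch (not part of the original file): after the setup above the
   __builtin_setjmp buffer at BUF_ADDR holds, in order,

     word 0:   the frame value (targetm.builtin_setjmp_frame_value)
     word 1:   the address of RECEIVER_LABEL
     word 2+:  the machine-dependent stack save area (sa_mode)

   expand_builtin_longjmp and expand_builtin_update_setjmp_buf below rely on
   exactly this layout when they read the buffer back.  */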
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

#if !HARD_FRAME_POINTER_IS_ARG_POINTER
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }
#endif

#ifdef HAVE_builtin_setjmp_receiver
  if (receiver_label != NULL && HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
#ifdef HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipses, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
	{
	case 0:
	  /* This signifies an ellipses, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}
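
/* Illustrative sketch (not part of the original file): callers list the
   expected argument type classes and terminate with VOID_TYPE (exact arity)
   or 0 (trailing ellipsis), e.g.

     if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
       return NULL_RTX;     // exactly two pointer arguments expected
     if (!validate_arglist (exp, POINTER_TYPE, 0))
       return;              // a pointer first, anything may follow
*/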
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.   */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

#ifdef HAVE_nonlocal_goto
  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (HAVE_nonlocal_goto)
    emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
#endif
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}
/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to correspond to the current
   stack pointer.  */

static void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

#ifdef HAVE_prefetch
  if (HAVE_prefetch)
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
	return;
    }
#endif

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
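
/* Illustrative source-level examples (not part of the original file) of the
   argument checking above:

     __builtin_prefetch (p);          // rw defaults to 0, locality to 3
     __builtin_prefetch (p, 1, 0);    // prefetch for write, no locality
     __builtin_prefetch (p, x, 3);    // error unless X is a compile-time
				      // constant; values other than 0/1
				      // only draw a warning
*/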
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}
/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  apply_args_mode[regno] = VOIDmode;
    }
  return size;
}
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (VOIDmode, mem, reg)
			    : gen_rtx_SET (VOIDmode, reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  machine_mode mode;
  rtx struct_incoming_value
    = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
#ifdef STACK_GROWS_DOWNWARD
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  tem
    = force_operand (plus_constant (Pmode, tem, crtl->args.pretend_args_size),
		     NULL_RTX);
#endif
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;
    rtx_insn *seq;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  machine_mode mode;
  rtx incoming_args, result, reg, dest, src;
  rtx_call_insn *call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
				       incoming_args, 0, OPTAB_LIB_WIDEN);
#endif

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
  else
#endif
    emit_stack_save (SAVE_BLOCK, &old_stack_level);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
#ifndef STACK_GROWS_DOWNWARD
  if (CONST_INT_P (argsize))
    dest = plus_constant (Pmode, dest, -INTVAL (argsize));
  else
    dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
#endif
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
				      result, result_vector (1, result)));
  else
#endif
#ifdef HAVE_call_value
  if (HAVE_call_value)
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    gcc_assert (!valreg); /* HAVE_untyped_call required.  */

	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_call_insn (GEN_CALL_VALUE (valreg,
				      gen_rtx_MEM (FUNCTION_MODE, function),
				      const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
#endif
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
  else
#endif
    emit_stack_restore (SAVE_BLOCK, old_stack_level);
  fixup_args_size_notes (call_insn, get_last_insn (), 0);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
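
/* Illustrative source-level sketch (not part of the original file) of how
   the three untyped-call builtins expanded here fit together, e.g. in a
   forwarding wrapper:

     void *wrapper (void)
     {
       void *args = __builtin_apply_args ();
       void *res  = __builtin_apply ((void (*) ()) target_fn, args, 64);
       __builtin_return (res);
     }

   target_fn and the 64-byte argument-block size are placeholders; the
   result block produced by __builtin_apply above is what
   expand_builtin_return below consumes.  */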
/* Perform an untyped return.  */

static void
expand_builtin_return (rtx result)
{
  int size, align, regno;
  machine_mode mode;
  rtx reg;
  rtx_insn *call_fusage = 0;

  result = convert_memory_address (Pmode, result);

  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

#ifdef HAVE_untyped_return
  if (HAVE_untyped_return)
    {
      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
      emit_barrier ();
      return;
    }
#endif

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
	emit_move_insn (reg, adjust_address (result, mode, size));

	push_to_sequence (call_fusage);
	emit_use (reg);
	call_fusage = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insn (call_fusage);

  /* Return whatever values was restored by jumping directly to the end
     of the function.  */
  expand_naked_return ();
}
/* Used by expand_builtin_classify_type and fold_builtin_classify_type.  */

static enum type_class
type_to_class (tree type)
{
  switch (TREE_CODE (type))
    {
    case VOID_TYPE:	   return void_type_class;
    case INTEGER_TYPE:	   return integer_type_class;
    case ENUMERAL_TYPE:	   return enumeral_type_class;
    case BOOLEAN_TYPE:	   return boolean_type_class;
    case POINTER_TYPE:	   return pointer_type_class;
    case REFERENCE_TYPE:   return reference_type_class;
    case OFFSET_TYPE:	   return offset_type_class;
    case REAL_TYPE:	   return real_type_class;
    case COMPLEX_TYPE:	   return complex_type_class;
    case FUNCTION_TYPE:	   return function_type_class;
    case METHOD_TYPE:	   return method_type_class;
    case RECORD_TYPE:	   return record_type_class;
    case UNION_TYPE:
    case QUAL_UNION_TYPE:  return union_type_class;
    case ARRAY_TYPE:	   return (TYPE_STRING_FLAG (type)
				   ? string_type_class : array_type_class);
    case LANG_TYPE:	   return lang_type_class;
    default:		   return no_type_class;
    }
}

/* Expand a call EXP to __builtin_classify_type.  */

static rtx
expand_builtin_classify_type (tree exp)
{
  if (call_expr_nargs (exp))
    return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
  return GEN_INT (no_type_class);
}
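/* Illustrative sketch (not part of GCC): what the classification above means
   at the source level.  __builtin_classify_type only looks at the type of
   its argument, so each call folds to a compile-time constant naming one of
   the type_class values from typeclass.h (e.g. pointer_type_class for a
   pointer argument, real_type_class for a double).  Kept under #if 0 since
   it is example-only.  */
#if 0
int classify_type_examples (int *p, double d)
{
  int pc = __builtin_classify_type (p);  /* pointer_type_class */
  int rc = __builtin_classify_type (d);  /* real_type_class */
  return pc + rc;
}
#endif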
/* This helper macro, meant to be used in mathfn_built_in below,
   determines which among a set of three builtin math functions is
   appropriate for a given type mode.  The `F' and `L' cases are
   automatically generated from the `double' case.  */
#define CASE_MATHFN(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
  fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
  fcodel = BUILT_IN_MATHFN##L ; break;
/* Similar to above, but appends _R after any F/L suffix.  */
#define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
  fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
  fcodel = BUILT_IN_MATHFN##L_R ; break;
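/* For reference, a sketch of what one CASE_MATHFN use expands to (standard
   token pasting; illustrative only, not additional code):

     CASE_MATHFN (BUILT_IN_SIN)
   becomes
     case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
       fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
       fcodel = BUILT_IN_SINL; break;  */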
/* Return the mathematical function equivalent to FN but operating directly
   on TYPE, if available.  If IMPLICIT_P is true use the implicit builtin
   declaration, otherwise use the explicit declaration.  If we can't do the
   conversion, return zero.  */

static tree
mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
{
  enum built_in_function fcode, fcodef, fcodel, fcode2;

  switch (fn)
    {
      CASE_MATHFN (BUILT_IN_ACOS)
      CASE_MATHFN (BUILT_IN_ACOSH)
      CASE_MATHFN (BUILT_IN_ASIN)
      CASE_MATHFN (BUILT_IN_ASINH)
      CASE_MATHFN (BUILT_IN_ATAN)
      CASE_MATHFN (BUILT_IN_ATAN2)
      CASE_MATHFN (BUILT_IN_ATANH)
      CASE_MATHFN (BUILT_IN_CBRT)
      CASE_MATHFN (BUILT_IN_CEIL)
      CASE_MATHFN (BUILT_IN_CEXPI)
      CASE_MATHFN (BUILT_IN_COPYSIGN)
      CASE_MATHFN (BUILT_IN_COS)
      CASE_MATHFN (BUILT_IN_COSH)
      CASE_MATHFN (BUILT_IN_DREM)
      CASE_MATHFN (BUILT_IN_ERF)
      CASE_MATHFN (BUILT_IN_ERFC)
      CASE_MATHFN (BUILT_IN_EXP)
      CASE_MATHFN (BUILT_IN_EXP10)
      CASE_MATHFN (BUILT_IN_EXP2)
      CASE_MATHFN (BUILT_IN_EXPM1)
      CASE_MATHFN (BUILT_IN_FABS)
      CASE_MATHFN (BUILT_IN_FDIM)
      CASE_MATHFN (BUILT_IN_FLOOR)
      CASE_MATHFN (BUILT_IN_FMA)
      CASE_MATHFN (BUILT_IN_FMAX)
      CASE_MATHFN (BUILT_IN_FMIN)
      CASE_MATHFN (BUILT_IN_FMOD)
      CASE_MATHFN (BUILT_IN_FREXP)
      CASE_MATHFN (BUILT_IN_GAMMA)
      CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
      CASE_MATHFN (BUILT_IN_HUGE_VAL)
      CASE_MATHFN (BUILT_IN_HYPOT)
      CASE_MATHFN (BUILT_IN_ILOGB)
      CASE_MATHFN (BUILT_IN_ICEIL)
      CASE_MATHFN (BUILT_IN_IFLOOR)
      CASE_MATHFN (BUILT_IN_INF)
      CASE_MATHFN (BUILT_IN_IRINT)
      CASE_MATHFN (BUILT_IN_IROUND)
      CASE_MATHFN (BUILT_IN_ISINF)
      CASE_MATHFN (BUILT_IN_J0)
      CASE_MATHFN (BUILT_IN_J1)
      CASE_MATHFN (BUILT_IN_JN)
      CASE_MATHFN (BUILT_IN_LCEIL)
      CASE_MATHFN (BUILT_IN_LDEXP)
      CASE_MATHFN (BUILT_IN_LFLOOR)
      CASE_MATHFN (BUILT_IN_LGAMMA)
      CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
      CASE_MATHFN (BUILT_IN_LLCEIL)
      CASE_MATHFN (BUILT_IN_LLFLOOR)
      CASE_MATHFN (BUILT_IN_LLRINT)
      CASE_MATHFN (BUILT_IN_LLROUND)
      CASE_MATHFN (BUILT_IN_LOG)
      CASE_MATHFN (BUILT_IN_LOG10)
      CASE_MATHFN (BUILT_IN_LOG1P)
      CASE_MATHFN (BUILT_IN_LOG2)
      CASE_MATHFN (BUILT_IN_LOGB)
      CASE_MATHFN (BUILT_IN_LRINT)
      CASE_MATHFN (BUILT_IN_LROUND)
      CASE_MATHFN (BUILT_IN_MODF)
      CASE_MATHFN (BUILT_IN_NAN)
      CASE_MATHFN (BUILT_IN_NANS)
      CASE_MATHFN (BUILT_IN_NEARBYINT)
      CASE_MATHFN (BUILT_IN_NEXTAFTER)
      CASE_MATHFN (BUILT_IN_NEXTTOWARD)
      CASE_MATHFN (BUILT_IN_POW)
      CASE_MATHFN (BUILT_IN_POWI)
      CASE_MATHFN (BUILT_IN_POW10)
      CASE_MATHFN (BUILT_IN_REMAINDER)
      CASE_MATHFN (BUILT_IN_REMQUO)
      CASE_MATHFN (BUILT_IN_RINT)
      CASE_MATHFN (BUILT_IN_ROUND)
      CASE_MATHFN (BUILT_IN_SCALB)
      CASE_MATHFN (BUILT_IN_SCALBLN)
      CASE_MATHFN (BUILT_IN_SCALBN)
      CASE_MATHFN (BUILT_IN_SIGNBIT)
      CASE_MATHFN (BUILT_IN_SIGNIFICAND)
      CASE_MATHFN (BUILT_IN_SIN)
      CASE_MATHFN (BUILT_IN_SINCOS)
      CASE_MATHFN (BUILT_IN_SINH)
      CASE_MATHFN (BUILT_IN_SQRT)
      CASE_MATHFN (BUILT_IN_TAN)
      CASE_MATHFN (BUILT_IN_TANH)
      CASE_MATHFN (BUILT_IN_TGAMMA)
      CASE_MATHFN (BUILT_IN_TRUNC)
      CASE_MATHFN (BUILT_IN_Y0)
      CASE_MATHFN (BUILT_IN_Y1)
      CASE_MATHFN (BUILT_IN_YN)

      default:
	return NULL_TREE;
    }

  if (TYPE_MAIN_VARIANT (type) == double_type_node)
    fcode2 = fcode;
  else if (TYPE_MAIN_VARIANT (type) == float_type_node)
    fcode2 = fcodef;
  else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
    fcode2 = fcodel;
  else
    return NULL_TREE;

  if (implicit_p && !builtin_decl_implicit_p (fcode2))
    return NULL_TREE;

  return builtin_decl_explicit (fcode2);
}
/* Like mathfn_built_in_1(), but always use the implicit array.  */

tree
mathfn_built_in (tree type, enum built_in_function fn)
{
  return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
}
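/* Illustrative sketch (not part of GCC): a typical use of mathfn_built_in.
   Given the tree type of a call argument it hands back the decl of the
   matching builtin for that type (sqrt, sqrtf or sqrtl here), or NULL_TREE
   when nothing suitable is declared.  The helper name is hypothetical.  */
#if 0
static tree
sqrt_decl_for_type (tree arg)
{
  tree fndecl = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_SQRT);
  /* NULL_TREE means no implicitly-available sqrt variant exists for this
     type, so the caller has to emit a normal library call instead.  */
  return fndecl;
}
#endif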
/* If errno must be maintained, expand the RTL to check if the result,
   TARGET, of a built-in function call, EXP, is NaN, and if so set
   errno to EDOM.  */

static void
expand_errno_check (tree exp, rtx target)
{
  rtx_code_label *lab = gen_label_rtx ();

  /* Test the result; if it is NaN, set errno=EDOM because
     the argument was not in the domain.  */
  do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
			   NULL_RTX, NULL_RTX, lab,
			   /* The jump is very likely.  */
			   REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));

#ifdef TARGET_EDOM
  /* If this built-in doesn't throw an exception, set errno directly.  */
  if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
    {
#ifdef GEN_ERRNO_RTX
      rtx errno_rtx = GEN_ERRNO_RTX;
#else
      rtx errno_rtx
	  = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
#endif
      emit_move_insn (errno_rtx,
		      gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
      emit_label (lab);
      return;
    }
#endif

  /* Make sure the library call isn't expanded as a tail call.  */
  CALL_EXPR_TAILCALL (exp) = 0;

  /* We can't set errno=EDOM directly; let the library call do it.
     Pop the arguments right away in case the call gets deleted.  */
  NO_DEFER_POP;
  expand_call (exp, target, 0);
  OK_DEFER_POP;
  emit_label (lab);
}
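/* Illustrative sketch (not part of GCC): the check above leans on the IEEE
   rule that a NaN compares unequal to itself, so comparing TARGET with
   itself and jumping on equality skips the errno store for every ordinary
   value.  In plain C (assuming <errno.h>), the emitted code behaves like:  */
#if 0
static void
set_edom_if_nan (double result)
{
  if (result == result)
    return;        /* not a NaN: leave errno untouched */
  errno = EDOM;    /* NaN result: the argument was outside the domain */
}
#endif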
2036 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
2037 Return NULL_RTX if a normal call should be emitted rather than expanding
2038 the function in-line. EXP is the expression that is a call to the builtin
2039 function; if convenient, the result should be placed in TARGET.
2040 SUBTARGET may be used as the target for computing one of EXP's operands. */
2043 expand_builtin_mathfn (tree exp
, rtx target
, rtx subtarget
)
2045 optab builtin_optab
;
2048 tree fndecl
= get_callee_fndecl (exp
);
2050 bool errno_set
= false;
2051 bool try_widening
= false;
2054 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2057 arg
= CALL_EXPR_ARG (exp
, 0);
2059 switch (DECL_FUNCTION_CODE (fndecl
))
2061 CASE_FLT_FN (BUILT_IN_SQRT
):
2062 errno_set
= ! tree_expr_nonnegative_p (arg
);
2063 try_widening
= true;
2064 builtin_optab
= sqrt_optab
;
2066 CASE_FLT_FN (BUILT_IN_EXP
):
2067 errno_set
= true; builtin_optab
= exp_optab
; break;
2068 CASE_FLT_FN (BUILT_IN_EXP10
):
2069 CASE_FLT_FN (BUILT_IN_POW10
):
2070 errno_set
= true; builtin_optab
= exp10_optab
; break;
2071 CASE_FLT_FN (BUILT_IN_EXP2
):
2072 errno_set
= true; builtin_optab
= exp2_optab
; break;
2073 CASE_FLT_FN (BUILT_IN_EXPM1
):
2074 errno_set
= true; builtin_optab
= expm1_optab
; break;
2075 CASE_FLT_FN (BUILT_IN_LOGB
):
2076 errno_set
= true; builtin_optab
= logb_optab
; break;
2077 CASE_FLT_FN (BUILT_IN_LOG
):
2078 errno_set
= true; builtin_optab
= log_optab
; break;
2079 CASE_FLT_FN (BUILT_IN_LOG10
):
2080 errno_set
= true; builtin_optab
= log10_optab
; break;
2081 CASE_FLT_FN (BUILT_IN_LOG2
):
2082 errno_set
= true; builtin_optab
= log2_optab
; break;
2083 CASE_FLT_FN (BUILT_IN_LOG1P
):
2084 errno_set
= true; builtin_optab
= log1p_optab
; break;
2085 CASE_FLT_FN (BUILT_IN_ASIN
):
2086 builtin_optab
= asin_optab
; break;
2087 CASE_FLT_FN (BUILT_IN_ACOS
):
2088 builtin_optab
= acos_optab
; break;
2089 CASE_FLT_FN (BUILT_IN_TAN
):
2090 builtin_optab
= tan_optab
; break;
2091 CASE_FLT_FN (BUILT_IN_ATAN
):
2092 builtin_optab
= atan_optab
; break;
2093 CASE_FLT_FN (BUILT_IN_FLOOR
):
2094 builtin_optab
= floor_optab
; break;
2095 CASE_FLT_FN (BUILT_IN_CEIL
):
2096 builtin_optab
= ceil_optab
; break;
2097 CASE_FLT_FN (BUILT_IN_TRUNC
):
2098 builtin_optab
= btrunc_optab
; break;
2099 CASE_FLT_FN (BUILT_IN_ROUND
):
2100 builtin_optab
= round_optab
; break;
2101 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
2102 builtin_optab
= nearbyint_optab
;
2103 if (flag_trapping_math
)
2105 /* Else fallthrough and expand as rint. */
2106 CASE_FLT_FN (BUILT_IN_RINT
):
2107 builtin_optab
= rint_optab
; break;
2108 CASE_FLT_FN (BUILT_IN_SIGNIFICAND
):
2109 builtin_optab
= significand_optab
; break;
2114 /* Make a suitable register to place result in. */
2115 mode
= TYPE_MODE (TREE_TYPE (exp
));
2117 if (! flag_errno_math
|| ! HONOR_NANS (mode
))
2120 /* Before working hard, check whether the instruction is available, but try
2121 to widen the mode for specific operations. */
2122 if ((optab_handler (builtin_optab
, mode
) != CODE_FOR_nothing
2123 || (try_widening
&& !excess_precision_type (TREE_TYPE (exp
))))
2124 && (!errno_set
|| !optimize_insn_for_size_p ()))
2126 rtx result
= gen_reg_rtx (mode
);
      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
2131 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2133 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2137 /* Compute into RESULT.
2138 Set RESULT to wherever the result comes back. */
2139 result
= expand_unop (mode
, builtin_optab
, op0
, result
, 0);
2144 expand_errno_check (exp
, result
);
2146 /* Output the entire sequence. */
2147 insns
= get_insns ();
2153 /* If we were unable to expand via the builtin, stop the sequence
2154 (without outputting the insns) and call to the library function
2155 with the stabilized argument list. */
2159 return expand_call (exp
, target
, target
== const0_rtx
);
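/* Illustrative sketch (not part of GCC): the errno_set decision above for
   sqrt.  If the argument is provably non-negative the call can never fail,
   so no errno check is needed; otherwise the check is kept (or, when
   optimizing for size, the inline expansion is skipped in favour of a
   library call).  Whether the first case is recognized depends on
   tree_expr_nonnegative_p seeing through the expression.  */
#if 0
double no_check_needed (double x) { return __builtin_sqrt (x * x); }
double check_needed    (double x) { return __builtin_sqrt (x); }
#endif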
2162 /* Expand a call to the builtin binary math functions (pow and atan2).
2163 Return NULL_RTX if a normal call should be emitted rather than expanding the
2164 function in-line. EXP is the expression that is a call to the builtin
2165 function; if convenient, the result should be placed in TARGET.
2166 SUBTARGET may be used as the target for computing one of EXP's
2170 expand_builtin_mathfn_2 (tree exp
, rtx target
, rtx subtarget
)
2172 optab builtin_optab
;
2173 rtx op0
, op1
, result
;
2175 int op1_type
= REAL_TYPE
;
2176 tree fndecl
= get_callee_fndecl (exp
);
2179 bool errno_set
= true;
2181 switch (DECL_FUNCTION_CODE (fndecl
))
2183 CASE_FLT_FN (BUILT_IN_SCALBN
):
2184 CASE_FLT_FN (BUILT_IN_SCALBLN
):
2185 CASE_FLT_FN (BUILT_IN_LDEXP
):
2186 op1_type
= INTEGER_TYPE
;
2191 if (!validate_arglist (exp
, REAL_TYPE
, op1_type
, VOID_TYPE
))
2194 arg0
= CALL_EXPR_ARG (exp
, 0);
2195 arg1
= CALL_EXPR_ARG (exp
, 1);
2197 switch (DECL_FUNCTION_CODE (fndecl
))
2199 CASE_FLT_FN (BUILT_IN_POW
):
2200 builtin_optab
= pow_optab
; break;
2201 CASE_FLT_FN (BUILT_IN_ATAN2
):
2202 builtin_optab
= atan2_optab
; break;
2203 CASE_FLT_FN (BUILT_IN_SCALB
):
2204 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp
)))->b
!= 2)
2206 builtin_optab
= scalb_optab
; break;
2207 CASE_FLT_FN (BUILT_IN_SCALBN
):
2208 CASE_FLT_FN (BUILT_IN_SCALBLN
):
2209 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp
)))->b
!= 2)
2211 /* Fall through... */
2212 CASE_FLT_FN (BUILT_IN_LDEXP
):
2213 builtin_optab
= ldexp_optab
; break;
2214 CASE_FLT_FN (BUILT_IN_FMOD
):
2215 builtin_optab
= fmod_optab
; break;
2216 CASE_FLT_FN (BUILT_IN_REMAINDER
):
2217 CASE_FLT_FN (BUILT_IN_DREM
):
2218 builtin_optab
= remainder_optab
; break;
2223 /* Make a suitable register to place result in. */
2224 mode
= TYPE_MODE (TREE_TYPE (exp
));
2226 /* Before working hard, check whether the instruction is available. */
2227 if (optab_handler (builtin_optab
, mode
) == CODE_FOR_nothing
)
2230 result
= gen_reg_rtx (mode
);
2232 if (! flag_errno_math
|| ! HONOR_NANS (mode
))
2235 if (errno_set
&& optimize_insn_for_size_p ())
2238 /* Always stabilize the argument list. */
2239 CALL_EXPR_ARG (exp
, 0) = arg0
= builtin_save_expr (arg0
);
2240 CALL_EXPR_ARG (exp
, 1) = arg1
= builtin_save_expr (arg1
);
2242 op0
= expand_expr (arg0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2243 op1
= expand_normal (arg1
);
2247 /* Compute into RESULT.
2248 Set RESULT to wherever the result comes back. */
2249 result
= expand_binop (mode
, builtin_optab
, op0
, op1
,
2250 result
, 0, OPTAB_DIRECT
);
2252 /* If we were unable to expand via the builtin, stop the sequence
2253 (without outputting the insns) and call to the library function
2254 with the stabilized argument list. */
2258 return expand_call (exp
, target
, target
== const0_rtx
);
2262 expand_errno_check (exp
, result
);
2264 /* Output the entire sequence. */
2265 insns
= get_insns ();
2272 /* Expand a call to the builtin trinary math functions (fma).
2273 Return NULL_RTX if a normal call should be emitted rather than expanding the
2274 function in-line. EXP is the expression that is a call to the builtin
2275 function; if convenient, the result should be placed in TARGET.
2276 SUBTARGET may be used as the target for computing one of EXP's
2280 expand_builtin_mathfn_ternary (tree exp
, rtx target
, rtx subtarget
)
2282 optab builtin_optab
;
2283 rtx op0
, op1
, op2
, result
;
2285 tree fndecl
= get_callee_fndecl (exp
);
2286 tree arg0
, arg1
, arg2
;
2289 if (!validate_arglist (exp
, REAL_TYPE
, REAL_TYPE
, REAL_TYPE
, VOID_TYPE
))
2292 arg0
= CALL_EXPR_ARG (exp
, 0);
2293 arg1
= CALL_EXPR_ARG (exp
, 1);
2294 arg2
= CALL_EXPR_ARG (exp
, 2);
2296 switch (DECL_FUNCTION_CODE (fndecl
))
2298 CASE_FLT_FN (BUILT_IN_FMA
):
2299 builtin_optab
= fma_optab
; break;
2304 /* Make a suitable register to place result in. */
2305 mode
= TYPE_MODE (TREE_TYPE (exp
));
2307 /* Before working hard, check whether the instruction is available. */
2308 if (optab_handler (builtin_optab
, mode
) == CODE_FOR_nothing
)
2311 result
= gen_reg_rtx (mode
);
2313 /* Always stabilize the argument list. */
2314 CALL_EXPR_ARG (exp
, 0) = arg0
= builtin_save_expr (arg0
);
2315 CALL_EXPR_ARG (exp
, 1) = arg1
= builtin_save_expr (arg1
);
2316 CALL_EXPR_ARG (exp
, 2) = arg2
= builtin_save_expr (arg2
);
2318 op0
= expand_expr (arg0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2319 op1
= expand_normal (arg1
);
2320 op2
= expand_normal (arg2
);
2324 /* Compute into RESULT.
2325 Set RESULT to wherever the result comes back. */
2326 result
= expand_ternary_op (mode
, builtin_optab
, op0
, op1
, op2
,
2329 /* If we were unable to expand via the builtin, stop the sequence
2330 (without outputting the insns) and call to the library function
2331 with the stabilized argument list. */
2335 return expand_call (exp
, target
, target
== const0_rtx
);
2338 /* Output the entire sequence. */
2339 insns
= get_insns ();
2346 /* Expand a call to the builtin sin and cos math functions.
2347 Return NULL_RTX if a normal call should be emitted rather than expanding the
2348 function in-line. EXP is the expression that is a call to the builtin
2349 function; if convenient, the result should be placed in TARGET.
2350 SUBTARGET may be used as the target for computing one of EXP's
2354 expand_builtin_mathfn_3 (tree exp
, rtx target
, rtx subtarget
)
2356 optab builtin_optab
;
2359 tree fndecl
= get_callee_fndecl (exp
);
2363 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2366 arg
= CALL_EXPR_ARG (exp
, 0);
2368 switch (DECL_FUNCTION_CODE (fndecl
))
2370 CASE_FLT_FN (BUILT_IN_SIN
):
2371 CASE_FLT_FN (BUILT_IN_COS
):
2372 builtin_optab
= sincos_optab
; break;
2377 /* Make a suitable register to place result in. */
2378 mode
= TYPE_MODE (TREE_TYPE (exp
));
2380 /* Check if sincos insn is available, otherwise fallback
2381 to sin or cos insn. */
2382 if (optab_handler (builtin_optab
, mode
) == CODE_FOR_nothing
)
2383 switch (DECL_FUNCTION_CODE (fndecl
))
2385 CASE_FLT_FN (BUILT_IN_SIN
):
2386 builtin_optab
= sin_optab
; break;
2387 CASE_FLT_FN (BUILT_IN_COS
):
2388 builtin_optab
= cos_optab
; break;
2393 /* Before working hard, check whether the instruction is available. */
2394 if (optab_handler (builtin_optab
, mode
) != CODE_FOR_nothing
)
2396 rtx result
= gen_reg_rtx (mode
);
      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
2401 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2403 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2407 /* Compute into RESULT.
2408 Set RESULT to wherever the result comes back. */
2409 if (builtin_optab
== sincos_optab
)
2413 switch (DECL_FUNCTION_CODE (fndecl
))
2415 CASE_FLT_FN (BUILT_IN_SIN
):
2416 ok
= expand_twoval_unop (builtin_optab
, op0
, 0, result
, 0);
2418 CASE_FLT_FN (BUILT_IN_COS
):
2419 ok
= expand_twoval_unop (builtin_optab
, op0
, result
, 0, 0);
2427 result
= expand_unop (mode
, builtin_optab
, op0
, result
, 0);
2431 /* Output the entire sequence. */
2432 insns
= get_insns ();
2438 /* If we were unable to expand via the builtin, stop the sequence
2439 (without outputting the insns) and call to the library function
2440 with the stabilized argument list. */
2444 return expand_call (exp
, target
, target
== const0_rtx
);
2447 /* Given an interclass math builtin decl FNDECL and it's argument ARG
2448 return an RTL instruction code that implements the functionality.
2449 If that isn't possible or available return CODE_FOR_nothing. */
2451 static enum insn_code
2452 interclass_mathfn_icode (tree arg
, tree fndecl
)
2454 bool errno_set
= false;
2455 optab builtin_optab
= unknown_optab
;
2458 switch (DECL_FUNCTION_CODE (fndecl
))
2460 CASE_FLT_FN (BUILT_IN_ILOGB
):
2461 errno_set
= true; builtin_optab
= ilogb_optab
; break;
2462 CASE_FLT_FN (BUILT_IN_ISINF
):
2463 builtin_optab
= isinf_optab
; break;
2464 case BUILT_IN_ISNORMAL
:
2465 case BUILT_IN_ISFINITE
:
2466 CASE_FLT_FN (BUILT_IN_FINITE
):
2467 case BUILT_IN_FINITED32
:
2468 case BUILT_IN_FINITED64
:
2469 case BUILT_IN_FINITED128
:
2470 case BUILT_IN_ISINFD32
:
2471 case BUILT_IN_ISINFD64
:
2472 case BUILT_IN_ISINFD128
:
2473 /* These builtins have no optabs (yet). */
2479 /* There's no easy way to detect the case we need to set EDOM. */
2480 if (flag_errno_math
&& errno_set
)
2481 return CODE_FOR_nothing
;
2483 /* Optab mode depends on the mode of the input argument. */
2484 mode
= TYPE_MODE (TREE_TYPE (arg
));
2487 return optab_handler (builtin_optab
, mode
);
2488 return CODE_FOR_nothing
;
2491 /* Expand a call to one of the builtin math functions that operate on
2492 floating point argument and output an integer result (ilogb, isinf,
2494 Return 0 if a normal call should be emitted rather than expanding the
2495 function in-line. EXP is the expression that is a call to the builtin
2496 function; if convenient, the result should be placed in TARGET. */
2499 expand_builtin_interclass_mathfn (tree exp
, rtx target
)
2501 enum insn_code icode
= CODE_FOR_nothing
;
2503 tree fndecl
= get_callee_fndecl (exp
);
2507 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2510 arg
= CALL_EXPR_ARG (exp
, 0);
2511 icode
= interclass_mathfn_icode (arg
, fndecl
);
2512 mode
= TYPE_MODE (TREE_TYPE (arg
));
2514 if (icode
!= CODE_FOR_nothing
)
2516 struct expand_operand ops
[1];
2517 rtx_insn
*last
= get_last_insn ();
2518 tree orig_arg
= arg
;
      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
2523 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2525 op0
= expand_expr (arg
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
2527 if (mode
!= GET_MODE (op0
))
2528 op0
= convert_to_mode (mode
, op0
, 0);
2530 create_output_operand (&ops
[0], target
, TYPE_MODE (TREE_TYPE (exp
)));
2531 if (maybe_legitimize_operands (icode
, 0, 1, ops
)
2532 && maybe_emit_unop_insn (icode
, ops
[0].value
, op0
, UNKNOWN
))
2533 return ops
[0].value
;
2535 delete_insns_since (last
);
2536 CALL_EXPR_ARG (exp
, 0) = orig_arg
;
2542 /* Expand a call to the builtin sincos math function.
2543 Return NULL_RTX if a normal call should be emitted rather than expanding the
2544 function in-line. EXP is the expression that is a call to the builtin
2548 expand_builtin_sincos (tree exp
)
2550 rtx op0
, op1
, op2
, target1
, target2
;
2552 tree arg
, sinp
, cosp
;
2554 location_t loc
= EXPR_LOCATION (exp
);
2555 tree alias_type
, alias_off
;
2557 if (!validate_arglist (exp
, REAL_TYPE
,
2558 POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
2561 arg
= CALL_EXPR_ARG (exp
, 0);
2562 sinp
= CALL_EXPR_ARG (exp
, 1);
2563 cosp
= CALL_EXPR_ARG (exp
, 2);
2565 /* Make a suitable register to place result in. */
2566 mode
= TYPE_MODE (TREE_TYPE (arg
));
2568 /* Check if sincos insn is available, otherwise emit the call. */
2569 if (optab_handler (sincos_optab
, mode
) == CODE_FOR_nothing
)
2572 target1
= gen_reg_rtx (mode
);
2573 target2
= gen_reg_rtx (mode
);
2575 op0
= expand_normal (arg
);
2576 alias_type
= build_pointer_type_for_mode (TREE_TYPE (arg
), ptr_mode
, true);
2577 alias_off
= build_int_cst (alias_type
, 0);
2578 op1
= expand_normal (fold_build2_loc (loc
, MEM_REF
, TREE_TYPE (arg
),
2580 op2
= expand_normal (fold_build2_loc (loc
, MEM_REF
, TREE_TYPE (arg
),
2583 /* Compute into target1 and target2.
2584 Set TARGET to wherever the result comes back. */
2585 result
= expand_twoval_unop (sincos_optab
, op0
, target2
, target1
, 0);
2586 gcc_assert (result
);
2588 /* Move target1 and target2 to the memory locations indicated
2590 emit_move_insn (op1
, target1
);
2591 emit_move_insn (op2
, target2
);
2596 /* Expand a call to the internal cexpi builtin to the sincos math function.
2597 EXP is the expression that is a call to the builtin function; if convenient,
2598 the result should be placed in TARGET. */
2601 expand_builtin_cexpi (tree exp
, rtx target
)
2603 tree fndecl
= get_callee_fndecl (exp
);
2607 location_t loc
= EXPR_LOCATION (exp
);
2609 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2612 arg
= CALL_EXPR_ARG (exp
, 0);
2613 type
= TREE_TYPE (arg
);
2614 mode
= TYPE_MODE (TREE_TYPE (arg
));
2616 /* Try expanding via a sincos optab, fall back to emitting a libcall
2617 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2618 is only generated from sincos, cexp or if we have either of them. */
2619 if (optab_handler (sincos_optab
, mode
) != CODE_FOR_nothing
)
2621 op1
= gen_reg_rtx (mode
);
2622 op2
= gen_reg_rtx (mode
);
2624 op0
= expand_expr (arg
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
2626 /* Compute into op1 and op2. */
2627 expand_twoval_unop (sincos_optab
, op0
, op2
, op1
, 0);
2629 else if (targetm
.libc_has_function (function_sincos
))
2631 tree call
, fn
= NULL_TREE
;
2635 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2636 fn
= builtin_decl_explicit (BUILT_IN_SINCOSF
);
2637 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2638 fn
= builtin_decl_explicit (BUILT_IN_SINCOS
);
2639 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2640 fn
= builtin_decl_explicit (BUILT_IN_SINCOSL
);
2644 op1
= assign_temp (TREE_TYPE (arg
), 1, 1);
2645 op2
= assign_temp (TREE_TYPE (arg
), 1, 1);
2646 op1a
= copy_addr_to_reg (XEXP (op1
, 0));
2647 op2a
= copy_addr_to_reg (XEXP (op2
, 0));
2648 top1
= make_tree (build_pointer_type (TREE_TYPE (arg
)), op1a
);
2649 top2
= make_tree (build_pointer_type (TREE_TYPE (arg
)), op2a
);
2651 /* Make sure not to fold the sincos call again. */
2652 call
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
2653 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn
)),
2654 call
, 3, arg
, top1
, top2
));
2658 tree call
, fn
= NULL_TREE
, narg
;
2659 tree ctype
= build_complex_type (type
);
2661 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2662 fn
= builtin_decl_explicit (BUILT_IN_CEXPF
);
2663 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2664 fn
= builtin_decl_explicit (BUILT_IN_CEXP
);
2665 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2666 fn
= builtin_decl_explicit (BUILT_IN_CEXPL
);
2670 /* If we don't have a decl for cexp create one. This is the
2671 friendliest fallback if the user calls __builtin_cexpi
2672 without full target C99 function support. */
2673 if (fn
== NULL_TREE
)
2676 const char *name
= NULL
;
2678 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2680 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2682 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2685 fntype
= build_function_type_list (ctype
, ctype
, NULL_TREE
);
2686 fn
= build_fn_decl (name
, fntype
);
2689 narg
= fold_build2_loc (loc
, COMPLEX_EXPR
, ctype
,
2690 build_real (type
, dconst0
), arg
);
2692 /* Make sure not to fold the cexp call again. */
2693 call
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
2694 return expand_expr (build_call_nary (ctype
, call
, 1, narg
),
2695 target
, VOIDmode
, EXPAND_NORMAL
);
2698 /* Now build the proper return type. */
2699 return expand_expr (build2 (COMPLEX_EXPR
, build_complex_type (type
),
2700 make_tree (TREE_TYPE (arg
), op2
),
2701 make_tree (TREE_TYPE (arg
), op1
)),
2702 target
, VOIDmode
, EXPAND_NORMAL
);
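/* Illustrative sketch (not part of GCC): the identity behind the cexpi
   expansion above.  __builtin_cexpi (x) is cos (x) + i*sin (x), so when
   neither a sincos optab nor a sincos() in libc is available the code above
   rebuilds the call as cexp applied to a purely imaginary argument.  In
   plain C (assuming <complex.h>):  */
#if 0
#include <complex.h>
static double complex cexpi_via_cexp (double x) { return cexp (x * I); }
#endif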
/* Conveniently construct a function call expression.  FNDECL names the
   function to be called, N is the number of arguments, and the "..."
   parameters are the argument expressions.  Unlike build_call_expr
   this doesn't fold the call, hence it will always return a CALL_EXPR.  */

static tree
build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
{
  va_list ap;
  tree fntype = TREE_TYPE (fndecl);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);

  va_start (ap, n);
  fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
  va_end (ap);
  SET_EXPR_LOCATION (fn, loc);
  return fn;
}
2724 /* Expand a call to one of the builtin rounding functions gcc defines
2725 as an extension (lfloor and lceil). As these are gcc extensions we
2726 do not need to worry about setting errno to EDOM.
2727 If expanding via optab fails, lower expression to (int)(floor(x)).
2728 EXP is the expression that is a call to the builtin function;
2729 if convenient, the result should be placed in TARGET. */
2732 expand_builtin_int_roundingfn (tree exp
, rtx target
)
2734 convert_optab builtin_optab
;
2737 tree fndecl
= get_callee_fndecl (exp
);
2738 enum built_in_function fallback_fn
;
2739 tree fallback_fndecl
;
2743 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2746 arg
= CALL_EXPR_ARG (exp
, 0);
2748 switch (DECL_FUNCTION_CODE (fndecl
))
2750 CASE_FLT_FN (BUILT_IN_ICEIL
):
2751 CASE_FLT_FN (BUILT_IN_LCEIL
):
2752 CASE_FLT_FN (BUILT_IN_LLCEIL
):
2753 builtin_optab
= lceil_optab
;
2754 fallback_fn
= BUILT_IN_CEIL
;
2757 CASE_FLT_FN (BUILT_IN_IFLOOR
):
2758 CASE_FLT_FN (BUILT_IN_LFLOOR
):
2759 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
2760 builtin_optab
= lfloor_optab
;
2761 fallback_fn
= BUILT_IN_FLOOR
;
2768 /* Make a suitable register to place result in. */
2769 mode
= TYPE_MODE (TREE_TYPE (exp
));
2771 target
= gen_reg_rtx (mode
);
  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
2776 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2778 op0
= expand_expr (arg
, NULL
, VOIDmode
, EXPAND_NORMAL
);
2782 /* Compute into TARGET. */
2783 if (expand_sfix_optab (target
, op0
, builtin_optab
))
2785 /* Output the entire sequence. */
2786 insns
= get_insns ();
2792 /* If we were unable to expand via the builtin, stop the sequence
2793 (without outputting the insns). */
2796 /* Fall back to floating point rounding optab. */
2797 fallback_fndecl
= mathfn_built_in (TREE_TYPE (arg
), fallback_fn
);
2799 /* For non-C99 targets we may end up without a fallback fndecl here
2800 if the user called __builtin_lfloor directly. In this case emit
2801 a call to the floor/ceil variants nevertheless. This should result
2802 in the best user experience for not full C99 targets. */
2803 if (fallback_fndecl
== NULL_TREE
)
2806 const char *name
= NULL
;
2808 switch (DECL_FUNCTION_CODE (fndecl
))
2810 case BUILT_IN_ICEIL
:
2811 case BUILT_IN_LCEIL
:
2812 case BUILT_IN_LLCEIL
:
2815 case BUILT_IN_ICEILF
:
2816 case BUILT_IN_LCEILF
:
2817 case BUILT_IN_LLCEILF
:
2820 case BUILT_IN_ICEILL
:
2821 case BUILT_IN_LCEILL
:
2822 case BUILT_IN_LLCEILL
:
2825 case BUILT_IN_IFLOOR
:
2826 case BUILT_IN_LFLOOR
:
2827 case BUILT_IN_LLFLOOR
:
2830 case BUILT_IN_IFLOORF
:
2831 case BUILT_IN_LFLOORF
:
2832 case BUILT_IN_LLFLOORF
:
2835 case BUILT_IN_IFLOORL
:
2836 case BUILT_IN_LFLOORL
:
2837 case BUILT_IN_LLFLOORL
:
2844 fntype
= build_function_type_list (TREE_TYPE (arg
),
2845 TREE_TYPE (arg
), NULL_TREE
);
2846 fallback_fndecl
= build_fn_decl (name
, fntype
);
2849 exp
= build_call_nofold_loc (EXPR_LOCATION (exp
), fallback_fndecl
, 1, arg
);
2851 tmp
= expand_normal (exp
);
2852 tmp
= maybe_emit_group_store (tmp
, TREE_TYPE (exp
));
2854 /* Truncate the result of floating point optab to integer
2855 via expand_fix (). */
2856 target
= gen_reg_rtx (mode
);
2857 expand_fix (target
, tmp
, 0);
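/* Illustrative sketch (not part of GCC): the fallback path above.  When no
   lceil/lfloor pattern exists, __builtin_lfloor (x) ends up expanded as if
   the source had contained the cast of the floating-point rounding call:  */
#if 0
long lfloor_fallback (double x) { return (long) __builtin_floor (x); }
#endif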
2862 /* Expand a call to one of the builtin math functions doing integer
2864 Return 0 if a normal call should be emitted rather than expanding the
2865 function in-line. EXP is the expression that is a call to the builtin
2866 function; if convenient, the result should be placed in TARGET. */
2869 expand_builtin_int_roundingfn_2 (tree exp
, rtx target
)
2871 convert_optab builtin_optab
;
2874 tree fndecl
= get_callee_fndecl (exp
);
2877 enum built_in_function fallback_fn
= BUILT_IN_NONE
;
2879 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2882 arg
= CALL_EXPR_ARG (exp
, 0);
2884 switch (DECL_FUNCTION_CODE (fndecl
))
2886 CASE_FLT_FN (BUILT_IN_IRINT
):
2887 fallback_fn
= BUILT_IN_LRINT
;
2889 CASE_FLT_FN (BUILT_IN_LRINT
):
2890 CASE_FLT_FN (BUILT_IN_LLRINT
):
2891 builtin_optab
= lrint_optab
;
2894 CASE_FLT_FN (BUILT_IN_IROUND
):
2895 fallback_fn
= BUILT_IN_LROUND
;
2897 CASE_FLT_FN (BUILT_IN_LROUND
):
2898 CASE_FLT_FN (BUILT_IN_LLROUND
):
2899 builtin_optab
= lround_optab
;
2906 /* There's no easy way to detect the case we need to set EDOM. */
2907 if (flag_errno_math
&& fallback_fn
== BUILT_IN_NONE
)
2910 /* Make a suitable register to place result in. */
2911 mode
= TYPE_MODE (TREE_TYPE (exp
));
2913 /* There's no easy way to detect the case we need to set EDOM. */
2914 if (!flag_errno_math
)
2916 rtx result
= gen_reg_rtx (mode
);
      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
2921 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2923 op0
= expand_expr (arg
, NULL
, VOIDmode
, EXPAND_NORMAL
);
2927 if (expand_sfix_optab (result
, op0
, builtin_optab
))
2929 /* Output the entire sequence. */
2930 insns
= get_insns ();
2936 /* If we were unable to expand via the builtin, stop the sequence
2937 (without outputting the insns) and call to the library function
2938 with the stabilized argument list. */
2942 if (fallback_fn
!= BUILT_IN_NONE
)
2944 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2945 targets, (int) round (x) should never be transformed into
2946 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2947 a call to lround in the hope that the target provides at least some
2948 C99 functions. This should result in the best user experience for
2949 not full C99 targets. */
2950 tree fallback_fndecl
= mathfn_built_in_1 (TREE_TYPE (arg
),
2953 exp
= build_call_nofold_loc (EXPR_LOCATION (exp
),
2954 fallback_fndecl
, 1, arg
);
2956 target
= expand_call (exp
, NULL_RTX
, target
== const0_rtx
);
2957 target
= maybe_emit_group_store (target
, TREE_TYPE (exp
));
2958 return convert_to_mode (mode
, target
, 0);
2961 return expand_call (exp
, target
, target
== const0_rtx
);
2964 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2965 a normal call should be emitted rather than expanding the function
2966 in-line. EXP is the expression that is a call to the builtin
2967 function; if convenient, the result should be placed in TARGET. */
2970 expand_builtin_powi (tree exp
, rtx target
)
2977 if (! validate_arglist (exp
, REAL_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
2980 arg0
= CALL_EXPR_ARG (exp
, 0);
2981 arg1
= CALL_EXPR_ARG (exp
, 1);
2982 mode
= TYPE_MODE (TREE_TYPE (exp
));
2984 /* Emit a libcall to libgcc. */
2986 /* Mode of the 2nd argument must match that of an int. */
2987 mode2
= mode_for_size (INT_TYPE_SIZE
, MODE_INT
, 0);
2989 if (target
== NULL_RTX
)
2990 target
= gen_reg_rtx (mode
);
2992 op0
= expand_expr (arg0
, NULL_RTX
, mode
, EXPAND_NORMAL
);
2993 if (GET_MODE (op0
) != mode
)
2994 op0
= convert_to_mode (mode
, op0
, 0);
2995 op1
= expand_expr (arg1
, NULL_RTX
, mode2
, EXPAND_NORMAL
);
2996 if (GET_MODE (op1
) != mode2
)
2997 op1
= convert_to_mode (mode2
, op1
, 0);
2999 target
= emit_library_call_value (optab_libfunc (powi_optab
, mode
),
3000 target
, LCT_CONST
, mode
, 2,
3001 op0
, mode
, op1
, mode2
);
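/* Illustrative sketch (not part of GCC): the semantics of the powi libcall
   emitted above.  __builtin_powi (x, n) raises x to an integer power; when
   it is not expanded inline it becomes a call to the helper returned by
   optab_libfunc for the argument's mode.  A naive reference version of the
   semantics (real implementations typically use repeated squaring):  */
#if 0
static double powi_reference (double x, int n)
{
  double r = 1.0;
  int i, an = n < 0 ? -n : n;
  for (i = 0; i < an; i++)
    r *= x;
  return n < 0 ? 1.0 / r : r;
}
#endif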
/* Expand expression EXP which is a call to the strlen builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
   try to get the result in TARGET, if convenient.  */
3011 expand_builtin_strlen (tree exp
, rtx target
,
3012 machine_mode target_mode
)
3014 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
3018 struct expand_operand ops
[4];
3021 tree src
= CALL_EXPR_ARG (exp
, 0);
3023 rtx_insn
*before_strlen
;
3024 machine_mode insn_mode
= target_mode
;
3025 enum insn_code icode
= CODE_FOR_nothing
;
3028 /* If the length can be computed at compile-time, return it. */
3029 len
= c_strlen (src
, 0);
3031 return expand_expr (len
, target
, target_mode
, EXPAND_NORMAL
);
3033 /* If the length can be computed at compile-time and is constant
3034 integer, but there are side-effects in src, evaluate
3035 src for side-effects, then return len.
3036 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3037 can be optimized into: i++; x = 3; */
3038 len
= c_strlen (src
, 1);
3039 if (len
&& TREE_CODE (len
) == INTEGER_CST
)
3041 expand_expr (src
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3042 return expand_expr (len
, target
, target_mode
, EXPAND_NORMAL
);
3045 align
= get_pointer_alignment (src
) / BITS_PER_UNIT
;
3047 /* If SRC is not a pointer type, don't do this operation inline. */
3051 /* Bail out if we can't compute strlen in the right mode. */
3052 while (insn_mode
!= VOIDmode
)
3054 icode
= optab_handler (strlen_optab
, insn_mode
);
3055 if (icode
!= CODE_FOR_nothing
)
3058 insn_mode
= GET_MODE_WIDER_MODE (insn_mode
);
3060 if (insn_mode
== VOIDmode
)
3063 /* Make a place to hold the source address. We will not expand
3064 the actual source until we are sure that the expansion will
3065 not fail -- there are trees that cannot be expanded twice. */
3066 src_reg
= gen_reg_rtx (Pmode
);
3068 /* Mark the beginning of the strlen sequence so we can emit the
3069 source operand later. */
3070 before_strlen
= get_last_insn ();
3072 create_output_operand (&ops
[0], target
, insn_mode
);
3073 create_fixed_operand (&ops
[1], gen_rtx_MEM (BLKmode
, src_reg
));
3074 create_integer_operand (&ops
[2], 0);
3075 create_integer_operand (&ops
[3], align
);
3076 if (!maybe_expand_insn (icode
, 4, ops
))
3079 /* Now that we are assured of success, expand the source. */
3081 pat
= expand_expr (src
, src_reg
, Pmode
, EXPAND_NORMAL
);
3084 #ifdef POINTERS_EXTEND_UNSIGNED
3085 if (GET_MODE (pat
) != Pmode
)
3086 pat
= convert_to_mode (Pmode
, pat
,
3087 POINTERS_EXTEND_UNSIGNED
);
3089 emit_move_insn (src_reg
, pat
);
3095 emit_insn_after (pat
, before_strlen
);
3097 emit_insn_before (pat
, get_insns ());
3099 /* Return the value in the proper mode for this function. */
3100 if (GET_MODE (ops
[0].value
) == target_mode
)
3101 target
= ops
[0].value
;
3102 else if (target
!= 0)
3103 convert_move (target
, ops
[0].value
, 0);
3105 target
= convert_to_mode (target_mode
, ops
[0].value
, 0);
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

static rtx
builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
			 machine_mode mode)
{
  const char *str = (const char *) data;

  gcc_assert (offset >= 0
	      && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
		  <= strlen (str) + 1));

  return c_readstr (str + offset, mode);
}
/* LEN specifies the length of the block for a memcpy/memset operation.
   Figure out its range and put it into MIN_SIZE/MAX_SIZE.
   In some cases we can make a very likely guess on the max size, in which
   case we set it into PROBABLE_MAX_SIZE.  */
3134 determine_block_size (tree len
, rtx len_rtx
,
3135 unsigned HOST_WIDE_INT
*min_size
,
3136 unsigned HOST_WIDE_INT
*max_size
,
3137 unsigned HOST_WIDE_INT
*probable_max_size
)
3139 if (CONST_INT_P (len_rtx
))
3141 *min_size
= *max_size
= *probable_max_size
= UINTVAL (len_rtx
);
3147 enum value_range_type range_type
= VR_UNDEFINED
;
3149 /* Determine bounds from the type. */
3150 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len
))))
3151 *min_size
= tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len
)));
3154 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len
))))
3155 *probable_max_size
= *max_size
3156 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len
)));
3158 *probable_max_size
= *max_size
= GET_MODE_MASK (GET_MODE (len_rtx
));
3160 if (TREE_CODE (len
) == SSA_NAME
)
3161 range_type
= get_range_info (len
, &min
, &max
);
3162 if (range_type
== VR_RANGE
)
3164 if (wi::fits_uhwi_p (min
) && *min_size
< min
.to_uhwi ())
3165 *min_size
= min
.to_uhwi ();
3166 if (wi::fits_uhwi_p (max
) && *max_size
> max
.to_uhwi ())
3167 *probable_max_size
= *max_size
= max
.to_uhwi ();
3169 else if (range_type
== VR_ANTI_RANGE
)
	  /* An anti-range 0...N lets us determine a minimal size of N+1.  */
3174 if (wi::fits_uhwi_p (max
) && max
.to_uhwi () + 1 != 0)
3175 *min_size
= max
.to_uhwi () + 1;
3183 Produce anti range allowing negative values of N. We still
3184 can use the information and make a guess that N is not negative.
3186 else if (!wi::leu_p (max
, 1 << 30) && wi::fits_uhwi_p (min
))
3187 *probable_max_size
= min
.to_uhwi () - 1;
3190 gcc_checking_assert (*max_size
<=
3191 (unsigned HOST_WIDE_INT
)
3192 GET_MODE_MASK (GET_MODE (len_rtx
)));
3195 /* Helper function to do the actual work for expand_builtin_memcpy. */
3198 expand_builtin_memcpy_args (tree dest
, tree src
, tree len
, rtx target
, tree exp
)
3200 const char *src_str
;
3201 unsigned int src_align
= get_pointer_alignment (src
);
3202 unsigned int dest_align
= get_pointer_alignment (dest
);
3203 rtx dest_mem
, src_mem
, dest_addr
, len_rtx
;
3204 HOST_WIDE_INT expected_size
= -1;
3205 unsigned int expected_align
= 0;
3206 unsigned HOST_WIDE_INT min_size
;
3207 unsigned HOST_WIDE_INT max_size
;
3208 unsigned HOST_WIDE_INT probable_max_size
;
3210 /* If DEST is not a pointer type, call the normal function. */
3211 if (dest_align
== 0)
3214 /* If either SRC is not a pointer type, don't do this
3215 operation in-line. */
3219 if (currently_expanding_gimple_stmt
)
3220 stringop_block_profile (currently_expanding_gimple_stmt
,
3221 &expected_align
, &expected_size
);
3223 if (expected_align
< dest_align
)
3224 expected_align
= dest_align
;
3225 dest_mem
= get_memory_rtx (dest
, len
);
3226 set_mem_align (dest_mem
, dest_align
);
3227 len_rtx
= expand_normal (len
);
3228 determine_block_size (len
, len_rtx
, &min_size
, &max_size
,
3229 &probable_max_size
);
3230 src_str
= c_getstr (src
);
  /* If SRC is a string constant and block move would be done
     by pieces, we can avoid loading the string from memory
     and instead store the computed constants directly.  */
3236 && CONST_INT_P (len_rtx
)
3237 && (unsigned HOST_WIDE_INT
) INTVAL (len_rtx
) <= strlen (src_str
) + 1
3238 && can_store_by_pieces (INTVAL (len_rtx
), builtin_memcpy_read_str
,
3239 CONST_CAST (char *, src_str
),
3242 dest_mem
= store_by_pieces (dest_mem
, INTVAL (len_rtx
),
3243 builtin_memcpy_read_str
,
3244 CONST_CAST (char *, src_str
),
3245 dest_align
, false, 0);
3246 dest_mem
= force_operand (XEXP (dest_mem
, 0), target
);
3247 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3251 src_mem
= get_memory_rtx (src
, len
);
3252 set_mem_align (src_mem
, src_align
);
3254 /* Copy word part most expediently. */
3255 dest_addr
= emit_block_move_hints (dest_mem
, src_mem
, len_rtx
,
3256 CALL_EXPR_TAILCALL (exp
)
3257 ? BLOCK_OP_TAILCALL
: BLOCK_OP_NORMAL
,
3258 expected_align
, expected_size
,
3259 min_size
, max_size
, probable_max_size
);
3263 dest_addr
= force_operand (XEXP (dest_mem
, 0), target
);
3264 dest_addr
= convert_memory_address (ptr_mode
, dest_addr
);
/* Expand a call EXP to the memcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call.
   Otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memcpy (tree exp, rtx target)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      return expand_builtin_memcpy_args (dest, src, len, target, exp);
    }
}
3290 /* Expand an instrumented call EXP to the memcpy builtin.
3291 Return NULL_RTX if we failed, the caller should emit a normal call,
3292 otherwise try to get the result in TARGET, if convenient (and in
3293 mode MODE if that's convenient). */
3296 expand_builtin_memcpy_with_bounds (tree exp
, rtx target
)
3298 if (!validate_arglist (exp
,
3299 POINTER_TYPE
, POINTER_BOUNDS_TYPE
,
3300 POINTER_TYPE
, POINTER_BOUNDS_TYPE
,
3301 INTEGER_TYPE
, VOID_TYPE
))
3305 tree dest
= CALL_EXPR_ARG (exp
, 0);
3306 tree src
= CALL_EXPR_ARG (exp
, 2);
3307 tree len
= CALL_EXPR_ARG (exp
, 4);
3308 rtx res
= expand_builtin_memcpy_args (dest
, src
, len
, target
, exp
);
3310 /* Return src bounds with the result. */
3313 rtx bnd
= force_reg (targetm
.chkp_bound_mode (),
3314 expand_normal (CALL_EXPR_ARG (exp
, 1)));
3315 res
= chkp_join_splitted_slot (res
, bnd
);
/* Expand a call EXP to the mempcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  If ENDP is 0 return the
   destination pointer, if ENDP is 1 return the end pointer ala
   mempcpy, and if ENDP is 2 return the end pointer minus one ala
   stpcpy.  */

static rtx
expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      return expand_builtin_mempcpy_args (dest, src, len,
					  target, mode, /*endp=*/ 1,
					  exp);
    }
}
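/* Illustrative sketch (not part of GCC): the ENDP convention shared by the
   mempcpy/stpcpy expanders, written as a hypothetical helper over ordinary
   pointers (assuming <stddef.h>):  */
#if 0
static char *
endp_result (char *dest, size_t n, int endp)
{
  if (endp == 0)
    return dest;          /* memcpy-style: the destination pointer */
  if (endp == 1)
    return dest + n;      /* mempcpy-style: one past the last byte written */
  return dest + n - 1;    /* stpcpy-style: points at the final byte (the NUL) */
}
#endif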
3346 /* Expand an instrumented call EXP to the mempcpy builtin.
3347 Return NULL_RTX if we failed, the caller should emit a normal call,
3348 otherwise try to get the result in TARGET, if convenient (and in
3349 mode MODE if that's convenient). */
3352 expand_builtin_mempcpy_with_bounds (tree exp
, rtx target
, machine_mode mode
)
3354 if (!validate_arglist (exp
,
3355 POINTER_TYPE
, POINTER_BOUNDS_TYPE
,
3356 POINTER_TYPE
, POINTER_BOUNDS_TYPE
,
3357 INTEGER_TYPE
, VOID_TYPE
))
3361 tree dest
= CALL_EXPR_ARG (exp
, 0);
3362 tree src
= CALL_EXPR_ARG (exp
, 2);
3363 tree len
= CALL_EXPR_ARG (exp
, 4);
3364 rtx res
= expand_builtin_mempcpy_args (dest
, src
, len
, target
,
3367 /* Return src bounds with the result. */
3370 rtx bnd
= force_reg (targetm
.chkp_bound_mode (),
3371 expand_normal (CALL_EXPR_ARG (exp
, 1)));
3372 res
= chkp_join_splitted_slot (res
, bnd
);
3378 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3379 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3380 so that this can also be called without constructing an actual CALL_EXPR.
3381 The other arguments and return value are the same as for
3382 expand_builtin_mempcpy. */
3385 expand_builtin_mempcpy_args (tree dest
, tree src
, tree len
,
3386 rtx target
, machine_mode mode
, int endp
,
3389 tree fndecl
= get_callee_fndecl (orig_exp
);
3391 /* If return value is ignored, transform mempcpy into memcpy. */
3392 if (target
== const0_rtx
3393 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3394 && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP
))
3396 tree fn
= builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP
);
3397 tree result
= build_call_nofold_loc (UNKNOWN_LOCATION
, fn
, 3,
3399 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3401 else if (target
== const0_rtx
3402 && builtin_decl_implicit_p (BUILT_IN_MEMCPY
))
3404 tree fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
3405 tree result
= build_call_nofold_loc (UNKNOWN_LOCATION
, fn
, 3,
3407 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3411 const char *src_str
;
3412 unsigned int src_align
= get_pointer_alignment (src
);
3413 unsigned int dest_align
= get_pointer_alignment (dest
);
3414 rtx dest_mem
, src_mem
, len_rtx
;
3416 /* If either SRC or DEST is not a pointer type, don't do this
3417 operation in-line. */
3418 if (dest_align
== 0 || src_align
== 0)
3421 /* If LEN is not constant, call the normal function. */
3422 if (! tree_fits_uhwi_p (len
))
3425 len_rtx
= expand_normal (len
);
3426 src_str
= c_getstr (src
);
      /* If SRC is a string constant and block move would be done
	 by pieces, we can avoid loading the string from memory
	 and instead store the computed constants directly.  */
3432 && CONST_INT_P (len_rtx
)
3433 && (unsigned HOST_WIDE_INT
) INTVAL (len_rtx
) <= strlen (src_str
) + 1
3434 && can_store_by_pieces (INTVAL (len_rtx
), builtin_memcpy_read_str
,
3435 CONST_CAST (char *, src_str
),
3438 dest_mem
= get_memory_rtx (dest
, len
);
3439 set_mem_align (dest_mem
, dest_align
);
3440 dest_mem
= store_by_pieces (dest_mem
, INTVAL (len_rtx
),
3441 builtin_memcpy_read_str
,
3442 CONST_CAST (char *, src_str
),
3443 dest_align
, false, endp
);
3444 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3445 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3449 if (CONST_INT_P (len_rtx
)
3450 && can_move_by_pieces (INTVAL (len_rtx
),
3451 MIN (dest_align
, src_align
)))
3453 dest_mem
= get_memory_rtx (dest
, len
);
3454 set_mem_align (dest_mem
, dest_align
);
3455 src_mem
= get_memory_rtx (src
, len
);
3456 set_mem_align (src_mem
, src_align
);
3457 dest_mem
= move_by_pieces (dest_mem
, src_mem
, INTVAL (len_rtx
),
3458 MIN (dest_align
, src_align
), endp
);
3459 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3460 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3469 # define HAVE_movstr 0
3470 # define CODE_FOR_movstr CODE_FOR_nothing
3473 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3474 we failed, the caller should emit a normal call, otherwise try to
3475 get the result in TARGET, if convenient. If ENDP is 0 return the
3476 destination pointer, if ENDP is 1 return the end pointer ala
3477 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3481 expand_movstr (tree dest
, tree src
, rtx target
, int endp
)
3483 struct expand_operand ops
[3];
3490 dest_mem
= get_memory_rtx (dest
, NULL
);
3491 src_mem
= get_memory_rtx (src
, NULL
);
3494 target
= force_reg (Pmode
, XEXP (dest_mem
, 0));
3495 dest_mem
= replace_equiv_address (dest_mem
, target
);
3498 create_output_operand (&ops
[0], endp
? target
: NULL_RTX
, Pmode
);
3499 create_fixed_operand (&ops
[1], dest_mem
);
3500 create_fixed_operand (&ops
[2], src_mem
);
3501 if (!maybe_expand_insn (CODE_FOR_movstr
, 3, ops
))
3504 if (endp
&& target
!= const0_rtx
)
3506 target
= ops
[0].value
;
3507 /* movstr is supposed to set end to the address of the NUL
3508 terminator. If the caller requested a mempcpy-like return value,
3512 rtx tem
= plus_constant (GET_MODE (target
),
3513 gen_lowpart (GET_MODE (target
), target
), 1);
3514 emit_move_insn (target
, force_operand (tem
, NULL_RTX
));
/* Expand expression EXP, which is a call to the strcpy builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
   try to get the result in TARGET, if convenient (and in mode MODE if
   that's convenient).  */

static rtx
expand_builtin_strcpy (tree exp, rtx target)
{
  if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      return expand_builtin_strcpy_args (dest, src, target);
    }
  return NULL_RTX;
}

/* Helper function to do the actual work for expand_builtin_strcpy.  The
   arguments to the builtin_strcpy call DEST and SRC are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_strcpy.  */

static rtx
expand_builtin_strcpy_args (tree dest, tree src, rtx target)
{
  return expand_movstr (dest, src, target, /*endp=*/0);
}
/* Expand a call EXP to the stpcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call.
   Otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */
3555 expand_builtin_stpcpy (tree exp
, rtx target
, machine_mode mode
)
3558 location_t loc
= EXPR_LOCATION (exp
);
3560 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3563 dst
= CALL_EXPR_ARG (exp
, 0);
3564 src
= CALL_EXPR_ARG (exp
, 1);
3566 /* If return value is ignored, transform stpcpy into strcpy. */
3567 if (target
== const0_rtx
&& builtin_decl_implicit (BUILT_IN_STRCPY
))
3569 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
3570 tree result
= build_call_nofold_loc (loc
, fn
, 2, dst
, src
);
3571 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3578 /* Ensure we get an actual string whose length can be evaluated at
3579 compile-time, not an expression containing a string. This is
3580 because the latter will potentially produce pessimized code
3581 when used to produce the return value. */
3582 if (! c_getstr (src
) || ! (len
= c_strlen (src
, 0)))
3583 return expand_movstr (dst
, src
, target
, /*endp=*/2);
3585 lenp1
= size_binop_loc (loc
, PLUS_EXPR
, len
, ssize_int (1));
3586 ret
= expand_builtin_mempcpy_args (dst
, src
, lenp1
,
3587 target
, mode
, /*endp=*/2,
3593 if (TREE_CODE (len
) == INTEGER_CST
)
3595 rtx len_rtx
= expand_normal (len
);
3597 if (CONST_INT_P (len_rtx
))
3599 ret
= expand_builtin_strcpy_args (dst
, src
, target
);
3605 if (mode
!= VOIDmode
)
3606 target
= gen_reg_rtx (mode
);
3608 target
= gen_reg_rtx (GET_MODE (ret
));
3610 if (GET_MODE (target
) != GET_MODE (ret
))
3611 ret
= gen_lowpart (GET_MODE (target
), ret
);
3613 ret
= plus_constant (GET_MODE (ret
), ret
, INTVAL (len_rtx
));
3614 ret
= emit_move_insn (target
, force_operand (ret
, NULL_RTX
));
3622 return expand_movstr (dst
, src
, target
, /*endp=*/2);
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

static rtx
builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
			  machine_mode mode)
{
  const char *str = (const char *) data;

  if ((unsigned HOST_WIDE_INT) offset > strlen (str))
    return const0_rtx;

  return c_readstr (str + offset, mode);
}
/* Expand expression EXP, which is a call to the strncpy builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  */
3646 expand_builtin_strncpy (tree exp
, rtx target
)
3648 location_t loc
= EXPR_LOCATION (exp
);
3650 if (validate_arglist (exp
,
3651 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3653 tree dest
= CALL_EXPR_ARG (exp
, 0);
3654 tree src
= CALL_EXPR_ARG (exp
, 1);
3655 tree len
= CALL_EXPR_ARG (exp
, 2);
3656 tree slen
= c_strlen (src
, 1);
3658 /* We must be passed a constant len and src parameter. */
3659 if (!tree_fits_uhwi_p (len
) || !slen
|| !tree_fits_uhwi_p (slen
))
3662 slen
= size_binop_loc (loc
, PLUS_EXPR
, slen
, ssize_int (1));
3664 /* We're required to pad with trailing zeros if the requested
3665 len is greater than strlen(s2)+1. In that case try to
3666 use store_by_pieces, if it fails, punt. */
3667 if (tree_int_cst_lt (slen
, len
))
3669 unsigned int dest_align
= get_pointer_alignment (dest
);
3670 const char *p
= c_getstr (src
);
3673 if (!p
|| dest_align
== 0 || !tree_fits_uhwi_p (len
)
3674 || !can_store_by_pieces (tree_to_uhwi (len
),
3675 builtin_strncpy_read_str
,
3676 CONST_CAST (char *, p
),
3680 dest_mem
= get_memory_rtx (dest
, len
);
3681 store_by_pieces (dest_mem
, tree_to_uhwi (len
),
3682 builtin_strncpy_read_str
,
3683 CONST_CAST (char *, p
), dest_align
, false, 0);
3684 dest_mem
= force_operand (XEXP (dest_mem
, 0), target
);
3685 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

static rtx
builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
			 machine_mode mode)
{
  const char *c = (const char *) data;
  char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));

  memset (p, *c, GET_MODE_SIZE (mode));

  return c_readstr (p, mode);
}

/* Callback routine for store_by_pieces.  Return the RTL of a register
   containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
   char value given in the RTL register data.  For example, if mode is
   4 bytes wide, return the RTL for 0x01010101*data.  */

static rtx
builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
			machine_mode mode)
{
  rtx target, coeff;
  size_t size;
  char *p;

  size = GET_MODE_SIZE (mode);
  if (size == 1)
    return (rtx) data;

  p = XALLOCAVEC (char, size);
  memset (p, 1, size);
  coeff = c_readstr (p, mode);

  target = convert_to_mode (mode, (rtx) data, 1);
  target = expand_mult (mode, target, coeff, NULL_RTX, 1);
  return force_reg (mode, target);
}
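/* Illustrative sketch (not part of GCC): the replication trick used above.
   Multiplying the zero-extended byte value by a constant with 0x01 in every
   byte broadcasts it across the word; for a 4-byte mode,
   0x2a * 0x01010101 == 0x2a2a2a2a.  In plain C:  */
#if 0
static unsigned int
broadcast_byte_4 (unsigned char c)
{
  return (unsigned int) c * 0x01010101u;
}
#endif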
/* Expand expression EXP, which is a call to the memset builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
   try to get the result in TARGET, if convenient (and in mode MODE if
   that's convenient).  */

static rtx
expand_builtin_memset (tree exp, rtx target, machine_mode mode)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree val = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      return expand_builtin_memset_args (dest, val, len, target, mode, exp);
    }
}
/* Expand expression EXP, which is an instrumented call to the memset builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call.
   Otherwise try to get the result in TARGET, if convenient (and in mode MODE
   if that's convenient).  */

static rtx
expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_BOUNDS_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree val = CALL_EXPR_ARG (exp, 2);
      tree len = CALL_EXPR_ARG (exp, 3);
      rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);

      /* Return src bounds with the result.  */
      if (res)
	{
	  rtx bnd = force_reg (targetm.chkp_bound_mode (),
			       expand_normal (CALL_EXPR_ARG (exp, 1)));
	  res = chkp_join_splitted_slot (res, bnd);
	}
      return res;
    }
}
/* Helper function to do the actual work for expand_builtin_memset.  The
   arguments to the builtin_memset call DEST, VAL, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_memset.  */

static rtx
expand_builtin_memset_args (tree dest, tree val, tree len,
			    rtx target, machine_mode mode, tree orig_exp)
{
  tree fndecl, fn;
  enum built_in_function fcode;
  machine_mode val_mode;
  char c;
  unsigned int dest_align;
  rtx dest_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;
  unsigned HOST_WIDE_INT min_size;
  unsigned HOST_WIDE_INT max_size;
  unsigned HOST_WIDE_INT probable_max_size;

  dest_align = get_pointer_alignment (dest);

  /* If DEST is not a pointer type, don't do this operation in-line.  */
  if (dest_align == 0)
    return NULL_RTX;

  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
			    &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Evaluate and ignore VAL in case it has side-effects.  */
      expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (dest, target, mode, EXPAND_NORMAL);
    }

  /* Stabilize the arguments in case we fail.  */
  dest = builtin_save_expr (dest);
  val = builtin_save_expr (val);
  len = builtin_save_expr (len);

  len_rtx = expand_normal (len);
  determine_block_size (len, len_rtx, &min_size, &max_size,
			&probable_max_size);
  dest_mem = get_memory_rtx (dest, len);
  val_mode = TYPE_MODE (unsigned_char_type_node);

  if (TREE_CODE (val) != INTEGER_CST)
    {
      rtx val_rtx;

      val_rtx = expand_normal (val);
      val_rtx = convert_to_mode (val_mode, val_rtx, 0);

      /* Assume that we can memset by pieces if we can store
       * the coefficients by pieces (in the required modes).
       * We can't pass builtin_memset_gen_str as that emits RTL.  */
      c = 1;
      if (tree_fits_uhwi_p (len)
	  && can_store_by_pieces (tree_to_uhwi (len),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	{
	  val_rtx = force_reg (val_mode, val_rtx);
	  store_by_pieces (dest_mem, tree_to_uhwi (len),
			   builtin_memset_gen_str, val_rtx, dest_align,
			   true, 0);
	}
      else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
					dest_align, expected_align,
					expected_size, min_size, max_size,
					probable_max_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  if (target_char_cast (val, &c))
    goto do_libcall;

  if (c)
    {
      if (tree_fits_uhwi_p (len)
	  && can_store_by_pieces (tree_to_uhwi (len),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	store_by_pieces (dest_mem, tree_to_uhwi (len),
			 builtin_memset_read_str, &c, dest_align, true, 0);
      else if (!set_storage_via_setmem (dest_mem, len_rtx,
					gen_int_mode (c, val_mode),
					dest_align, expected_align,
					expected_size, min_size, max_size,
					probable_max_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  set_mem_align (dest_mem, dest_align);
  dest_addr = clear_storage_hints (dest_mem, len_rtx,
				   CALL_EXPR_TAILCALL (orig_exp)
				   ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
				   expected_align, expected_size,
				   min_size, max_size,
				   probable_max_size);

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  return dest_addr;

 do_libcall:
  fndecl = get_callee_fndecl (orig_exp);
  fcode = DECL_FUNCTION_CODE (fndecl);
  if (fcode == BUILT_IN_MEMSET
      || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
				dest, val, len);
  else if (fcode == BUILT_IN_BZERO)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
				dest, len);
  else
    gcc_unreachable ();
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
  return expand_call (fn, target, target == const0_rtx);
}
/* Expand expression EXP, which is a call to the bzero builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  */

static rtx
expand_builtin_bzero (tree exp)
{
  tree dest, size;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dest = CALL_EXPR_ARG (exp, 0);
  size = CALL_EXPR_ARG (exp, 1);

  /* New argument list transforming bzero(ptr x, int y) to
     memset(ptr x, int 0, size_t y).  This is done this way
     so that if it isn't expanded inline, we fall back to
     calling bzero instead of memset.  */

  return expand_builtin_memset_args (dest, integer_zero_node,
				     fold_convert_loc (loc,
						       size_type_node, size),
				     const0_rtx, VOIDmode, exp);
}
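/* Illustrative note (editorial addition): at the source level the call above
   corresponds to rewriting

       bzero (p, n);    as    memset (p, 0, (size_t) n);

   but ORIG_EXP is kept as the original bzero CALL_EXPR, so if the inline
   expansion fails the fallback library call is still bzero, not memset.  */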
3952 /* Expand expression EXP, which is a call to the memcmp built-in function.
3953 Return NULL_RTX if we failed and the caller should emit a normal call,
3954 otherwise try to get the result in TARGET, if convenient (and in mode
3955 MODE, if that's convenient). */
3958 expand_builtin_memcmp (tree exp
, ATTRIBUTE_UNUSED rtx target
,
3959 ATTRIBUTE_UNUSED machine_mode mode
)
3961 location_t loc ATTRIBUTE_UNUSED
= EXPR_LOCATION (exp
);
3963 if (!validate_arglist (exp
,
3964 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3967 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3968 implementing memcmp because it will stop if it encounters two
3970 #if defined HAVE_cmpmemsi
3972 rtx arg1_rtx
, arg2_rtx
, arg3_rtx
;
3975 tree arg1
= CALL_EXPR_ARG (exp
, 0);
3976 tree arg2
= CALL_EXPR_ARG (exp
, 1);
3977 tree len
= CALL_EXPR_ARG (exp
, 2);
3979 unsigned int arg1_align
= get_pointer_alignment (arg1
) / BITS_PER_UNIT
;
3980 unsigned int arg2_align
= get_pointer_alignment (arg2
) / BITS_PER_UNIT
;
3981 machine_mode insn_mode
;
3984 insn_mode
= insn_data
[(int) CODE_FOR_cmpmemsi
].operand
[0].mode
;
3988 /* If we don't have POINTER_TYPE, call the function. */
3989 if (arg1_align
== 0 || arg2_align
== 0)
3992 /* Make a place to write the result of the instruction. */
3995 && REG_P (result
) && GET_MODE (result
) == insn_mode
3996 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
3997 result
= gen_reg_rtx (insn_mode
);
3999 arg1_rtx
= get_memory_rtx (arg1
, len
);
4000 arg2_rtx
= get_memory_rtx (arg2
, len
);
4001 arg3_rtx
= expand_normal (fold_convert_loc (loc
, sizetype
, len
));
4003 /* Set MEM_SIZE as appropriate. */
4004 if (CONST_INT_P (arg3_rtx
))
4006 set_mem_size (arg1_rtx
, INTVAL (arg3_rtx
));
4007 set_mem_size (arg2_rtx
, INTVAL (arg3_rtx
));
4011 insn
= gen_cmpmemsi (result
, arg1_rtx
, arg2_rtx
, arg3_rtx
,
4012 GEN_INT (MIN (arg1_align
, arg2_align
)));
4019 emit_library_call_value (memcmp_libfunc
, result
, LCT_PURE
,
4020 TYPE_MODE (integer_type_node
), 3,
4021 XEXP (arg1_rtx
, 0), Pmode
,
4022 XEXP (arg2_rtx
, 0), Pmode
,
4023 convert_to_mode (TYPE_MODE (sizetype
), arg3_rtx
,
4024 TYPE_UNSIGNED (sizetype
)),
4025 TYPE_MODE (sizetype
));
4027 /* Return the value in the proper mode for this function. */
4028 mode
= TYPE_MODE (TREE_TYPE (exp
));
4029 if (GET_MODE (result
) == mode
)
4031 else if (target
!= 0)
4033 convert_move (target
, result
, 0);
4037 return convert_to_mode (mode
, result
, 0);
4039 #endif /* HAVE_cmpmemsi. */
4044 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4045 if we failed the caller should emit a normal call, otherwise try to get
4046 the result in TARGET, if convenient. */
4049 expand_builtin_strcmp (tree exp
, ATTRIBUTE_UNUSED rtx target
)
4051 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
4054 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4055 if (direct_optab_handler (cmpstr_optab
, SImode
) != CODE_FOR_nothing
4056 || direct_optab_handler (cmpstrn_optab
, SImode
) != CODE_FOR_nothing
)
4058 rtx arg1_rtx
, arg2_rtx
;
4059 rtx result
, insn
= NULL_RTX
;
4061 tree arg1
= CALL_EXPR_ARG (exp
, 0);
4062 tree arg2
= CALL_EXPR_ARG (exp
, 1);
4064 unsigned int arg1_align
= get_pointer_alignment (arg1
) / BITS_PER_UNIT
;
4065 unsigned int arg2_align
= get_pointer_alignment (arg2
) / BITS_PER_UNIT
;
4067 /* If we don't have POINTER_TYPE, call the function. */
4068 if (arg1_align
== 0 || arg2_align
== 0)
4071 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4072 arg1
= builtin_save_expr (arg1
);
4073 arg2
= builtin_save_expr (arg2
);
4075 arg1_rtx
= get_memory_rtx (arg1
, NULL
);
4076 arg2_rtx
= get_memory_rtx (arg2
, NULL
);
4078 #ifdef HAVE_cmpstrsi
4079 /* Try to call cmpstrsi. */
4082 machine_mode insn_mode
4083 = insn_data
[(int) CODE_FOR_cmpstrsi
].operand
[0].mode
;
4085 /* Make a place to write the result of the instruction. */
4088 && REG_P (result
) && GET_MODE (result
) == insn_mode
4089 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
4090 result
= gen_reg_rtx (insn_mode
);
4092 insn
= gen_cmpstrsi (result
, arg1_rtx
, arg2_rtx
,
4093 GEN_INT (MIN (arg1_align
, arg2_align
)));
4096 #ifdef HAVE_cmpstrnsi
4097 /* Try to determine at least one length and call cmpstrnsi. */
4098 if (!insn
&& HAVE_cmpstrnsi
)
4103 machine_mode insn_mode
4104 = insn_data
[(int) CODE_FOR_cmpstrnsi
].operand
[0].mode
;
4105 tree len1
= c_strlen (arg1
, 1);
4106 tree len2
= c_strlen (arg2
, 1);
4109 len1
= size_binop (PLUS_EXPR
, ssize_int (1), len1
);
4111 len2
= size_binop (PLUS_EXPR
, ssize_int (1), len2
);
4113 /* If we don't have a constant length for the first, use the length
4114 of the second, if we know it. We don't require a constant for
4115 this case; some cost analysis could be done if both are available
4116 but neither is constant. For now, assume they're equally cheap,
4117 unless one has side effects. If both strings have constant lengths,
4124 else if (TREE_SIDE_EFFECTS (len1
))
4126 else if (TREE_SIDE_EFFECTS (len2
))
4128 else if (TREE_CODE (len1
) != INTEGER_CST
)
4130 else if (TREE_CODE (len2
) != INTEGER_CST
)
4132 else if (tree_int_cst_lt (len1
, len2
))
4137 /* If both arguments have side effects, we cannot optimize. */
4138 if (!len
|| TREE_SIDE_EFFECTS (len
))
4141 arg3_rtx
= expand_normal (len
);
4143 /* Make a place to write the result of the instruction. */
4146 && REG_P (result
) && GET_MODE (result
) == insn_mode
4147 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
4148 result
= gen_reg_rtx (insn_mode
);
4150 insn
= gen_cmpstrnsi (result
, arg1_rtx
, arg2_rtx
, arg3_rtx
,
4151 GEN_INT (MIN (arg1_align
, arg2_align
)));
4160 /* Return the value in the proper mode for this function. */
4161 mode
= TYPE_MODE (TREE_TYPE (exp
));
4162 if (GET_MODE (result
) == mode
)
4165 return convert_to_mode (mode
, result
, 0);
4166 convert_move (target
, result
, 0);
4170 /* Expand the library call ourselves using a stabilized argument
4171 list to avoid re-evaluating the function's arguments twice. */
4172 #ifdef HAVE_cmpstrnsi
4175 fndecl
= get_callee_fndecl (exp
);
4176 fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fndecl
, 2, arg1
, arg2
);
4177 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
4178 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
4179 return expand_call (fn
, target
, target
== const0_rtx
);
4185 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4186 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4187 the result in TARGET, if convenient. */
4190 expand_builtin_strncmp (tree exp
, ATTRIBUTE_UNUSED rtx target
,
4191 ATTRIBUTE_UNUSED machine_mode mode
)
4193 location_t loc ATTRIBUTE_UNUSED
= EXPR_LOCATION (exp
);
4195 if (!validate_arglist (exp
,
4196 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
4199 /* If c_strlen can determine an expression for one of the string
4200 lengths, and it doesn't have side effects, then emit cmpstrnsi
4201 using length MIN(strlen(string)+1, arg3). */
4202 #ifdef HAVE_cmpstrnsi
4205 tree len
, len1
, len2
;
4206 rtx arg1_rtx
, arg2_rtx
, arg3_rtx
;
4209 tree arg1
= CALL_EXPR_ARG (exp
, 0);
4210 tree arg2
= CALL_EXPR_ARG (exp
, 1);
4211 tree arg3
= CALL_EXPR_ARG (exp
, 2);
4213 unsigned int arg1_align
= get_pointer_alignment (arg1
) / BITS_PER_UNIT
;
4214 unsigned int arg2_align
= get_pointer_alignment (arg2
) / BITS_PER_UNIT
;
4215 machine_mode insn_mode
4216 = insn_data
[(int) CODE_FOR_cmpstrnsi
].operand
[0].mode
;
4218 len1
= c_strlen (arg1
, 1);
4219 len2
= c_strlen (arg2
, 1);
4222 len1
= size_binop_loc (loc
, PLUS_EXPR
, ssize_int (1), len1
);
4224 len2
= size_binop_loc (loc
, PLUS_EXPR
, ssize_int (1), len2
);
4226 /* If we don't have a constant length for the first, use the length
4227 of the second, if we know it. We don't require a constant for
4228 this case; some cost analysis could be done if both are available
4229 but neither is constant. For now, assume they're equally cheap,
4230 unless one has side effects. If both strings have constant lengths,
4237 else if (TREE_SIDE_EFFECTS (len1
))
4239 else if (TREE_SIDE_EFFECTS (len2
))
4241 else if (TREE_CODE (len1
) != INTEGER_CST
)
4243 else if (TREE_CODE (len2
) != INTEGER_CST
)
4245 else if (tree_int_cst_lt (len1
, len2
))
4250 /* If both arguments have side effects, we cannot optimize. */
4251 if (!len
|| TREE_SIDE_EFFECTS (len
))
4254 /* The actual new length parameter is MIN(len,arg3). */
4255 len
= fold_build2_loc (loc
, MIN_EXPR
, TREE_TYPE (len
), len
,
4256 fold_convert_loc (loc
, TREE_TYPE (len
), arg3
));
4258 /* If we don't have POINTER_TYPE, call the function. */
4259 if (arg1_align
== 0 || arg2_align
== 0)
4262 /* Make a place to write the result of the instruction. */
4265 && REG_P (result
) && GET_MODE (result
) == insn_mode
4266 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
4267 result
= gen_reg_rtx (insn_mode
);
4269 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4270 arg1
= builtin_save_expr (arg1
);
4271 arg2
= builtin_save_expr (arg2
);
4272 len
= builtin_save_expr (len
);
4274 arg1_rtx
= get_memory_rtx (arg1
, len
);
4275 arg2_rtx
= get_memory_rtx (arg2
, len
);
4276 arg3_rtx
= expand_normal (len
);
4277 insn
= gen_cmpstrnsi (result
, arg1_rtx
, arg2_rtx
, arg3_rtx
,
4278 GEN_INT (MIN (arg1_align
, arg2_align
)));
4283 /* Return the value in the proper mode for this function. */
4284 mode
= TYPE_MODE (TREE_TYPE (exp
));
4285 if (GET_MODE (result
) == mode
)
4288 return convert_to_mode (mode
, result
, 0);
4289 convert_move (target
, result
, 0);
4293 /* Expand the library call ourselves using a stabilized argument
4294 list to avoid re-evaluating the function's arguments twice. */
4295 fndecl
= get_callee_fndecl (exp
);
4296 fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fndecl
, 3,
4298 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
4299 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
4300 return expand_call (fn
, target
, target
== const0_rtx
);
/* Expand a call to __builtin_saveregs, generating the result in TARGET,
   if that's convenient.  */

rtx
expand_builtin_saveregs (void)
{
  rtx val;
  rtx_insn *seq;

  /* Don't do __builtin_saveregs more than once in a function.
     Save the result of the first call and reuse it.  */
  if (saveregs_value != 0)
    return saveregs_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the call to the
     first insn of this function.  */

  start_sequence ();

  /* Do whatever the machine needs done in this case.  */
  val = targetm.calls.expand_builtin_saveregs ();

  seq = get_insns ();
  end_sequence ();

  saveregs_value = val;

  /* Put the insns after the NOTE that starts the function.  If this
     is inside a start_sequence, make the outer-level insn chain current, so
     the code is placed at the start of the function.  */
  push_topmost_sequence ();
  emit_insn_after (seq, entry_of_function ());
  pop_topmost_sequence ();

  return val;
}

/* Expand a call to __builtin_next_arg.  */

static rtx
expand_builtin_next_arg (void)
{
  /* Checking arguments is already done in fold_builtin_next_arg
     that must be called before this function.  */
  return expand_binop (ptr_mode, add_optab,
		       crtl->args.internal_arg_pointer,
		       crtl->args.arg_offset_rtx,
		       NULL_RTX, 0, OPTAB_LIB_WIDEN);
}
4357 /* Make it easier for the backends by protecting the valist argument
4358 from multiple evaluations. */
4361 stabilize_va_list_loc (location_t loc
, tree valist
, int needs_lvalue
)
4363 tree vatype
= targetm
.canonical_va_list_type (TREE_TYPE (valist
));
4365 /* The current way of determining the type of valist is completely
4366 bogus. We should have the information on the va builtin instead. */
4368 vatype
= targetm
.fn_abi_va_list (cfun
->decl
);
4370 if (TREE_CODE (vatype
) == ARRAY_TYPE
)
4372 if (TREE_SIDE_EFFECTS (valist
))
4373 valist
= save_expr (valist
);
4375 /* For this case, the backends will be expecting a pointer to
4376 vatype, but it's possible we've actually been given an array
4377 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4379 if (TREE_CODE (TREE_TYPE (valist
)) == ARRAY_TYPE
)
4381 tree p1
= build_pointer_type (TREE_TYPE (vatype
));
4382 valist
= build_fold_addr_expr_with_type_loc (loc
, valist
, p1
);
4387 tree pt
= build_pointer_type (vatype
);
4391 if (! TREE_SIDE_EFFECTS (valist
))
4394 valist
= fold_build1_loc (loc
, ADDR_EXPR
, pt
, valist
);
4395 TREE_SIDE_EFFECTS (valist
) = 1;
4398 if (TREE_SIDE_EFFECTS (valist
))
4399 valist
= save_expr (valist
);
4400 valist
= fold_build2_loc (loc
, MEM_REF
,
4401 vatype
, valist
, build_int_cst (pt
, 0));
/* The "standard" definition of va_list is void*.  */

tree
std_build_builtin_va_list (void)
{
  return ptr_type_node;
}

/* The "standard" abi va_list is va_list_type_node.  */

tree
std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
{
  return va_list_type_node;
}

/* The "standard" type of va_list is va_list_type_node.  */

tree
std_canonical_va_list_type (tree type)
{
  tree wtype, htype;

  if (INDIRECT_REF_P (type))
    type = TREE_TYPE (type);
  else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
    type = TREE_TYPE (type);
  wtype = va_list_type_node;
  htype = type;

  /* Treat structure va_list types.  */
  if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
    htype = TREE_TYPE (htype);
  else if (TREE_CODE (wtype) == ARRAY_TYPE)
    {
      /* If va_list is an array type, the argument may have decayed
	 to a pointer type, e.g. by being passed to another function.
	 In that case, unwrap both types so that we can compare the
	 underlying records.  */
      if (TREE_CODE (htype) == ARRAY_TYPE
	  || POINTER_TYPE_P (htype))
	{
	  wtype = TREE_TYPE (wtype);
	  htype = TREE_TYPE (htype);
	}
    }
  if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
    return va_list_type_node;

  return NULL_TREE;
}
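/* Illustrative note (editorial addition): the unwrapping above matters on
   targets whose ABI va_list is an array of a record, e.g.

       typedef struct __va_list_tag va_list[1];
       void f (va_list ap);    (the parameter decays to struct __va_list_tag *)

   Both the declared array type and the decayed pointer type must be
   recognized as the canonical va_list here.  */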
/* The "standard" implementation of va_start: just assign `nextarg' to
   the variable.  */

void
std_expand_builtin_va_start (tree valist, rtx nextarg)
{
  rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
  convert_move (va_r, nextarg, 0);

  /* We do not have any valid bounds for the pointer, so
     just store zero bounds for it.  */
  if (chkp_function_instrumented_p (current_function_decl))
    chkp_expand_bounds_reset_for_mem (valist,
				      make_tree (TREE_TYPE (valist),
						 nextarg));
}

/* Expand EXP, a call to __builtin_va_start.  */

static rtx
expand_builtin_va_start (tree exp)
{
  rtx nextarg;
  tree valist;
  location_t loc = EXPR_LOCATION (exp);

  if (call_expr_nargs (exp) < 2)
    {
      error_at (loc, "too few arguments to function %<va_start%>");
      return const0_rtx;
    }

  if (fold_builtin_next_arg (exp, true))
    return const0_rtx;

  nextarg = expand_builtin_next_arg ();
  valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);

  if (targetm.expand_builtin_va_start)
    targetm.expand_builtin_va_start (valist, nextarg);
  else
    std_expand_builtin_va_start (valist, nextarg);

  return const0_rtx;
}

/* Expand EXP, a call to __builtin_va_end.  */

static rtx
expand_builtin_va_end (tree exp)
{
  tree valist = CALL_EXPR_ARG (exp, 0);

  /* Evaluate for side effects, if needed.  I hate macros that don't
     do that.  */
  if (TREE_SIDE_EFFECTS (valist))
    expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);

  return const0_rtx;
}
4519 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4520 builtin rather than just as an assignment in stdarg.h because of the
4521 nastiness of array-type va_list types. */
4524 expand_builtin_va_copy (tree exp
)
4527 location_t loc
= EXPR_LOCATION (exp
);
4529 dst
= CALL_EXPR_ARG (exp
, 0);
4530 src
= CALL_EXPR_ARG (exp
, 1);
4532 dst
= stabilize_va_list_loc (loc
, dst
, 1);
4533 src
= stabilize_va_list_loc (loc
, src
, 0);
4535 gcc_assert (cfun
!= NULL
&& cfun
->decl
!= NULL_TREE
);
4537 if (TREE_CODE (targetm
.fn_abi_va_list (cfun
->decl
)) != ARRAY_TYPE
)
4539 t
= build2 (MODIFY_EXPR
, targetm
.fn_abi_va_list (cfun
->decl
), dst
, src
);
4540 TREE_SIDE_EFFECTS (t
) = 1;
4541 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4545 rtx dstb
, srcb
, size
;
4547 /* Evaluate to pointers. */
4548 dstb
= expand_expr (dst
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
4549 srcb
= expand_expr (src
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
4550 size
= expand_expr (TYPE_SIZE_UNIT (targetm
.fn_abi_va_list (cfun
->decl
)),
4551 NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
4553 dstb
= convert_memory_address (Pmode
, dstb
);
4554 srcb
= convert_memory_address (Pmode
, srcb
);
4556 /* "Dereference" to BLKmode memories. */
4557 dstb
= gen_rtx_MEM (BLKmode
, dstb
);
4558 set_mem_alias_set (dstb
, get_alias_set (TREE_TYPE (TREE_TYPE (dst
))));
4559 set_mem_align (dstb
, TYPE_ALIGN (targetm
.fn_abi_va_list (cfun
->decl
)));
4560 srcb
= gen_rtx_MEM (BLKmode
, srcb
);
4561 set_mem_alias_set (srcb
, get_alias_set (TREE_TYPE (TREE_TYPE (src
))));
4562 set_mem_align (srcb
, TYPE_ALIGN (targetm
.fn_abi_va_list (cfun
->decl
)));
4565 emit_block_move (dstb
, srcb
, size
, BLOCK_OP_NORMAL
);
4571 /* Expand a call to one of the builtin functions __builtin_frame_address or
4572 __builtin_return_address. */
4575 expand_builtin_frame_address (tree fndecl
, tree exp
)
4577 /* The argument must be a nonnegative integer constant.
4578 It counts the number of frames to scan up the stack.
4579 The value is the return address saved in that frame. */
4580 if (call_expr_nargs (exp
) == 0)
4581 /* Warning about missing arg was already issued. */
4583 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp
, 0)))
4585 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
4586 error ("invalid argument to %<__builtin_frame_address%>");
4588 error ("invalid argument to %<__builtin_return_address%>");
4594 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl
),
4595 tree_to_uhwi (CALL_EXPR_ARG (exp
, 0)));
4597 /* Some ports cannot access arbitrary stack frames. */
4600 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
4601 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4603 warning (0, "unsupported argument to %<__builtin_return_address%>");
4607 /* For __builtin_frame_address, return what we've got. */
4608 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
4612 && ! CONSTANT_P (tem
))
4613 tem
= copy_addr_to_reg (tem
);
/* Expand EXP, a call to the alloca builtin.  Return NULL_RTX if we
   failed and the caller should emit a normal call.  CANNOT_ACCUMULATE
   is the same as for allocate_dynamic_stack_space.  */

static rtx
expand_builtin_alloca (tree exp, bool cannot_accumulate)
{
  rtx op0;
  rtx result;
  bool valid_arglist;
  unsigned int align;
  bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
			    == BUILT_IN_ALLOCA_WITH_ALIGN);

  valid_arglist
    = (alloca_with_align
       ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
       : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));

  if (!valid_arglist)
    return NULL_RTX;

  /* Compute the argument.  */
  op0 = expand_normal (CALL_EXPR_ARG (exp, 0));

  /* Compute the alignment.  */
  align = (alloca_with_align
	   ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
	   : BIGGEST_ALIGNMENT);

  /* Allocate the desired space.  */
  result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
  result = convert_memory_address (ptr_mode, result);

  return result;
}
/* Expand a call to bswap builtin in EXP.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
		      rtx subtarget)
{
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  op0 = expand_expr (arg,
		     subtarget && GET_MODE (subtarget) == target_mode
		     ? subtarget : NULL_RTX,
		     target_mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != target_mode)
    op0 = convert_to_mode (target_mode, op0, 1);

  target = expand_unop (target_mode, bswap_optab, op0, target, 1);

  gcc_assert (target);

  return convert_to_mode (target_mode, target, 1);
}
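/* Illustrative note (editorial addition): at the source level this expands
   calls such as

       __builtin_bswap32 (0x11223344)    which evaluates to 0x44332211

   The operand is first brought into TARGET_MODE so that bswap_optab is
   queried with the mode in which the optab is actually defined.  */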
/* Expand a call to a unary builtin in EXP.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
		     rtx subtarget, optab op_optab)
{
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Compute the argument.  */
  op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
		     (subtarget
		      && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
			  == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
		     VOIDmode, EXPAND_NORMAL);
  /* Compute op, into TARGET if possible.
     Set TARGET to wherever the result comes back.  */
  target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
			op_optab, op0, target, op_optab != clrsb_optab);
  gcc_assert (target);

  return convert_to_mode (target_mode, target, 0);
}
/* Expand a call to __builtin_expect.  We just return our argument
   as the builtin_expect semantics should already have been executed by
   the tree branch prediction pass.  */

static rtx
expand_builtin_expect (tree exp, rtx target)
{
  tree arg;

  if (call_expr_nargs (exp) < 2)
    return const0_rtx;
  arg = CALL_EXPR_ARG (exp, 0);

  target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
  /* When guessing was done, the hints should be already stripped away.  */
  gcc_assert (!flag_guess_branch_prob
	      || optimize == 0 || seen_error ());
  return target;
}
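/* Illustrative note (editorial addition): a typical source-level use is

       if (__builtin_expect (ptr == NULL, 0))
         handle_rare_error ();

   By the time we get here the probability hint has already been consumed by
   the tree-level branch predictors, so only the first argument is expanded
   and returned.  */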
/* Expand a call to __builtin_assume_aligned.  We just return our first
   argument as the builtin_assume_aligned semantics should already have been
   executed by CCP.  */

static rtx
expand_builtin_assume_aligned (tree exp, rtx target)
{
  if (call_expr_nargs (exp) < 2)
    return const0_rtx;
  target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
			EXPAND_NORMAL);
  gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
	      && (call_expr_nargs (exp) < 3
		  || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
  return target;
}
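/* Illustrative note (editorial addition): a typical source-level use is

       double *p = __builtin_assume_aligned (q, 16);

   The alignment fact was already propagated by CCP; here only the pointer
   value (the first argument) is expanded, and the remaining arguments are
   required to have no side effects.  */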
static void
expand_builtin_trap (void)
{
#ifdef HAVE_trap
  if (HAVE_trap)
    {
      rtx insn = emit_insn (gen_trap ());
      /* For trap insns when not accumulating outgoing args force
	 REG_ARGS_SIZE note to prevent crossjumping of calls with
	 different args sizes.  */
      if (!ACCUMULATE_OUTGOING_ARGS)
	add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
    }
  else
#endif
    emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
  emit_barrier ();
}

/* Expand a call to __builtin_unreachable.  We do nothing except emit
   a barrier saying that control flow will not pass here.

   It is the responsibility of the program being compiled to ensure
   that control flow never reaches __builtin_unreachable.  */

static void
expand_builtin_unreachable (void)
{
  emit_barrier ();
}
/* Expand EXP, a call to fabs, fabsf or fabsl.
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function inline.  If convenient, the result should be placed
   in TARGET.  SUBTARGET may be used as the target for computing
   the operand.  */

static rtx
expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
{
  machine_mode mode;
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
  return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
}

/* Expand EXP, a call to copysign, copysignf, or copysignl.
   Return NULL if a normal call should be emitted rather than expanding the
   function inline.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing the operand.  */

static rtx
expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
{
  rtx op0, op1;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

  arg = CALL_EXPR_ARG (exp, 1);
  op1 = expand_normal (arg);

  return expand_copysign (op0, op1, target);
}
4827 /* Expand a call to __builtin___clear_cache. */
4830 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED
)
4832 #ifndef HAVE_clear_cache
4833 #ifdef CLEAR_INSN_CACHE
4834 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4835 does something. Just do the default expansion to a call to
4839 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4840 does nothing. There is no need to call it. Do nothing. */
4842 #endif /* CLEAR_INSN_CACHE */
4844 /* We have a "clear_cache" insn, and it will handle everything. */
4846 rtx begin_rtx
, end_rtx
;
4848 /* We must not expand to a library call. If we did, any
4849 fallback library function in libgcc that might contain a call to
4850 __builtin___clear_cache() would recurse infinitely. */
4851 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
4853 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4857 if (HAVE_clear_cache
)
4859 struct expand_operand ops
[2];
4861 begin
= CALL_EXPR_ARG (exp
, 0);
4862 begin_rtx
= expand_expr (begin
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
4864 end
= CALL_EXPR_ARG (exp
, 1);
4865 end_rtx
= expand_expr (end
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
4867 create_address_operand (&ops
[0], begin_rtx
);
4868 create_address_operand (&ops
[1], end_rtx
);
4869 if (maybe_expand_insn (CODE_FOR_clear_cache
, 2, ops
))
4873 #endif /* HAVE_clear_cache */
/* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT.  */

static rtx
round_trampoline_addr (rtx tramp)
{
  rtx temp, addend, mask;

  /* If we don't need too much alignment, we'll have been guaranteed
     proper alignment by get_trampoline_type.  */
  if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
    return tramp;

  /* Round address up to desired boundary.  */
  temp = gen_reg_rtx (Pmode);
  addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
  mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);

  temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
			      temp, 0, OPTAB_LIB_WIDEN);
  tramp = expand_simple_binop (Pmode, AND, temp, mask,
			       temp, 0, OPTAB_LIB_WIDEN);

  return tramp;
}
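/* Illustrative note (editorial addition): with a TRAMPOLINE_ALIGNMENT of
   64 bits the two binops above compute

       tramp = (tramp + 7) & -8

   the usual round-up-to-a-power-of-two idiom, done here in Pmode RTL.  */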
4902 expand_builtin_init_trampoline (tree exp
, bool onstack
)
4904 tree t_tramp
, t_func
, t_chain
;
4905 rtx m_tramp
, r_tramp
, r_chain
, tmp
;
4907 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
,
4908 POINTER_TYPE
, VOID_TYPE
))
4911 t_tramp
= CALL_EXPR_ARG (exp
, 0);
4912 t_func
= CALL_EXPR_ARG (exp
, 1);
4913 t_chain
= CALL_EXPR_ARG (exp
, 2);
4915 r_tramp
= expand_normal (t_tramp
);
4916 m_tramp
= gen_rtx_MEM (BLKmode
, r_tramp
);
4917 MEM_NOTRAP_P (m_tramp
) = 1;
4919 /* If ONSTACK, the TRAMP argument should be the address of a field
4920 within the local function's FRAME decl. Either way, let's see if
4921 we can fill in the MEM_ATTRs for this memory. */
4922 if (TREE_CODE (t_tramp
) == ADDR_EXPR
)
4923 set_mem_attributes (m_tramp
, TREE_OPERAND (t_tramp
, 0), true);
4925 /* Creator of a heap trampoline is responsible for making sure the
4926 address is aligned to at least STACK_BOUNDARY. Normally malloc
4927 will ensure this anyhow. */
4928 tmp
= round_trampoline_addr (r_tramp
);
4931 m_tramp
= change_address (m_tramp
, BLKmode
, tmp
);
4932 set_mem_align (m_tramp
, TRAMPOLINE_ALIGNMENT
);
4933 set_mem_size (m_tramp
, TRAMPOLINE_SIZE
);
4936 /* The FUNC argument should be the address of the nested function.
4937 Extract the actual function decl to pass to the hook. */
4938 gcc_assert (TREE_CODE (t_func
) == ADDR_EXPR
);
4939 t_func
= TREE_OPERAND (t_func
, 0);
4940 gcc_assert (TREE_CODE (t_func
) == FUNCTION_DECL
);
4942 r_chain
= expand_normal (t_chain
);
4944 /* Generate insns to initialize the trampoline. */
4945 targetm
.calls
.trampoline_init (m_tramp
, t_func
, r_chain
);
4949 trampolines_created
= 1;
4951 warning_at (DECL_SOURCE_LOCATION (t_func
), OPT_Wtrampolines
,
4952 "trampoline generated for nested function %qD", t_func
);
static rtx
expand_builtin_adjust_trampoline (tree exp)
{
  rtx tramp;

  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
  tramp = round_trampoline_addr (tramp);
  if (targetm.calls.trampoline_adjust_address)
    tramp = targetm.calls.trampoline_adjust_address (tramp);

  return tramp;
}
4974 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4975 function. The function first checks whether the back end provides
4976 an insn to implement signbit for the respective mode. If not, it
4977 checks whether the floating point format of the value is such that
4978 the sign bit can be extracted. If that is not the case, the
4979 function returns NULL_RTX to indicate that a normal call should be
4980 emitted rather than expanding the function in-line. EXP is the
4981 expression that is a call to the builtin function; if convenient,
4982 the result should be placed in TARGET. */
4984 expand_builtin_signbit (tree exp
, rtx target
)
4986 const struct real_format
*fmt
;
4987 machine_mode fmode
, imode
, rmode
;
4990 enum insn_code icode
;
4992 location_t loc
= EXPR_LOCATION (exp
);
4994 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
4997 arg
= CALL_EXPR_ARG (exp
, 0);
4998 fmode
= TYPE_MODE (TREE_TYPE (arg
));
4999 rmode
= TYPE_MODE (TREE_TYPE (exp
));
5000 fmt
= REAL_MODE_FORMAT (fmode
);
5002 arg
= builtin_save_expr (arg
);
5004 /* Expand the argument yielding a RTX expression. */
5005 temp
= expand_normal (arg
);
5007 /* Check if the back end provides an insn that handles signbit for the
5009 icode
= optab_handler (signbit_optab
, fmode
);
5010 if (icode
!= CODE_FOR_nothing
)
5012 rtx_insn
*last
= get_last_insn ();
5013 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
5014 if (maybe_emit_unop_insn (icode
, target
, temp
, UNKNOWN
))
5016 delete_insns_since (last
);
5019 /* For floating point formats without a sign bit, implement signbit
5021 bitpos
= fmt
->signbit_ro
;
5024 /* But we can't do this if the format supports signed zero. */
5025 if (fmt
->has_signed_zero
&& HONOR_SIGNED_ZEROS (fmode
))
5028 arg
= fold_build2_loc (loc
, LT_EXPR
, TREE_TYPE (exp
), arg
,
5029 build_real (TREE_TYPE (arg
), dconst0
));
5030 return expand_expr (arg
, target
, VOIDmode
, EXPAND_NORMAL
);
5033 if (GET_MODE_SIZE (fmode
) <= UNITS_PER_WORD
)
5035 imode
= int_mode_for_mode (fmode
);
5036 if (imode
== BLKmode
)
5038 temp
= gen_lowpart (imode
, temp
);
5043 /* Handle targets with different FP word orders. */
5044 if (FLOAT_WORDS_BIG_ENDIAN
)
5045 word
= (GET_MODE_BITSIZE (fmode
) - bitpos
) / BITS_PER_WORD
;
5047 word
= bitpos
/ BITS_PER_WORD
;
5048 temp
= operand_subword_force (temp
, word
, fmode
);
5049 bitpos
= bitpos
% BITS_PER_WORD
;
5052 /* Force the intermediate word_mode (or narrower) result into a
5053 register. This avoids attempting to create paradoxical SUBREGs
5054 of floating point modes below. */
5055 temp
= force_reg (imode
, temp
);
5057 /* If the bitpos is within the "result mode" lowpart, the operation
5058 can be implement with a single bitwise AND. Otherwise, we need
5059 a right shift and an AND. */
5061 if (bitpos
< GET_MODE_BITSIZE (rmode
))
5063 wide_int mask
= wi::set_bit_in_zero (bitpos
, GET_MODE_PRECISION (rmode
));
5065 if (GET_MODE_SIZE (imode
) > GET_MODE_SIZE (rmode
))
5066 temp
= gen_lowpart (rmode
, temp
);
5067 temp
= expand_binop (rmode
, and_optab
, temp
,
5068 immed_wide_int_const (mask
, rmode
),
5069 NULL_RTX
, 1, OPTAB_LIB_WIDEN
);
5073 /* Perform a logical right shift to place the signbit in the least
5074 significant bit, then truncate the result to the desired mode
5075 and mask just this bit. */
5076 temp
= expand_shift (RSHIFT_EXPR
, imode
, temp
, bitpos
, NULL_RTX
, 1);
5077 temp
= gen_lowpart (rmode
, temp
);
5078 temp
= expand_binop (rmode
, and_optab
, temp
, const1_rtx
,
5079 NULL_RTX
, 1, OPTAB_LIB_WIDEN
);
5085 /* Expand fork or exec calls. TARGET is the desired target of the
5086 call. EXP is the call. FN is the
5087 identificator of the actual function. IGNORE is nonzero if the
5088 value is to be ignored. */
5091 expand_builtin_fork_or_exec (tree fn
, tree exp
, rtx target
, int ignore
)
5096 /* If we are not profiling, just call the function. */
5097 if (!profile_arc_flag
)
5100 /* Otherwise call the wrapper. This should be equivalent for the rest of
5101 compiler, so the code does not diverge, and the wrapper may run the
5102 code necessary for keeping the profiling sane. */
5104 switch (DECL_FUNCTION_CODE (fn
))
5107 id
= get_identifier ("__gcov_fork");
5110 case BUILT_IN_EXECL
:
5111 id
= get_identifier ("__gcov_execl");
5114 case BUILT_IN_EXECV
:
5115 id
= get_identifier ("__gcov_execv");
5118 case BUILT_IN_EXECLP
:
5119 id
= get_identifier ("__gcov_execlp");
5122 case BUILT_IN_EXECLE
:
5123 id
= get_identifier ("__gcov_execle");
5126 case BUILT_IN_EXECVP
:
5127 id
= get_identifier ("__gcov_execvp");
5130 case BUILT_IN_EXECVE
:
5131 id
= get_identifier ("__gcov_execve");
5138 decl
= build_decl (DECL_SOURCE_LOCATION (fn
),
5139 FUNCTION_DECL
, id
, TREE_TYPE (fn
));
5140 DECL_EXTERNAL (decl
) = 1;
5141 TREE_PUBLIC (decl
) = 1;
5142 DECL_ARTIFICIAL (decl
) = 1;
5143 TREE_NOTHROW (decl
) = 1;
5144 DECL_VISIBILITY (decl
) = VISIBILITY_DEFAULT
;
5145 DECL_VISIBILITY_SPECIFIED (decl
) = 1;
5146 call
= rewrite_call_expr (EXPR_LOCATION (exp
), exp
, 0, decl
, 0);
5147 return expand_call (call
, target
, ignore
);
/* Reconstitute a mode for a __sync intrinsic operation.  Since the type of
   the pointer in these functions is void*, the tree optimizers may remove
   casts.  The mode computed in expand_builtin isn't reliable either, due
   to __sync_bool_compare_and_swap.

   FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
   group of builtins.  This gives us log2 of the mode size.  */

static inline machine_mode
get_builtin_sync_mode (int fcode_diff)
{
  /* The size is not negotiable, so ask not to get BLKmode in return
     if the target indicates that a smaller size would be better.  */
  return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
}
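/* Illustrative note (editorial addition): for example,
   BUILT_IN_SYNC_FETCH_AND_ADD_4 is two entries past the _1 variant, so
   FCODE_DIFF == 2 and the mode requested is BITS_PER_UNIT << 2 == 32 bits,
   i.e. SImode on typical targets.  */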
/* Expand the memory expression LOC and return the appropriate memory operand
   for the builtin_sync operations.  */

static rtx
get_builtin_sync_mem (tree loc, machine_mode mode)
{
  rtx addr, mem;

  addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
  addr = convert_memory_address (Pmode, addr);

  /* Note that we explicitly do not want any alias information for this
     memory, so that we kill all other live memories.  Otherwise we don't
     satisfy the full barrier semantics of the intrinsic.  */
  mem = validize_mem (gen_rtx_MEM (mode, addr));

  /* The alignment needs to be at least according to that of the mode.  */
  set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
			   get_pointer_alignment (loc)));
  set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
  MEM_VOLATILE_P (mem) = 1;

  return mem;
}

/* Make sure an argument is in the right mode.
   EXP is the tree argument.
   MODE is the mode it should be in.  */

static rtx
expand_expr_force_mode (tree exp, machine_mode mode)
{
  rtx val;
  machine_mode old_mode;

  val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
  /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
     of CONST_INTs, where we know the old_mode only from the call argument.  */

  old_mode = GET_MODE (val);
  if (old_mode == VOIDmode)
    old_mode = TYPE_MODE (TREE_TYPE (exp));
  val = convert_modes (mode, old_mode, val, 1);
  return val;
}
5215 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5216 EXP is the CALL_EXPR. CODE is the rtx code
5217 that corresponds to the arithmetic or logical operation from the name;
5218 an exception here is that NOT actually means NAND. TARGET is an optional
5219 place for us to store the results; AFTER is true if this is the
5220 fetch_and_xxx form. */
5223 expand_builtin_sync_operation (machine_mode mode
, tree exp
,
5224 enum rtx_code code
, bool after
,
5228 location_t loc
= EXPR_LOCATION (exp
);
5230 if (code
== NOT
&& warn_sync_nand
)
5232 tree fndecl
= get_callee_fndecl (exp
);
5233 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
5235 static bool warned_f_a_n
, warned_n_a_f
;
5239 case BUILT_IN_SYNC_FETCH_AND_NAND_1
:
5240 case BUILT_IN_SYNC_FETCH_AND_NAND_2
:
5241 case BUILT_IN_SYNC_FETCH_AND_NAND_4
:
5242 case BUILT_IN_SYNC_FETCH_AND_NAND_8
:
5243 case BUILT_IN_SYNC_FETCH_AND_NAND_16
:
5247 fndecl
= builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N
);
5248 inform (loc
, "%qD changed semantics in GCC 4.4", fndecl
);
5249 warned_f_a_n
= true;
5252 case BUILT_IN_SYNC_NAND_AND_FETCH_1
:
5253 case BUILT_IN_SYNC_NAND_AND_FETCH_2
:
5254 case BUILT_IN_SYNC_NAND_AND_FETCH_4
:
5255 case BUILT_IN_SYNC_NAND_AND_FETCH_8
:
5256 case BUILT_IN_SYNC_NAND_AND_FETCH_16
:
5260 fndecl
= builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N
);
5261 inform (loc
, "%qD changed semantics in GCC 4.4", fndecl
);
5262 warned_n_a_f
= true;
5270 /* Expand the operands. */
5271 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5272 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
5274 return expand_atomic_fetch_op (target
, mem
, val
, code
, MEMMODEL_SEQ_CST
,
5278 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5279 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5280 true if this is the boolean form. TARGET is a place for us to store the
5281 results; this is NOT optional if IS_BOOL is true. */
5284 expand_builtin_compare_and_swap (machine_mode mode
, tree exp
,
5285 bool is_bool
, rtx target
)
5287 rtx old_val
, new_val
, mem
;
5290 /* Expand the operands. */
5291 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5292 old_val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
5293 new_val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 2), mode
);
5295 pbool
= poval
= NULL
;
5296 if (target
!= const0_rtx
)
5303 if (!expand_atomic_compare_and_swap (pbool
, poval
, mem
, old_val
, new_val
,
5304 false, MEMMODEL_SEQ_CST
,
/* Expand the __sync_lock_test_and_set intrinsic.  Note that the most
   general form is actually an atomic exchange, and some targets only
   support a reduced form with the second argument being a constant 1.
   EXP is the CALL_EXPR; TARGET is an optional place for us to store
   the results.  */

static rtx
expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
				       rtx target)
{
  rtx val, mem;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_sync_lock_test_and_set (target, mem, val);
}

/* Expand the __sync_lock_release intrinsic.  EXP is the CALL_EXPR.  */

static void
expand_builtin_sync_lock_release (machine_mode mode, tree exp)
{
  rtx mem;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  expand_atomic_store (mem, const0_rtx, MEMMODEL_RELEASE, true);
}
/* Given an integer representing an ``enum memmodel'', verify its
   correctness and return the memory model enum.  */

static enum memmodel
get_memmodel (tree exp)
{
  rtx op;
  unsigned HOST_WIDE_INT val;

  /* If the parameter is not a constant, it's a run time value so we'll just
     convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking.  */
  if (TREE_CODE (exp) != INTEGER_CST)
    return MEMMODEL_SEQ_CST;

  op = expand_normal (exp);

  val = INTVAL (op);
  if (targetm.memmodel_check)
    val = targetm.memmodel_check (val);
  else if (val & ~MEMMODEL_MASK)
    {
      warning (OPT_Winvalid_memory_model,
	       "Unknown architecture specifier in memory model to builtin.");
      return MEMMODEL_SEQ_CST;
    }

  if ((INTVAL (op) & MEMMODEL_MASK) >= MEMMODEL_LAST)
    {
      warning (OPT_Winvalid_memory_model,
	       "invalid memory model argument to builtin");
      return MEMMODEL_SEQ_CST;
    }

  /* Workaround for Bugzilla 59448.  GCC doesn't track consume properly, so
     be conservative and promote consume to acquire.  */
  if (val == MEMMODEL_CONSUME)
    val = MEMMODEL_ACQUIRE;

  return (enum memmodel) val;
}
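/* Illustrative note (editorial addition): the net effect of the checks
   above, for a call such as __atomic_load_n (p, m), is

       m == __ATOMIC_CONSUME          -> treated as __ATOMIC_ACQUIRE
       m not a compile-time constant  -> treated as __ATOMIC_SEQ_CST
       m out of range                 -> warning, then __ATOMIC_SEQ_CST

   so later expansion code only ever sees a known, valid model.  */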
/* Expand the __atomic_exchange intrinsic:
   	TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
{
  rtx val, mem;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_exchange (target, mem, val, model);
}
5407 /* Expand the __atomic_compare_exchange intrinsic:
5408 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5409 TYPE desired, BOOL weak,
5410 enum memmodel success,
5411 enum memmodel failure)
5412 EXP is the CALL_EXPR.
5413 TARGET is an optional place for us to store the results. */
5416 expand_builtin_atomic_compare_exchange (machine_mode mode
, tree exp
,
5419 rtx expect
, desired
, mem
, oldval
;
5420 rtx_code_label
*label
;
5421 enum memmodel success
, failure
;
5425 success
= get_memmodel (CALL_EXPR_ARG (exp
, 4));
5426 failure
= get_memmodel (CALL_EXPR_ARG (exp
, 5));
5428 if (failure
> success
)
5430 warning (OPT_Winvalid_memory_model
,
5431 "failure memory model cannot be stronger than success memory "
5432 "model for %<__atomic_compare_exchange%>");
5433 success
= MEMMODEL_SEQ_CST
;
5436 if ((failure
& MEMMODEL_MASK
) == MEMMODEL_RELEASE
5437 || (failure
& MEMMODEL_MASK
) == MEMMODEL_ACQ_REL
)
5439 warning (OPT_Winvalid_memory_model
,
5440 "invalid failure memory model for "
5441 "%<__atomic_compare_exchange%>");
5442 failure
= MEMMODEL_SEQ_CST
;
5443 success
= MEMMODEL_SEQ_CST
;
5447 if (!flag_inline_atomics
)
5450 /* Expand the operands. */
5451 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5453 expect
= expand_normal (CALL_EXPR_ARG (exp
, 1));
5454 expect
= convert_memory_address (Pmode
, expect
);
5455 expect
= gen_rtx_MEM (mode
, expect
);
5456 desired
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 2), mode
);
5458 weak
= CALL_EXPR_ARG (exp
, 3);
5460 if (tree_fits_shwi_p (weak
) && tree_to_shwi (weak
) != 0)
5463 if (target
== const0_rtx
)
5466 /* Lest the rtl backend create a race condition with an imporoper store
5467 to memory, always create a new pseudo for OLDVAL. */
5470 if (!expand_atomic_compare_and_swap (&target
, &oldval
, mem
, expect
, desired
,
5471 is_weak
, success
, failure
))
5474 /* Conditionally store back to EXPECT, lest we create a race condition
5475 with an improper store to memory. */
5476 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5477 the normal case where EXPECT is totally private, i.e. a register. At
5478 which point the store can be unconditional. */
5479 label
= gen_label_rtx ();
5480 emit_cmp_and_jump_insns (target
, const0_rtx
, NE
, NULL
, VOIDmode
, 1, label
);
5481 emit_move_insn (expect
, oldval
);
/* Expand the __atomic_load intrinsic:
   	TYPE __atomic_load (TYPE *object, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
{
  rtx mem;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 1));
  if ((model & MEMMODEL_MASK) == MEMMODEL_RELEASE
      || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
    {
      warning (OPT_Winvalid_memory_model,
	       "invalid memory model for %<__atomic_load%>");
      model = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operand.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  return expand_atomic_load (target, mem, model);
}

/* Expand the __atomic_store intrinsic:
   	void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_store (machine_mode mode, tree exp)
{
  rtx mem, val;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));
  if ((model & MEMMODEL_MASK) != MEMMODEL_RELAXED
      && (model & MEMMODEL_MASK) != MEMMODEL_SEQ_CST
      && (model & MEMMODEL_MASK) != MEMMODEL_RELEASE)
    {
      warning (OPT_Winvalid_memory_model,
	       "invalid memory model for %<__atomic_store%>");
      model = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_store (mem, val, model, false);
}
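/* Illustrative note (editorial addition): the restriction enforced above
   mirrors the C11 rules for atomic stores, e.g.

       __atomic_store_n (&x, 1, __ATOMIC_RELEASE);   accepted
       __atomic_store_n (&x, 1, __ATOMIC_ACQUIRE);   warns, treated as
                                                     __ATOMIC_SEQ_CST

   since acquire, acq_rel and consume orderings have no meaning for a pure
   store.  */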
5548 /* Expand the __atomic_fetch_XXX intrinsic:
5549 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5550 EXP is the CALL_EXPR.
5551 TARGET is an optional place for us to store the results.
5552 CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
5553 FETCH_AFTER is true if returning the result of the operation.
5554 FETCH_AFTER is false if returning the value before the operation.
5555 IGNORE is true if the result is not used.
5556 EXT_CALL is the correct builtin for an external call if this cannot be
5557 resolved to an instruction sequence. */
5560 expand_builtin_atomic_fetch_op (machine_mode mode
, tree exp
, rtx target
,
5561 enum rtx_code code
, bool fetch_after
,
5562 bool ignore
, enum built_in_function ext_call
)
5565 enum memmodel model
;
5569 model
= get_memmodel (CALL_EXPR_ARG (exp
, 2));
5571 /* Expand the operands. */
5572 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5573 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
5575 /* Only try generating instructions if inlining is turned on. */
5576 if (flag_inline_atomics
)
5578 ret
= expand_atomic_fetch_op (target
, mem
, val
, code
, model
, fetch_after
);
5583 /* Return if a different routine isn't needed for the library call. */
5584 if (ext_call
== BUILT_IN_NONE
)
5587 /* Change the call to the specified function. */
5588 fndecl
= get_callee_fndecl (exp
);
5589 addr
= CALL_EXPR_FN (exp
);
5592 gcc_assert (TREE_OPERAND (addr
, 0) == fndecl
);
5593 TREE_OPERAND (addr
, 0) = builtin_decl_explicit (ext_call
);
5595 /* Expand the call here so we can emit trailing code. */
5596 ret
= expand_call (exp
, target
, ignore
);
5598 /* Replace the original function just in case it matters. */
5599 TREE_OPERAND (addr
, 0) = fndecl
;
5601 /* Then issue the arithmetic correction to return the right result. */
5606 ret
= expand_simple_binop (mode
, AND
, ret
, val
, NULL_RTX
, true,
5608 ret
= expand_simple_unop (mode
, NOT
, ret
, target
, true);
5611 ret
= expand_simple_binop (mode
, code
, ret
, val
, target
, true,
5618 #ifndef HAVE_atomic_clear
5619 # define HAVE_atomic_clear 0
5620 # define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
5623 /* Expand an atomic clear operation.
5624 void _atomic_clear (BOOL *obj, enum memmodel)
5625 EXP is the call expression. */
5628 expand_builtin_atomic_clear (tree exp
)
5632 enum memmodel model
;
5634 mode
= mode_for_size (BOOL_TYPE_SIZE
, MODE_INT
, 0);
5635 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5636 model
= get_memmodel (CALL_EXPR_ARG (exp
, 1));
5638 if ((model
& MEMMODEL_MASK
) == MEMMODEL_CONSUME
5639 || (model
& MEMMODEL_MASK
) == MEMMODEL_ACQUIRE
5640 || (model
& MEMMODEL_MASK
) == MEMMODEL_ACQ_REL
)
5642 warning (OPT_Winvalid_memory_model
,
5643 "invalid memory model for %<__atomic_store%>");
5644 model
= MEMMODEL_SEQ_CST
;
5647 if (HAVE_atomic_clear
)
5649 emit_insn (gen_atomic_clear (mem
, model
));
5653 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
5654 Failing that, a store is issued by __atomic_store. The only way this can
5655 fail is if the bool type is larger than a word size. Unlikely, but
5656 handle it anyway for completeness. Assume a single threaded model since
5657 there is no atomic support in this case, and no barriers are required. */
5658 ret
= expand_atomic_store (mem
, const0_rtx
, model
, true);
5660 emit_move_insn (mem
, const0_rtx
);
/* Expand an atomic test_and_set operation.
   	bool _atomic_test_and_set (BOOL *obj, enum memmodel)
   EXP is the call expression.  */

static rtx
expand_builtin_atomic_test_and_set (tree exp, rtx target)
{
  rtx mem;
  enum memmodel model;
  machine_mode mode;

  mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  model = get_memmodel (CALL_EXPR_ARG (exp, 1));

  return expand_atomic_test_and_set (target, mem, model);
}
5683 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5684 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5687 fold_builtin_atomic_always_lock_free (tree arg0
, tree arg1
)
5691 unsigned int mode_align
, type_align
;
5693 if (TREE_CODE (arg0
) != INTEGER_CST
)
5696 size
= INTVAL (expand_normal (arg0
)) * BITS_PER_UNIT
;
5697 mode
= mode_for_size (size
, MODE_INT
, 0);
5698 mode_align
= GET_MODE_ALIGNMENT (mode
);
5700 if (TREE_CODE (arg1
) == INTEGER_CST
&& INTVAL (expand_normal (arg1
)) == 0)
5701 type_align
= mode_align
;
5704 tree ttype
= TREE_TYPE (arg1
);
5706 /* This function is usually invoked and folded immediately by the front
5707 end before anything else has a chance to look at it. The pointer
5708 parameter at this point is usually cast to a void *, so check for that
5709 and look past the cast. */
5710 if (CONVERT_EXPR_P (arg1
) && POINTER_TYPE_P (ttype
)
5711 && VOID_TYPE_P (TREE_TYPE (ttype
)))
5712 arg1
= TREE_OPERAND (arg1
, 0);
5714 ttype
= TREE_TYPE (arg1
);
5715 gcc_assert (POINTER_TYPE_P (ttype
));
5717 /* Get the underlying type of the object. */
5718 ttype
= TREE_TYPE (ttype
);
5719 type_align
= TYPE_ALIGN (ttype
);
5722 /* If the object has smaller alignment, the the lock free routines cannot
5724 if (type_align
< mode_align
)
5725 return boolean_false_node
;
5727 /* Check if a compare_and_swap pattern exists for the mode which represents
5728 the required size. The pattern is not allowed to fail, so the existence
5729 of the pattern indicates support is present. */
5730 if (can_compare_and_swap_p (mode
, true))
5731 return boolean_true_node
;
5733 return boolean_false_node
;
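/* Illustrative note (editorial addition): this folder is what lets

       __atomic_always_lock_free (sizeof (int), 0)

   become a compile-time constant: a null object pointer means "assume the
   typical alignment for an object of that size", and the result is true
   only if a never-failing compare-and-swap pattern exists for the
   corresponding integer mode.  */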
5736 /* Return true if the parameters to call EXP represent an object which will
5737 always generate lock free instructions. The first argument represents the
5738 size of the object, and the second parameter is a pointer to the object
5739 itself. If NULL is passed for the object, then the result is based on
5740 typical alignment for an object of the specified size. Otherwise return
5744 expand_builtin_atomic_always_lock_free (tree exp
)
5747 tree arg0
= CALL_EXPR_ARG (exp
, 0);
5748 tree arg1
= CALL_EXPR_ARG (exp
, 1);
5750 if (TREE_CODE (arg0
) != INTEGER_CST
)
5752 error ("non-constant argument 1 to __atomic_always_lock_free");
5756 size
= fold_builtin_atomic_always_lock_free (arg0
, arg1
);
5757 if (size
== boolean_true_node
)
5762 /* Return a one or zero if it can be determined that object ARG1 of size ARG
5763 is lock free on this architecture. */
5766 fold_builtin_atomic_is_lock_free (tree arg0
, tree arg1
)
5768 if (!flag_inline_atomics
)
5771 /* If it isn't always lock free, don't generate a result. */
5772 if (fold_builtin_atomic_always_lock_free (arg0
, arg1
) == boolean_true_node
)
5773 return boolean_true_node
;
/* Return true if the parameters to call EXP represent an object which will
   always generate lock free instructions.  The first argument represents the
   size of the object, and the second parameter is a pointer to the object
   itself.  If NULL is passed for the object, then the result is based on
   typical alignment for an object of the specified size.  Otherwise return
   NULL.  */

static rtx
expand_builtin_atomic_is_lock_free (tree exp)
{
  tree size;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);

  if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
    {
      error ("non-integer argument 1 to __atomic_is_lock_free");
      return NULL_RTX;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* If the value is known at compile time, return the RTX for it.  */
  size = fold_builtin_atomic_is_lock_free (arg0, arg1);
  if (size == boolean_true_node)
    return const1_rtx;

  return NULL_RTX;
}
/* Expand the __atomic_thread_fence intrinsic:
   	void __atomic_thread_fence (enum memmodel)
   EXP is the CALL_EXPR.  */

static void
expand_builtin_atomic_thread_fence (tree exp)
{
  enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
  expand_mem_thread_fence (model);
}

/* Expand the __atomic_signal_fence intrinsic:
   	void __atomic_signal_fence (enum memmodel)
   EXP is the CALL_EXPR.  */

static void
expand_builtin_atomic_signal_fence (tree exp)
{
  enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
  expand_mem_signal_fence (model);
}

/* Expand the __sync_synchronize intrinsic.  */

static void
expand_builtin_sync_synchronize (void)
{
  expand_mem_thread_fence (MEMMODEL_SEQ_CST);
}
static rtx
expand_builtin_thread_pointer (tree exp, rtx target)
{
  enum insn_code icode;
  if (!validate_arglist (exp, VOID_TYPE))
    return const0_rtx;
  icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand op;
      /* If the target is not suitable then create a new target.  */
      if (target == NULL_RTX
	  || !REG_P (target)
	  || GET_MODE (target) != Pmode)
	target = gen_reg_rtx (Pmode);
      create_output_operand (&op, target, Pmode);
      expand_insn (icode, 1, &op);
      return target;
    }
  error ("__builtin_thread_pointer is not supported on this target");
  return const0_rtx;
}
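/* Usage sketch (illustrative): on targets that define
   get_thread_pointer_optab,

     void *tcb = __builtin_thread_pointer ();

   expands to a single move from the thread-pointer register into a Pmode
   register; on other targets the error above is emitted instead.  */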
static void
expand_builtin_set_thread_pointer (tree exp)
{
  enum insn_code icode;
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return;
  icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand op;
      rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
			     Pmode, EXPAND_NORMAL);
      create_input_operand (&op, val, Pmode);
      expand_insn (icode, 1, &op);
      return;
    }
  error ("__builtin_set_thread_pointer is not supported on this target");
}
/* Emit code to restore the current value of stack.  */

static void
expand_stack_restore (tree var)
{
  rtx_insn *prev;
  rtx sa = expand_normal (var);

  sa = convert_memory_address (Pmode, sa);

  prev = get_last_insn ();
  emit_stack_restore (SAVE_BLOCK, sa);
  fixup_args_size_notes (prev, get_last_insn (), 0);
}

/* Emit code to save the current value of stack.  */

static rtx
expand_stack_save (void)
{
  rtx ret = NULL_RTX;

  do_pending_stack_adjust ();
  emit_stack_save (SAVE_BLOCK, &ret);
  return ret;
}
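/* These two expanders are used as a pair, e.g. for the lowered form of a
   block containing a variable-length array (illustrative sketch):

     void *sp = __builtin_stack_save ();
     ... allocate and use the VLA ...
     __builtin_stack_restore (sp);

   so the stack space taken by the VLA is released when the block exits.  */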
/* Expand OpenACC acc_on_device.

   This has to happen late (that is, not in early folding; expand_builtin_*,
   rather than fold_builtin_*), as we have to act differently for host and
   acceleration device (ACCEL_COMPILER conditional).  */

static rtx
expand_builtin_acc_on_device (tree exp, rtx target)
{
  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree arg = CALL_EXPR_ARG (exp, 0);

  /* Return (arg == v1 || arg == v2) ? 1 : 0.  */
  machine_mode v_mode = TYPE_MODE (TREE_TYPE (arg));
  rtx v = expand_normal (arg), v1, v2;
#ifdef ACCEL_COMPILER
  v1 = GEN_INT (GOMP_DEVICE_NOT_HOST);
  v2 = GEN_INT (ACCEL_COMPILER_acc_device);
#else
  v1 = GEN_INT (GOMP_DEVICE_NONE);
  v2 = GEN_INT (GOMP_DEVICE_HOST);
#endif
  machine_mode target_mode = TYPE_MODE (integer_type_node);
  if (!target || !register_operand (target, target_mode))
    target = gen_reg_rtx (target_mode);
  emit_move_insn (target, const1_rtx);
  rtx_code_label *done_label = gen_label_rtx ();
  do_compare_rtx_and_jump (v, v1, EQ, false, v_mode, NULL_RTX,
			   NULL_RTX, done_label, PROB_EVEN);
  do_compare_rtx_and_jump (v, v2, EQ, false, v_mode, NULL_RTX,
			   NULL_RTX, done_label, PROB_EVEN);
  emit_move_insn (target, const0_rtx);
  emit_label (done_label);

  return target;
}
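/* Host-side expansion sketch (illustrative): the RTL generated above behaves
   like

     int acc_on_device (int dev)
     {
       return dev == GOMP_DEVICE_NONE || dev == GOMP_DEVICE_HOST;
     }

   while an ACCEL_COMPILER build instead compares against
   GOMP_DEVICE_NOT_HOST and the device the compiler was built for.  */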
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

rtx
expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
		int ignore)
{
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
  int flags;

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return targetm.expand_builtin (exp, target, subtarget, mode, ignore);

  /* When ASan is enabled, we don't want to expand some memory/string
     builtins and rely on libsanitizer's hooks.  This allows us to avoid
     redundant checks and be sure that possible overflow will be detected
     by ASan.  */

  if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
    return expand_call (exp, target, ignore);

  /* When not optimizing, generate calls to library functions for a certain
     set of builtins.  */
  if (!optimize
      && !called_as_built_in (fndecl)
      && fcode != BUILT_IN_FORK
      && fcode != BUILT_IN_EXECL
      && fcode != BUILT_IN_EXECV
      && fcode != BUILT_IN_EXECLP
      && fcode != BUILT_IN_EXECLE
      && fcode != BUILT_IN_EXECVP
      && fcode != BUILT_IN_EXECVE
      && fcode != BUILT_IN_ALLOCA
      && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
      && fcode != BUILT_IN_FREE
      && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
      && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
      && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
      && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
      && fcode != BUILT_IN_CHKP_BNDRET)
    return expand_call (exp, target, ignore);

  /* The built-in function expanders test for target == const0_rtx
     to determine whether the function's result will be ignored.  */
  if (ignore)
    target = const0_rtx;

  /* If the result of a pure or const built-in function is ignored, and
     none of its arguments are volatile, we can avoid expanding the
     built-in call and just evaluate the arguments for side-effects.  */
  if (target == const0_rtx
      && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
      && !(flags & ECF_LOOPING_CONST_OR_PURE))
    {
      bool volatilep = false;
      tree arg;
      call_expr_arg_iterator iter;

      FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
	if (TREE_THIS_VOLATILE (arg))
	  {
	    volatilep = true;
	    break;
	  }

      if (!volatilep)
	{
	  FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
	    expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return const0_rtx;
	}
    }

  /* expand_builtin_with_bounds is supposed to be used for
     instrumented builtin calls.  */
  gcc_assert (!CALL_WITH_BOUNDS_P (exp));

  switch (fcode)
    {
6042 CASE_FLT_FN (BUILT_IN_FABS
):
6043 case BUILT_IN_FABSD32
:
6044 case BUILT_IN_FABSD64
:
6045 case BUILT_IN_FABSD128
:
6046 target
= expand_builtin_fabs (exp
, target
, subtarget
);
6051 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
6052 target
= expand_builtin_copysign (exp
, target
, subtarget
);
6057 /* Just do a normal library call if we were unable to fold
6059 CASE_FLT_FN (BUILT_IN_CABS
):
6062 CASE_FLT_FN (BUILT_IN_EXP
):
6063 CASE_FLT_FN (BUILT_IN_EXP10
):
6064 CASE_FLT_FN (BUILT_IN_POW10
):
6065 CASE_FLT_FN (BUILT_IN_EXP2
):
6066 CASE_FLT_FN (BUILT_IN_EXPM1
):
6067 CASE_FLT_FN (BUILT_IN_LOGB
):
6068 CASE_FLT_FN (BUILT_IN_LOG
):
6069 CASE_FLT_FN (BUILT_IN_LOG10
):
6070 CASE_FLT_FN (BUILT_IN_LOG2
):
6071 CASE_FLT_FN (BUILT_IN_LOG1P
):
6072 CASE_FLT_FN (BUILT_IN_TAN
):
6073 CASE_FLT_FN (BUILT_IN_ASIN
):
6074 CASE_FLT_FN (BUILT_IN_ACOS
):
6075 CASE_FLT_FN (BUILT_IN_ATAN
):
6076 CASE_FLT_FN (BUILT_IN_SIGNIFICAND
):
6077 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6078 because of possible accuracy problems. */
6079 if (! flag_unsafe_math_optimizations
)
6081 CASE_FLT_FN (BUILT_IN_SQRT
):
6082 CASE_FLT_FN (BUILT_IN_FLOOR
):
6083 CASE_FLT_FN (BUILT_IN_CEIL
):
6084 CASE_FLT_FN (BUILT_IN_TRUNC
):
6085 CASE_FLT_FN (BUILT_IN_ROUND
):
6086 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
6087 CASE_FLT_FN (BUILT_IN_RINT
):
6088 target
= expand_builtin_mathfn (exp
, target
, subtarget
);
6093 CASE_FLT_FN (BUILT_IN_FMA
):
6094 target
= expand_builtin_mathfn_ternary (exp
, target
, subtarget
);
6099 CASE_FLT_FN (BUILT_IN_ILOGB
):
6100 if (! flag_unsafe_math_optimizations
)
6102 CASE_FLT_FN (BUILT_IN_ISINF
):
6103 CASE_FLT_FN (BUILT_IN_FINITE
):
6104 case BUILT_IN_ISFINITE
:
6105 case BUILT_IN_ISNORMAL
:
6106 target
= expand_builtin_interclass_mathfn (exp
, target
);
6111 CASE_FLT_FN (BUILT_IN_ICEIL
):
6112 CASE_FLT_FN (BUILT_IN_LCEIL
):
6113 CASE_FLT_FN (BUILT_IN_LLCEIL
):
6114 CASE_FLT_FN (BUILT_IN_LFLOOR
):
6115 CASE_FLT_FN (BUILT_IN_IFLOOR
):
6116 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
6117 target
= expand_builtin_int_roundingfn (exp
, target
);
6122 CASE_FLT_FN (BUILT_IN_IRINT
):
6123 CASE_FLT_FN (BUILT_IN_LRINT
):
6124 CASE_FLT_FN (BUILT_IN_LLRINT
):
6125 CASE_FLT_FN (BUILT_IN_IROUND
):
6126 CASE_FLT_FN (BUILT_IN_LROUND
):
6127 CASE_FLT_FN (BUILT_IN_LLROUND
):
6128 target
= expand_builtin_int_roundingfn_2 (exp
, target
);
6133 CASE_FLT_FN (BUILT_IN_POWI
):
6134 target
= expand_builtin_powi (exp
, target
);
6139 CASE_FLT_FN (BUILT_IN_ATAN2
):
6140 CASE_FLT_FN (BUILT_IN_LDEXP
):
6141 CASE_FLT_FN (BUILT_IN_SCALB
):
6142 CASE_FLT_FN (BUILT_IN_SCALBN
):
6143 CASE_FLT_FN (BUILT_IN_SCALBLN
):
6144 if (! flag_unsafe_math_optimizations
)
6147 CASE_FLT_FN (BUILT_IN_FMOD
):
6148 CASE_FLT_FN (BUILT_IN_REMAINDER
):
6149 CASE_FLT_FN (BUILT_IN_DREM
):
6150 CASE_FLT_FN (BUILT_IN_POW
):
6151 target
= expand_builtin_mathfn_2 (exp
, target
, subtarget
);
6156 CASE_FLT_FN (BUILT_IN_CEXPI
):
6157 target
= expand_builtin_cexpi (exp
, target
);
6158 gcc_assert (target
);
6161 CASE_FLT_FN (BUILT_IN_SIN
):
6162 CASE_FLT_FN (BUILT_IN_COS
):
6163 if (! flag_unsafe_math_optimizations
)
6165 target
= expand_builtin_mathfn_3 (exp
, target
, subtarget
);
6170 CASE_FLT_FN (BUILT_IN_SINCOS
):
6171 if (! flag_unsafe_math_optimizations
)
6173 target
= expand_builtin_sincos (exp
);
6178 case BUILT_IN_APPLY_ARGS
:
6179 return expand_builtin_apply_args ();
6181 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6182 FUNCTION with a copy of the parameters described by
6183 ARGUMENTS, and ARGSIZE. It returns a block of memory
6184 allocated on the stack into which is stored all the registers
6185 that might possibly be used for returning the result of a
6186 function. ARGUMENTS is the value returned by
6187 __builtin_apply_args. ARGSIZE is the number of bytes of
6188 arguments that must be copied. ??? How should this value be
6189 computed? We'll also need a safe worst case value for varargs
6191 case BUILT_IN_APPLY
:
6192 if (!validate_arglist (exp
, POINTER_TYPE
,
6193 POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
)
6194 && !validate_arglist (exp
, REFERENCE_TYPE
,
6195 POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
6201 ops
[0] = expand_normal (CALL_EXPR_ARG (exp
, 0));
6202 ops
[1] = expand_normal (CALL_EXPR_ARG (exp
, 1));
6203 ops
[2] = expand_normal (CALL_EXPR_ARG (exp
, 2));
6205 return expand_builtin_apply (ops
[0], ops
[1], ops
[2]);
6208 /* __builtin_return (RESULT) causes the function to return the
6209 value described by RESULT. RESULT is address of the block of
6210 memory returned by __builtin_apply. */
6211 case BUILT_IN_RETURN
:
6212 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6213 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp
, 0)));
6216 case BUILT_IN_SAVEREGS
:
6217 return expand_builtin_saveregs ();
6219 case BUILT_IN_VA_ARG_PACK
:
6220 /* All valid uses of __builtin_va_arg_pack () are removed during
6222 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp
);
6225 case BUILT_IN_VA_ARG_PACK_LEN
:
6226 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6228 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp
);
6231 /* Return the address of the first anonymous stack arg. */
6232 case BUILT_IN_NEXT_ARG
:
6233 if (fold_builtin_next_arg (exp
, false))
6235 return expand_builtin_next_arg ();
6237 case BUILT_IN_CLEAR_CACHE
:
6238 target
= expand_builtin___clear_cache (exp
);
6243 case BUILT_IN_CLASSIFY_TYPE
:
6244 return expand_builtin_classify_type (exp
);
6246 case BUILT_IN_CONSTANT_P
:
6249 case BUILT_IN_FRAME_ADDRESS
:
6250 case BUILT_IN_RETURN_ADDRESS
:
6251 return expand_builtin_frame_address (fndecl
, exp
);
6253 /* Returns the address of the area where the structure is returned.
6255 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS
:
6256 if (call_expr_nargs (exp
) != 0
6257 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl
)))
6258 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl
))))
6261 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl
)), 0);
6263 case BUILT_IN_ALLOCA
:
6264 case BUILT_IN_ALLOCA_WITH_ALIGN
:
6265 /* If the allocation stems from the declaration of a variable-sized
6266 object, it cannot accumulate. */
6267 target
= expand_builtin_alloca (exp
, CALL_ALLOCA_FOR_VAR_P (exp
));
6272 case BUILT_IN_STACK_SAVE
:
6273 return expand_stack_save ();
6275 case BUILT_IN_STACK_RESTORE
:
6276 expand_stack_restore (CALL_EXPR_ARG (exp
, 0));
6279 case BUILT_IN_BSWAP16
:
6280 case BUILT_IN_BSWAP32
:
6281 case BUILT_IN_BSWAP64
:
6282 target
= expand_builtin_bswap (target_mode
, exp
, target
, subtarget
);
6287 CASE_INT_FN (BUILT_IN_FFS
):
6288 target
= expand_builtin_unop (target_mode
, exp
, target
,
6289 subtarget
, ffs_optab
);
6294 CASE_INT_FN (BUILT_IN_CLZ
):
6295 target
= expand_builtin_unop (target_mode
, exp
, target
,
6296 subtarget
, clz_optab
);
6301 CASE_INT_FN (BUILT_IN_CTZ
):
6302 target
= expand_builtin_unop (target_mode
, exp
, target
,
6303 subtarget
, ctz_optab
);
6308 CASE_INT_FN (BUILT_IN_CLRSB
):
6309 target
= expand_builtin_unop (target_mode
, exp
, target
,
6310 subtarget
, clrsb_optab
);
6315 CASE_INT_FN (BUILT_IN_POPCOUNT
):
6316 target
= expand_builtin_unop (target_mode
, exp
, target
,
6317 subtarget
, popcount_optab
);
6322 CASE_INT_FN (BUILT_IN_PARITY
):
6323 target
= expand_builtin_unop (target_mode
, exp
, target
,
6324 subtarget
, parity_optab
);
6329 case BUILT_IN_STRLEN
:
6330 target
= expand_builtin_strlen (exp
, target
, target_mode
);
6335 case BUILT_IN_STRCPY
:
6336 target
= expand_builtin_strcpy (exp
, target
);
6341 case BUILT_IN_STRNCPY
:
6342 target
= expand_builtin_strncpy (exp
, target
);
6347 case BUILT_IN_STPCPY
:
6348 target
= expand_builtin_stpcpy (exp
, target
, mode
);
6353 case BUILT_IN_MEMCPY
:
6354 target
= expand_builtin_memcpy (exp
, target
);
6359 case BUILT_IN_MEMPCPY
:
6360 target
= expand_builtin_mempcpy (exp
, target
, mode
);
6365 case BUILT_IN_MEMSET
:
6366 target
= expand_builtin_memset (exp
, target
, mode
);
6371 case BUILT_IN_BZERO
:
6372 target
= expand_builtin_bzero (exp
);
6377 case BUILT_IN_STRCMP
:
6378 target
= expand_builtin_strcmp (exp
, target
);
6383 case BUILT_IN_STRNCMP
:
6384 target
= expand_builtin_strncmp (exp
, target
, mode
);
6390 case BUILT_IN_MEMCMP
:
6391 target
= expand_builtin_memcmp (exp
, target
, mode
);
6396 case BUILT_IN_SETJMP
:
6397 /* This should have been lowered to the builtins below. */
6400 case BUILT_IN_SETJMP_SETUP
:
6401 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6402 and the receiver label. */
6403 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
6405 rtx buf_addr
= expand_expr (CALL_EXPR_ARG (exp
, 0), subtarget
,
6406 VOIDmode
, EXPAND_NORMAL
);
6407 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 1), 0);
6408 rtx label_r
= label_rtx (label
);
6410 /* This is copied from the handling of non-local gotos. */
6411 expand_builtin_setjmp_setup (buf_addr
, label_r
);
6412 nonlocal_goto_handler_labels
6413 = gen_rtx_INSN_LIST (VOIDmode
, label_r
,
6414 nonlocal_goto_handler_labels
);
6415 /* ??? Do not let expand_label treat us as such since we would
6416 not want to be both on the list of non-local labels and on
6417 the list of forced labels. */
6418 FORCED_LABEL (label
) = 0;
6423 case BUILT_IN_SETJMP_RECEIVER
:
6424 /* __builtin_setjmp_receiver is passed the receiver label. */
6425 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6427 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 0), 0);
6428 rtx label_r
= label_rtx (label
);
6430 expand_builtin_setjmp_receiver (label_r
);
6435 /* __builtin_longjmp is passed a pointer to an array of five words.
6436 It's similar to the C library longjmp function but works with
6437 __builtin_setjmp above. */
6438 case BUILT_IN_LONGJMP
:
6439 if (validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
6441 rtx buf_addr
= expand_expr (CALL_EXPR_ARG (exp
, 0), subtarget
,
6442 VOIDmode
, EXPAND_NORMAL
);
6443 rtx value
= expand_normal (CALL_EXPR_ARG (exp
, 1));
6445 if (value
!= const1_rtx
)
6447 error ("%<__builtin_longjmp%> second argument must be 1");
6451 expand_builtin_longjmp (buf_addr
, value
);
6456 case BUILT_IN_NONLOCAL_GOTO
:
6457 target
= expand_builtin_nonlocal_goto (exp
);
6462 /* This updates the setjmp buffer that is its argument with the value
6463 of the current stack pointer. */
6464 case BUILT_IN_UPDATE_SETJMP_BUF
:
6465 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6468 = expand_normal (CALL_EXPR_ARG (exp
, 0));
6470 expand_builtin_update_setjmp_buf (buf_addr
);
6476 expand_builtin_trap ();
6479 case BUILT_IN_UNREACHABLE
:
6480 expand_builtin_unreachable ();
6483 CASE_FLT_FN (BUILT_IN_SIGNBIT
):
6484 case BUILT_IN_SIGNBITD32
:
6485 case BUILT_IN_SIGNBITD64
:
6486 case BUILT_IN_SIGNBITD128
:
6487 target
= expand_builtin_signbit (exp
, target
);
6492 /* Various hooks for the DWARF 2 __throw routine. */
6493 case BUILT_IN_UNWIND_INIT
:
6494 expand_builtin_unwind_init ();
6496 case BUILT_IN_DWARF_CFA
:
6497 return virtual_cfa_rtx
;
6498 #ifdef DWARF2_UNWIND_INFO
6499 case BUILT_IN_DWARF_SP_COLUMN
:
6500 return expand_builtin_dwarf_sp_column ();
6501 case BUILT_IN_INIT_DWARF_REG_SIZES
:
6502 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp
, 0));
6505 case BUILT_IN_FROB_RETURN_ADDR
:
6506 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp
, 0));
6507 case BUILT_IN_EXTRACT_RETURN_ADDR
:
6508 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp
, 0));
6509 case BUILT_IN_EH_RETURN
:
6510 expand_builtin_eh_return (CALL_EXPR_ARG (exp
, 0),
6511 CALL_EXPR_ARG (exp
, 1));
6513 #ifdef EH_RETURN_DATA_REGNO
6514 case BUILT_IN_EH_RETURN_DATA_REGNO
:
6515 return expand_builtin_eh_return_data_regno (exp
);
6517 case BUILT_IN_EXTEND_POINTER
:
6518 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp
, 0));
6519 case BUILT_IN_EH_POINTER
:
6520 return expand_builtin_eh_pointer (exp
);
6521 case BUILT_IN_EH_FILTER
:
6522 return expand_builtin_eh_filter (exp
);
6523 case BUILT_IN_EH_COPY_VALUES
:
6524 return expand_builtin_eh_copy_values (exp
);
6526 case BUILT_IN_VA_START
:
6527 return expand_builtin_va_start (exp
);
6528 case BUILT_IN_VA_END
:
6529 return expand_builtin_va_end (exp
);
6530 case BUILT_IN_VA_COPY
:
6531 return expand_builtin_va_copy (exp
);
6532 case BUILT_IN_EXPECT
:
6533 return expand_builtin_expect (exp
, target
);
6534 case BUILT_IN_ASSUME_ALIGNED
:
6535 return expand_builtin_assume_aligned (exp
, target
);
6536 case BUILT_IN_PREFETCH
:
6537 expand_builtin_prefetch (exp
);
6540 case BUILT_IN_INIT_TRAMPOLINE
:
6541 return expand_builtin_init_trampoline (exp
, true);
6542 case BUILT_IN_INIT_HEAP_TRAMPOLINE
:
6543 return expand_builtin_init_trampoline (exp
, false);
6544 case BUILT_IN_ADJUST_TRAMPOLINE
:
6545 return expand_builtin_adjust_trampoline (exp
);
6548 case BUILT_IN_EXECL
:
6549 case BUILT_IN_EXECV
:
6550 case BUILT_IN_EXECLP
:
6551 case BUILT_IN_EXECLE
:
6552 case BUILT_IN_EXECVP
:
6553 case BUILT_IN_EXECVE
:
6554 target
= expand_builtin_fork_or_exec (fndecl
, exp
, target
, ignore
);
6559 case BUILT_IN_SYNC_FETCH_AND_ADD_1
:
6560 case BUILT_IN_SYNC_FETCH_AND_ADD_2
:
6561 case BUILT_IN_SYNC_FETCH_AND_ADD_4
:
6562 case BUILT_IN_SYNC_FETCH_AND_ADD_8
:
6563 case BUILT_IN_SYNC_FETCH_AND_ADD_16
:
6564 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_ADD_1
);
6565 target
= expand_builtin_sync_operation (mode
, exp
, PLUS
, false, target
);
6570 case BUILT_IN_SYNC_FETCH_AND_SUB_1
:
6571 case BUILT_IN_SYNC_FETCH_AND_SUB_2
:
6572 case BUILT_IN_SYNC_FETCH_AND_SUB_4
:
6573 case BUILT_IN_SYNC_FETCH_AND_SUB_8
:
6574 case BUILT_IN_SYNC_FETCH_AND_SUB_16
:
6575 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_SUB_1
);
6576 target
= expand_builtin_sync_operation (mode
, exp
, MINUS
, false, target
);
6581 case BUILT_IN_SYNC_FETCH_AND_OR_1
:
6582 case BUILT_IN_SYNC_FETCH_AND_OR_2
:
6583 case BUILT_IN_SYNC_FETCH_AND_OR_4
:
6584 case BUILT_IN_SYNC_FETCH_AND_OR_8
:
6585 case BUILT_IN_SYNC_FETCH_AND_OR_16
:
6586 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_OR_1
);
6587 target
= expand_builtin_sync_operation (mode
, exp
, IOR
, false, target
);
6592 case BUILT_IN_SYNC_FETCH_AND_AND_1
:
6593 case BUILT_IN_SYNC_FETCH_AND_AND_2
:
6594 case BUILT_IN_SYNC_FETCH_AND_AND_4
:
6595 case BUILT_IN_SYNC_FETCH_AND_AND_8
:
6596 case BUILT_IN_SYNC_FETCH_AND_AND_16
:
6597 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_AND_1
);
6598 target
= expand_builtin_sync_operation (mode
, exp
, AND
, false, target
);
6603 case BUILT_IN_SYNC_FETCH_AND_XOR_1
:
6604 case BUILT_IN_SYNC_FETCH_AND_XOR_2
:
6605 case BUILT_IN_SYNC_FETCH_AND_XOR_4
:
6606 case BUILT_IN_SYNC_FETCH_AND_XOR_8
:
6607 case BUILT_IN_SYNC_FETCH_AND_XOR_16
:
6608 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_XOR_1
);
6609 target
= expand_builtin_sync_operation (mode
, exp
, XOR
, false, target
);
6614 case BUILT_IN_SYNC_FETCH_AND_NAND_1
:
6615 case BUILT_IN_SYNC_FETCH_AND_NAND_2
:
6616 case BUILT_IN_SYNC_FETCH_AND_NAND_4
:
6617 case BUILT_IN_SYNC_FETCH_AND_NAND_8
:
6618 case BUILT_IN_SYNC_FETCH_AND_NAND_16
:
6619 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_NAND_1
);
6620 target
= expand_builtin_sync_operation (mode
, exp
, NOT
, false, target
);
6625 case BUILT_IN_SYNC_ADD_AND_FETCH_1
:
6626 case BUILT_IN_SYNC_ADD_AND_FETCH_2
:
6627 case BUILT_IN_SYNC_ADD_AND_FETCH_4
:
6628 case BUILT_IN_SYNC_ADD_AND_FETCH_8
:
6629 case BUILT_IN_SYNC_ADD_AND_FETCH_16
:
6630 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_ADD_AND_FETCH_1
);
6631 target
= expand_builtin_sync_operation (mode
, exp
, PLUS
, true, target
);
6636 case BUILT_IN_SYNC_SUB_AND_FETCH_1
:
6637 case BUILT_IN_SYNC_SUB_AND_FETCH_2
:
6638 case BUILT_IN_SYNC_SUB_AND_FETCH_4
:
6639 case BUILT_IN_SYNC_SUB_AND_FETCH_8
:
6640 case BUILT_IN_SYNC_SUB_AND_FETCH_16
:
6641 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_SUB_AND_FETCH_1
);
6642 target
= expand_builtin_sync_operation (mode
, exp
, MINUS
, true, target
);
6647 case BUILT_IN_SYNC_OR_AND_FETCH_1
:
6648 case BUILT_IN_SYNC_OR_AND_FETCH_2
:
6649 case BUILT_IN_SYNC_OR_AND_FETCH_4
:
6650 case BUILT_IN_SYNC_OR_AND_FETCH_8
:
6651 case BUILT_IN_SYNC_OR_AND_FETCH_16
:
6652 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_OR_AND_FETCH_1
);
6653 target
= expand_builtin_sync_operation (mode
, exp
, IOR
, true, target
);
6658 case BUILT_IN_SYNC_AND_AND_FETCH_1
:
6659 case BUILT_IN_SYNC_AND_AND_FETCH_2
:
6660 case BUILT_IN_SYNC_AND_AND_FETCH_4
:
6661 case BUILT_IN_SYNC_AND_AND_FETCH_8
:
6662 case BUILT_IN_SYNC_AND_AND_FETCH_16
:
6663 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_AND_AND_FETCH_1
);
6664 target
= expand_builtin_sync_operation (mode
, exp
, AND
, true, target
);
6669 case BUILT_IN_SYNC_XOR_AND_FETCH_1
:
6670 case BUILT_IN_SYNC_XOR_AND_FETCH_2
:
6671 case BUILT_IN_SYNC_XOR_AND_FETCH_4
:
6672 case BUILT_IN_SYNC_XOR_AND_FETCH_8
:
6673 case BUILT_IN_SYNC_XOR_AND_FETCH_16
:
6674 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_XOR_AND_FETCH_1
);
6675 target
= expand_builtin_sync_operation (mode
, exp
, XOR
, true, target
);
6680 case BUILT_IN_SYNC_NAND_AND_FETCH_1
:
6681 case BUILT_IN_SYNC_NAND_AND_FETCH_2
:
6682 case BUILT_IN_SYNC_NAND_AND_FETCH_4
:
6683 case BUILT_IN_SYNC_NAND_AND_FETCH_8
:
6684 case BUILT_IN_SYNC_NAND_AND_FETCH_16
:
6685 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_NAND_AND_FETCH_1
);
6686 target
= expand_builtin_sync_operation (mode
, exp
, NOT
, true, target
);
6691 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1
:
6692 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2
:
6693 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4
:
6694 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8
:
6695 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16
:
6696 if (mode
== VOIDmode
)
6697 mode
= TYPE_MODE (boolean_type_node
);
6698 if (!target
|| !register_operand (target
, mode
))
6699 target
= gen_reg_rtx (mode
);
6701 mode
= get_builtin_sync_mode
6702 (fcode
- BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1
);
6703 target
= expand_builtin_compare_and_swap (mode
, exp
, true, target
);
6708 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1
:
6709 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2
:
6710 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4
:
6711 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8
:
6712 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16
:
6713 mode
= get_builtin_sync_mode
6714 (fcode
- BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1
);
6715 target
= expand_builtin_compare_and_swap (mode
, exp
, false, target
);
6720 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1
:
6721 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2
:
6722 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4
:
6723 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8
:
6724 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16
:
6725 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_LOCK_TEST_AND_SET_1
);
6726 target
= expand_builtin_sync_lock_test_and_set (mode
, exp
, target
);
6731 case BUILT_IN_SYNC_LOCK_RELEASE_1
:
6732 case BUILT_IN_SYNC_LOCK_RELEASE_2
:
6733 case BUILT_IN_SYNC_LOCK_RELEASE_4
:
6734 case BUILT_IN_SYNC_LOCK_RELEASE_8
:
6735 case BUILT_IN_SYNC_LOCK_RELEASE_16
:
6736 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_LOCK_RELEASE_1
);
6737 expand_builtin_sync_lock_release (mode
, exp
);
6740 case BUILT_IN_SYNC_SYNCHRONIZE
:
6741 expand_builtin_sync_synchronize ();
6744 case BUILT_IN_ATOMIC_EXCHANGE_1
:
6745 case BUILT_IN_ATOMIC_EXCHANGE_2
:
6746 case BUILT_IN_ATOMIC_EXCHANGE_4
:
6747 case BUILT_IN_ATOMIC_EXCHANGE_8
:
6748 case BUILT_IN_ATOMIC_EXCHANGE_16
:
6749 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_EXCHANGE_1
);
6750 target
= expand_builtin_atomic_exchange (mode
, exp
, target
);
6755 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
:
6756 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2
:
6757 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4
:
6758 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8
:
6759 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16
:
6761 unsigned int nargs
, z
;
6762 vec
<tree
, va_gc
> *vec
;
6765 get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
);
6766 target
= expand_builtin_atomic_compare_exchange (mode
, exp
, target
);
6770 /* If this is turned into an external library call, the weak parameter
6771 must be dropped to match the expected parameter list. */
6772 nargs
= call_expr_nargs (exp
);
6773 vec_alloc (vec
, nargs
- 1);
6774 for (z
= 0; z
< 3; z
++)
6775 vec
->quick_push (CALL_EXPR_ARG (exp
, z
));
6776 /* Skip the boolean weak parameter. */
6777 for (z
= 4; z
< 6; z
++)
6778 vec
->quick_push (CALL_EXPR_ARG (exp
, z
));
6779 exp
= build_call_vec (TREE_TYPE (exp
), CALL_EXPR_FN (exp
), vec
);
6783 case BUILT_IN_ATOMIC_LOAD_1
:
6784 case BUILT_IN_ATOMIC_LOAD_2
:
6785 case BUILT_IN_ATOMIC_LOAD_4
:
6786 case BUILT_IN_ATOMIC_LOAD_8
:
6787 case BUILT_IN_ATOMIC_LOAD_16
:
6788 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_LOAD_1
);
6789 target
= expand_builtin_atomic_load (mode
, exp
, target
);
6794 case BUILT_IN_ATOMIC_STORE_1
:
6795 case BUILT_IN_ATOMIC_STORE_2
:
6796 case BUILT_IN_ATOMIC_STORE_4
:
6797 case BUILT_IN_ATOMIC_STORE_8
:
6798 case BUILT_IN_ATOMIC_STORE_16
:
6799 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_STORE_1
);
6800 target
= expand_builtin_atomic_store (mode
, exp
);
6805 case BUILT_IN_ATOMIC_ADD_FETCH_1
:
6806 case BUILT_IN_ATOMIC_ADD_FETCH_2
:
6807 case BUILT_IN_ATOMIC_ADD_FETCH_4
:
6808 case BUILT_IN_ATOMIC_ADD_FETCH_8
:
6809 case BUILT_IN_ATOMIC_ADD_FETCH_16
:
6811 enum built_in_function lib
;
6812 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_ADD_FETCH_1
);
6813 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_ADD_1
+
6814 (fcode
- BUILT_IN_ATOMIC_ADD_FETCH_1
));
6815 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, PLUS
, true,
6821 case BUILT_IN_ATOMIC_SUB_FETCH_1
:
6822 case BUILT_IN_ATOMIC_SUB_FETCH_2
:
6823 case BUILT_IN_ATOMIC_SUB_FETCH_4
:
6824 case BUILT_IN_ATOMIC_SUB_FETCH_8
:
6825 case BUILT_IN_ATOMIC_SUB_FETCH_16
:
6827 enum built_in_function lib
;
6828 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_SUB_FETCH_1
);
6829 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_SUB_1
+
6830 (fcode
- BUILT_IN_ATOMIC_SUB_FETCH_1
));
6831 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, MINUS
, true,
6837 case BUILT_IN_ATOMIC_AND_FETCH_1
:
6838 case BUILT_IN_ATOMIC_AND_FETCH_2
:
6839 case BUILT_IN_ATOMIC_AND_FETCH_4
:
6840 case BUILT_IN_ATOMIC_AND_FETCH_8
:
6841 case BUILT_IN_ATOMIC_AND_FETCH_16
:
6843 enum built_in_function lib
;
6844 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_AND_FETCH_1
);
6845 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_AND_1
+
6846 (fcode
- BUILT_IN_ATOMIC_AND_FETCH_1
));
6847 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, AND
, true,
6853 case BUILT_IN_ATOMIC_NAND_FETCH_1
:
6854 case BUILT_IN_ATOMIC_NAND_FETCH_2
:
6855 case BUILT_IN_ATOMIC_NAND_FETCH_4
:
6856 case BUILT_IN_ATOMIC_NAND_FETCH_8
:
6857 case BUILT_IN_ATOMIC_NAND_FETCH_16
:
6859 enum built_in_function lib
;
6860 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_NAND_FETCH_1
);
6861 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_NAND_1
+
6862 (fcode
- BUILT_IN_ATOMIC_NAND_FETCH_1
));
6863 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, NOT
, true,
6869 case BUILT_IN_ATOMIC_XOR_FETCH_1
:
6870 case BUILT_IN_ATOMIC_XOR_FETCH_2
:
6871 case BUILT_IN_ATOMIC_XOR_FETCH_4
:
6872 case BUILT_IN_ATOMIC_XOR_FETCH_8
:
6873 case BUILT_IN_ATOMIC_XOR_FETCH_16
:
6875 enum built_in_function lib
;
6876 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_XOR_FETCH_1
);
6877 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_XOR_1
+
6878 (fcode
- BUILT_IN_ATOMIC_XOR_FETCH_1
));
6879 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, XOR
, true,
6885 case BUILT_IN_ATOMIC_OR_FETCH_1
:
6886 case BUILT_IN_ATOMIC_OR_FETCH_2
:
6887 case BUILT_IN_ATOMIC_OR_FETCH_4
:
6888 case BUILT_IN_ATOMIC_OR_FETCH_8
:
6889 case BUILT_IN_ATOMIC_OR_FETCH_16
:
6891 enum built_in_function lib
;
6892 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_OR_FETCH_1
);
6893 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_OR_1
+
6894 (fcode
- BUILT_IN_ATOMIC_OR_FETCH_1
));
6895 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, IOR
, true,
6901 case BUILT_IN_ATOMIC_FETCH_ADD_1
:
6902 case BUILT_IN_ATOMIC_FETCH_ADD_2
:
6903 case BUILT_IN_ATOMIC_FETCH_ADD_4
:
6904 case BUILT_IN_ATOMIC_FETCH_ADD_8
:
6905 case BUILT_IN_ATOMIC_FETCH_ADD_16
:
6906 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_ADD_1
);
6907 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, PLUS
, false,
6908 ignore
, BUILT_IN_NONE
);
6913 case BUILT_IN_ATOMIC_FETCH_SUB_1
:
6914 case BUILT_IN_ATOMIC_FETCH_SUB_2
:
6915 case BUILT_IN_ATOMIC_FETCH_SUB_4
:
6916 case BUILT_IN_ATOMIC_FETCH_SUB_8
:
6917 case BUILT_IN_ATOMIC_FETCH_SUB_16
:
6918 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_SUB_1
);
6919 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, MINUS
, false,
6920 ignore
, BUILT_IN_NONE
);
6925 case BUILT_IN_ATOMIC_FETCH_AND_1
:
6926 case BUILT_IN_ATOMIC_FETCH_AND_2
:
6927 case BUILT_IN_ATOMIC_FETCH_AND_4
:
6928 case BUILT_IN_ATOMIC_FETCH_AND_8
:
6929 case BUILT_IN_ATOMIC_FETCH_AND_16
:
6930 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_AND_1
);
6931 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, AND
, false,
6932 ignore
, BUILT_IN_NONE
);
6937 case BUILT_IN_ATOMIC_FETCH_NAND_1
:
6938 case BUILT_IN_ATOMIC_FETCH_NAND_2
:
6939 case BUILT_IN_ATOMIC_FETCH_NAND_4
:
6940 case BUILT_IN_ATOMIC_FETCH_NAND_8
:
6941 case BUILT_IN_ATOMIC_FETCH_NAND_16
:
6942 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_NAND_1
);
6943 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, NOT
, false,
6944 ignore
, BUILT_IN_NONE
);
6949 case BUILT_IN_ATOMIC_FETCH_XOR_1
:
6950 case BUILT_IN_ATOMIC_FETCH_XOR_2
:
6951 case BUILT_IN_ATOMIC_FETCH_XOR_4
:
6952 case BUILT_IN_ATOMIC_FETCH_XOR_8
:
6953 case BUILT_IN_ATOMIC_FETCH_XOR_16
:
6954 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_XOR_1
);
6955 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, XOR
, false,
6956 ignore
, BUILT_IN_NONE
);
6961 case BUILT_IN_ATOMIC_FETCH_OR_1
:
6962 case BUILT_IN_ATOMIC_FETCH_OR_2
:
6963 case BUILT_IN_ATOMIC_FETCH_OR_4
:
6964 case BUILT_IN_ATOMIC_FETCH_OR_8
:
6965 case BUILT_IN_ATOMIC_FETCH_OR_16
:
6966 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_OR_1
);
6967 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, IOR
, false,
6968 ignore
, BUILT_IN_NONE
);
6973 case BUILT_IN_ATOMIC_TEST_AND_SET
:
6974 return expand_builtin_atomic_test_and_set (exp
, target
);
6976 case BUILT_IN_ATOMIC_CLEAR
:
6977 return expand_builtin_atomic_clear (exp
);
6979 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE
:
6980 return expand_builtin_atomic_always_lock_free (exp
);
6982 case BUILT_IN_ATOMIC_IS_LOCK_FREE
:
6983 target
= expand_builtin_atomic_is_lock_free (exp
);
6988 case BUILT_IN_ATOMIC_THREAD_FENCE
:
6989 expand_builtin_atomic_thread_fence (exp
);
6992 case BUILT_IN_ATOMIC_SIGNAL_FENCE
:
6993 expand_builtin_atomic_signal_fence (exp
);
6996 case BUILT_IN_OBJECT_SIZE
:
6997 return expand_builtin_object_size (exp
);
6999 case BUILT_IN_MEMCPY_CHK
:
7000 case BUILT_IN_MEMPCPY_CHK
:
7001 case BUILT_IN_MEMMOVE_CHK
:
7002 case BUILT_IN_MEMSET_CHK
:
7003 target
= expand_builtin_memory_chk (exp
, target
, mode
, fcode
);
7008 case BUILT_IN_STRCPY_CHK
:
7009 case BUILT_IN_STPCPY_CHK
:
7010 case BUILT_IN_STRNCPY_CHK
:
7011 case BUILT_IN_STPNCPY_CHK
:
7012 case BUILT_IN_STRCAT_CHK
:
7013 case BUILT_IN_STRNCAT_CHK
:
7014 case BUILT_IN_SNPRINTF_CHK
:
7015 case BUILT_IN_VSNPRINTF_CHK
:
7016 maybe_emit_chk_warning (exp
, fcode
);
7019 case BUILT_IN_SPRINTF_CHK
:
7020 case BUILT_IN_VSPRINTF_CHK
:
7021 maybe_emit_sprintf_chk_warning (exp
, fcode
);
7025 if (warn_free_nonheap_object
)
7026 maybe_emit_free_warning (exp
);
7029 case BUILT_IN_THREAD_POINTER
:
7030 return expand_builtin_thread_pointer (exp
, target
);
7032 case BUILT_IN_SET_THREAD_POINTER
:
7033 expand_builtin_set_thread_pointer (exp
);
7036 case BUILT_IN_CILK_DETACH
:
7037 expand_builtin_cilk_detach (exp
);
7040 case BUILT_IN_CILK_POP_FRAME
:
7041 expand_builtin_cilk_pop_frame (exp
);
7044 case BUILT_IN_CHKP_INIT_PTR_BOUNDS
:
7045 case BUILT_IN_CHKP_NULL_PTR_BOUNDS
:
7046 case BUILT_IN_CHKP_COPY_PTR_BOUNDS
:
7047 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
:
7048 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
:
7049 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS
:
7050 case BUILT_IN_CHKP_SET_PTR_BOUNDS
:
7051 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS
:
7052 case BUILT_IN_CHKP_STORE_PTR_BOUNDS
:
7053 case BUILT_IN_CHKP_GET_PTR_LBOUND
:
7054 case BUILT_IN_CHKP_GET_PTR_UBOUND
:
7055 /* We allow user CHKP builtins if Pointer Bounds
7057 if (!chkp_function_instrumented_p (current_function_decl
))
7059 if (fcode
== BUILT_IN_CHKP_SET_PTR_BOUNDS
7060 || fcode
== BUILT_IN_CHKP_NARROW_PTR_BOUNDS
7061 || fcode
== BUILT_IN_CHKP_INIT_PTR_BOUNDS
7062 || fcode
== BUILT_IN_CHKP_NULL_PTR_BOUNDS
7063 || fcode
== BUILT_IN_CHKP_COPY_PTR_BOUNDS
)
7064 return expand_normal (CALL_EXPR_ARG (exp
, 0));
7065 else if (fcode
== BUILT_IN_CHKP_GET_PTR_LBOUND
)
7066 return expand_normal (size_zero_node
);
7067 else if (fcode
== BUILT_IN_CHKP_GET_PTR_UBOUND
)
7068 return expand_normal (size_int (-1));
7074 case BUILT_IN_CHKP_BNDMK
:
7075 case BUILT_IN_CHKP_BNDSTX
:
7076 case BUILT_IN_CHKP_BNDCL
:
7077 case BUILT_IN_CHKP_BNDCU
:
7078 case BUILT_IN_CHKP_BNDLDX
:
7079 case BUILT_IN_CHKP_BNDRET
:
7080 case BUILT_IN_CHKP_INTERSECT
:
7081 case BUILT_IN_CHKP_NARROW
:
7082 case BUILT_IN_CHKP_EXTRACT_LOWER
:
7083 case BUILT_IN_CHKP_EXTRACT_UPPER
:
7084 /* Software implementation of Pointer Bounds Checker is NYI.
7085 Target support is required. */
7086 error ("Your target platform does not support -fcheck-pointer-bounds");
7089 case BUILT_IN_ACC_ON_DEVICE
:
7090 target
= expand_builtin_acc_on_device (exp
, target
);
7095 default: /* just do library call, if unknown builtin */
7099 /* The switch statement above can drop through to cause the function
7100 to be called normally. */
7101 return expand_call (exp
, target
, ignore
);
/* Similar to expand_builtin but is used for instrumented calls.  */

rtx
expand_builtin_with_bounds (tree exp, rtx target,
			    rtx subtarget ATTRIBUTE_UNUSED,
			    machine_mode mode, int ignore)
{
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  gcc_assert (CALL_WITH_BOUNDS_P (exp));

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return targetm.expand_builtin (exp, target, subtarget, mode, ignore);

  gcc_assert (fcode > BEGIN_CHKP_BUILTINS
	      && fcode < END_CHKP_BUILTINS);

  switch (fcode)
    {
    case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
      target = expand_builtin_memcpy_with_bounds (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
      target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
      target = expand_builtin_memset_with_bounds (exp, target, mode);
      if (target)
	return target;
      break;

    default:
      break;
    }

  /* The switch statement above can drop through to cause the function
     to be called normally.  */
  return expand_call (exp, target, ignore);
}
/* Determine whether a tree node represents a call to a built-in
   function.  If the tree T is a call to a built-in function with
   the right number of arguments of the appropriate types, return
   the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
   Otherwise the return value is END_BUILTINS.  */

enum built_in_function
builtin_mathfn_code (const_tree t)
{
  const_tree fndecl, arg, parmlist;
  const_tree argtype, parmtype;
  const_call_expr_arg_iterator iter;

  if (TREE_CODE (t) != CALL_EXPR
      || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
    return END_BUILTINS;

  fndecl = get_callee_fndecl (t);
  if (fndecl == NULL_TREE
      || TREE_CODE (fndecl) != FUNCTION_DECL
      || ! DECL_BUILT_IN (fndecl)
      || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return END_BUILTINS;

  parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  init_const_call_expr_arg_iterator (t, &iter);
  for (; parmlist; parmlist = TREE_CHAIN (parmlist))
    {
      /* If a function doesn't take a variable number of arguments,
	 the last element in the list will have type `void'.  */
      parmtype = TREE_VALUE (parmlist);
      if (VOID_TYPE_P (parmtype))
	{
	  if (more_const_call_expr_args_p (&iter))
	    return END_BUILTINS;
	  return DECL_FUNCTION_CODE (fndecl);
	}

      if (! more_const_call_expr_args_p (&iter))
	return END_BUILTINS;

      arg = next_const_call_expr_arg (&iter);
      argtype = TREE_TYPE (arg);

      if (SCALAR_FLOAT_TYPE_P (parmtype))
	{
	  if (! SCALAR_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (COMPLEX_FLOAT_TYPE_P (parmtype))
	{
	  if (! COMPLEX_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (POINTER_TYPE_P (parmtype))
	{
	  if (! POINTER_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (INTEGRAL_TYPE_P (parmtype))
	{
	  if (! INTEGRAL_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else
	return END_BUILTINS;
    }

  /* Variable-length argument list.  */
  return DECL_FUNCTION_CODE (fndecl);
}
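/* For example, for the GENERIC call tree built from
     sqrt (x)
   with a double argument this returns BUILT_IN_SQRT, whereas a call whose
   argument list does not match the builtin's prototype (say, a pointer
   passed where a floating-point value is expected) yields END_BUILTINS
   (illustrative).  */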
/* Fold a call to __builtin_constant_p, if we know its argument ARG will
   evaluate to a constant.  */

static tree
fold_builtin_constant_p (tree arg)
{
  /* We return 1 for a numeric type that's known to be a constant
     value at compile-time or for an aggregate type that's a
     literal constant.  */
  STRIP_NOPS (arg);

  /* If we know this is a constant, emit the constant of one.  */
  if (CONSTANT_CLASS_P (arg)
      || (TREE_CODE (arg) == CONSTRUCTOR
	  && TREE_CONSTANT (arg)))
    return integer_one_node;
  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (arg, 0);
      if (TREE_CODE (op) == STRING_CST
	  || (TREE_CODE (op) == ARRAY_REF
	      && integer_zerop (TREE_OPERAND (op, 1))
	      && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
	return integer_one_node;
    }

  /* If this expression has side effects, show we don't know it to be a
     constant.  Likewise if it's a pointer or aggregate type since in
     those cases we only want literals, since those are only optimized
     when generating RTL, not later.
     And finally, if we are compiling an initializer, not code, we
     need to return a definite result now; there's not going to be any
     more optimization done.  */
  if (TREE_SIDE_EFFECTS (arg)
      || AGGREGATE_TYPE_P (TREE_TYPE (arg))
      || POINTER_TYPE_P (TREE_TYPE (arg))
      || cfun == 0
      || folding_initializer
      || force_folding_builtin_constant_p)
    return integer_zero_node;

  return NULL_TREE;
}
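/* Examples of the folding above (illustrative):

     __builtin_constant_p (3 * 4)      -> integer_one_node
     __builtin_constant_p ("abc")      -> integer_one_node
     __builtin_constant_p (global_ptr) -> integer_zero_node

   A non-constant argument of scalar arithmetic type inside a function body
   yields NULL_TREE here, so later optimizations still get a chance to prove
   it constant.  */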
/* Create builtin_expect with PRED and EXPECTED as its arguments and
   return it as a truthvalue.  */

static tree
build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
				tree predictor)
{
  tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;

  fn = builtin_decl_explicit (BUILT_IN_EXPECT);
  arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
  ret_type = TREE_TYPE (TREE_TYPE (fn));
  pred_type = TREE_VALUE (arg_types);
  expected_type = TREE_VALUE (TREE_CHAIN (arg_types));

  pred = fold_convert_loc (loc, pred_type, pred);
  expected = fold_convert_loc (loc, expected_type, expected);
  call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
				   predictor);

  return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
		 build_int_cst (ret_type, 0));
}
/* Fold a call to builtin_expect with arguments ARG0 and ARG1.  Return
   NULL_TREE if no simplification is possible.  */

tree
fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
{
  tree inner, fndecl, inner_arg0;
  enum tree_code code;

  /* Distribute the expected value over short-circuiting operators.
     See through the cast from truthvalue_type_node to long.  */
  inner_arg0 = arg0;
  while (CONVERT_EXPR_P (inner_arg0)
	 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
    inner_arg0 = TREE_OPERAND (inner_arg0, 0);

  /* If this is a builtin_expect within a builtin_expect keep the
     inner one.  See through a comparison against a constant.  It
     might have been added to create a truthvalue.  */
  inner = inner_arg0;

  if (COMPARISON_CLASS_P (inner)
      && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
    inner = TREE_OPERAND (inner, 0);

  if (TREE_CODE (inner) == CALL_EXPR
      && (fndecl = get_callee_fndecl (inner))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
    return arg0;

  inner = inner_arg0;
  code = TREE_CODE (inner);
  if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
    {
      tree op0 = TREE_OPERAND (inner, 0);
      tree op1 = TREE_OPERAND (inner, 1);

      op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
      op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
      inner = build2 (code, TREE_TYPE (inner), op0, op1);

      return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
    }

  /* If the argument isn't invariant then there's nothing else we can do.  */
  if (!TREE_CONSTANT (inner_arg0))
    return NULL_TREE;

  /* If we expect that a comparison against the argument will fold to
     a constant return the constant.  In practice, this means a true
     constant or the address of a non-weak symbol.  */
  inner = inner_arg0;
  STRIP_NOPS (inner);
  if (TREE_CODE (inner) == ADDR_EXPR)
    {
      do
	{
	  inner = TREE_OPERAND (inner, 0);
	}
      while (TREE_CODE (inner) == COMPONENT_REF
	     || TREE_CODE (inner) == ARRAY_REF);
      if ((TREE_CODE (inner) == VAR_DECL
	   || TREE_CODE (inner) == FUNCTION_DECL)
	  && DECL_WEAK (inner))
	return NULL_TREE;
    }

  /* Otherwise, ARG0 already has the proper type for the return value.  */
  return arg0;
}
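/* For example, the distribution step above turns

     __builtin_expect (a && b, 1)

   conceptually into

     __builtin_expect (a, 1) && __builtin_expect (b, 1)

   so that the expected value reaches both short-circuited operands
   (illustrative user-level sketch; the rewrite is performed on the
   GENERIC trees).  */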
/* Fold a call to __builtin_classify_type with argument ARG.  */

static tree
fold_builtin_classify_type (tree arg)
{
  if (arg == 0)
    return build_int_cst (integer_type_node, no_type_class);

  return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
}
/* Fold a call to __builtin_strlen with argument ARG.  */

static tree
fold_builtin_strlen (location_t loc, tree type, tree arg)
{
  if (!validate_arg (arg, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree len = c_strlen (arg, 0);

      if (len)
	return fold_convert_loc (loc, type, len);

      return NULL_TREE;
    }
}
/* Fold a call to __builtin_inf or __builtin_huge_val.  */

static tree
fold_builtin_inf (location_t loc, tree type, int warn)
{
  REAL_VALUE_TYPE real;

  /* __builtin_inff is intended to be usable to define INFINITY on all
     targets.  If an infinity is not available, INFINITY expands "to a
     positive constant of type float that overflows at translation
     time", footnote "In this case, using INFINITY will violate the
     constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
     Thus we pedwarn to ensure this constraint violation is
     diagnosed.  */
  if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
    pedwarn (loc, 0, "target format does not support infinity");

  real_inf (&real);
  return build_real (type, real);
}
/* Fold a call to __builtin_nan or __builtin_nans with argument ARG.  */

static tree
fold_builtin_nan (tree arg, tree type, int quiet)
{
  REAL_VALUE_TYPE real;
  const char *str;

  if (!validate_arg (arg, POINTER_TYPE))
    return NULL_TREE;
  str = c_getstr (arg);
  if (!str)
    return NULL_TREE;

  if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
    return NULL_TREE;

  return build_real (type, real);
}
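/* For instance, __builtin_nan ("") folds to a quiet NaN constant of the
   requested type and __builtin_nans ("") to a signalling NaN; a payload
   string that real_nan cannot parse leaves the call unfolded
   (illustrative).  */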
/* Return true if the floating point expression T has an integer value.
   We also allow +Inf, -Inf and NaN to be considered integer values.  */

bool
integer_valued_real_p (tree t)
{
  switch (TREE_CODE (t))
    {
    case FLOAT_EXPR:
      return true;

    case ABS_EXPR:
    case SAVE_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 0));

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 1));

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 0))
	     && integer_valued_real_p (TREE_OPERAND (t, 1));

    case COND_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 1))
	     && integer_valued_real_p (TREE_OPERAND (t, 2));

    case REAL_CST:
      return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));

    CASE_CONVERT:
      {
	tree type = TREE_TYPE (TREE_OPERAND (t, 0));
	if (TREE_CODE (type) == INTEGER_TYPE)
	  return true;
	if (TREE_CODE (type) == REAL_TYPE)
	  return integer_valued_real_p (TREE_OPERAND (t, 0));
	break;
      }

    case CALL_EXPR:
      switch (builtin_mathfn_code (t))
	{
	CASE_FLT_FN (BUILT_IN_CEIL):
	CASE_FLT_FN (BUILT_IN_FLOOR):
	CASE_FLT_FN (BUILT_IN_NEARBYINT):
	CASE_FLT_FN (BUILT_IN_RINT):
	CASE_FLT_FN (BUILT_IN_ROUND):
	CASE_FLT_FN (BUILT_IN_TRUNC):
	  return true;

	CASE_FLT_FN (BUILT_IN_FMIN):
	CASE_FLT_FN (BUILT_IN_FMAX):
	  return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
		 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));

	default:
	  break;
	}
      break;

    default:
      break;
    }
  return false;
}
/* FNDECL is assumed to be a builtin where truncation can be propagated
   across (for instance floor((double)f) == (double)floorf (f)).
   Do the transformation for a call with argument ARG.  */

static tree
fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Integer rounding functions are idempotent.  */
  if (fcode == builtin_mathfn_code (arg))
    return arg;

  /* If argument is already integer valued, and we don't need to worry
     about setting errno, there's no need to perform rounding.  */
  if (! flag_errno_math && integer_valued_real_p (arg))
    return arg;

  if (optimize)
    {
      tree arg0 = strip_float_extensions (arg);
      tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
      tree newtype = TREE_TYPE (arg0);
      tree decl;

      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
	  && (decl = mathfn_built_in (newtype, fcode)))
	return fold_convert_loc (loc, ftype,
				 build_call_expr_loc (loc, decl, 1,
						      fold_convert_loc (loc,
									newtype,
									arg0)));
    }
  return NULL_TREE;
}
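/* For example, with optimization enabled this turns

     floor ((double) f)        (where f has type float)

   into (double) floorf (f), performing the rounding in the narrower type
   (illustrative sketch of the transformation implemented above).  */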
/* FNDECL is assumed to be builtin which can narrow the FP type of
   the argument, for instance lround((double)f) -> lroundf (f).
   Do the transformation for a call with argument ARG.  */

static tree
fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* If argument is already integer valued, and we don't need to worry
     about setting errno, there's no need to perform rounding.  */
  if (! flag_errno_math && integer_valued_real_p (arg))
    return fold_build1_loc (loc, FIX_TRUNC_EXPR,
			    TREE_TYPE (TREE_TYPE (fndecl)), arg);

  if (optimize)
    {
      tree ftype = TREE_TYPE (arg);
      tree arg0 = strip_float_extensions (arg);
      tree newtype = TREE_TYPE (arg0);
      tree decl;

      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
	  && (decl = mathfn_built_in (newtype, fcode)))
	return build_call_expr_loc (loc, decl, 1,
				    fold_convert_loc (loc, newtype, arg0));
    }

  /* Canonicalize iround (x) to lround (x) on ILP32 targets where
     sizeof (int) == sizeof (long).  */
  if (TYPE_PRECISION (integer_type_node)
      == TYPE_PRECISION (long_integer_type_node))
    {
      tree newfn = NULL_TREE;
      switch (fcode)
	{
	CASE_FLT_FN (BUILT_IN_ICEIL):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
	  break;

	CASE_FLT_FN (BUILT_IN_IFLOOR):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
	  break;

	CASE_FLT_FN (BUILT_IN_IROUND):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
	  break;

	CASE_FLT_FN (BUILT_IN_IRINT):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
	  break;

	default:
	  break;
	}

      if (newfn)
	{
	  tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
	  return fold_convert_loc (loc,
				   TREE_TYPE (TREE_TYPE (fndecl)), newcall);
	}
    }

  /* Canonicalize llround (x) to lround (x) on LP64 targets where
     sizeof (long long) == sizeof (long).  */
  if (TYPE_PRECISION (long_long_integer_type_node)
      == TYPE_PRECISION (long_integer_type_node))
    {
      tree newfn = NULL_TREE;
      switch (fcode)
	{
	CASE_FLT_FN (BUILT_IN_LLCEIL):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
	  break;

	CASE_FLT_FN (BUILT_IN_LLFLOOR):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
	  break;

	CASE_FLT_FN (BUILT_IN_LLROUND):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
	  break;

	CASE_FLT_FN (BUILT_IN_LLRINT):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
	  break;

	default:
	  break;
	}

      if (newfn)
	{
	  tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
	  return fold_convert_loc (loc,
				   TREE_TYPE (TREE_TYPE (fndecl)), newcall);
	}
    }

  return NULL_TREE;
}
/* Fold call to builtin cabs, cabsf or cabsl with argument ARG.  TYPE is the
   return type.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
{
  tree res;

  if (!validate_arg (arg, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if (TREE_CODE (arg) == COMPLEX_CST
      && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
			      type, mpfr_hypot)))
    return res;

  if (TREE_CODE (arg) == COMPLEX_EXPR)
    {
      tree real = TREE_OPERAND (arg, 0);
      tree imag = TREE_OPERAND (arg, 1);

      /* If either part is zero, cabs is fabs of the other.  */
      if (real_zerop (real))
	return fold_build1_loc (loc, ABS_EXPR, type, imag);
      if (real_zerop (imag))
	return fold_build1_loc (loc, ABS_EXPR, type, real);

      /* cabs(x+xi) -> fabs(x)*sqrt(2).  */
      if (flag_unsafe_math_optimizations
	  && operand_equal_p (real, imag, OEP_PURE_SAME))
	{
	  const REAL_VALUE_TYPE sqrt2_trunc
	    = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());

	  return fold_build2_loc (loc, MULT_EXPR, type,
				  fold_build1_loc (loc, ABS_EXPR, type, real),
				  build_real (type, sqrt2_trunc));
	}
    }

  /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z).  */
  if (TREE_CODE (arg) == NEGATE_EXPR
      || TREE_CODE (arg) == CONJ_EXPR)
    return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));

  /* Don't do this when optimizing for size.  */
  if (flag_unsafe_math_optimizations
      && optimize && optimize_function_for_speed_p (cfun))
    {
      tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

      if (sqrtfn != NULL_TREE)
	{
	  tree rpart, ipart, result;

	  arg = builtin_save_expr (arg);

	  rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
	  ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);

	  rpart = builtin_save_expr (rpart);
	  ipart = builtin_save_expr (ipart);

	  result = fold_build2_loc (loc, PLUS_EXPR, type,
				    fold_build2_loc (loc, MULT_EXPR, type,
						     rpart, rpart),
				    fold_build2_loc (loc, MULT_EXPR, type,
						     ipart, ipart));

	  return build_call_expr_loc (loc, sqrtfn, 1, result);
	}
    }

  return NULL_TREE;
}
/* Build a complex (inf +- 0i) for the result of cproj.  TYPE is the
   complex tree type of the result.  If NEG is true, the imaginary
   zero is negative.  */

static tree
build_complex_cproj (tree type, bool neg)
{
  REAL_VALUE_TYPE rinf, rzero = dconst0;

  real_inf (&rinf);
  rzero.sign = neg;
  return build_complex (type, build_real (TREE_TYPE (type), rinf),
			build_real (TREE_TYPE (type), rzero));
}

/* Fold call to builtin cproj, cprojf or cprojl with argument ARG.  TYPE is the
   return type.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cproj (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
    return NULL_TREE;

  /* If there are no infinities, return arg.  */
  if (! HONOR_INFINITIES (type))
    return non_lvalue_loc (loc, arg);

  /* Calculate the result when the argument is a constant.  */
  if (TREE_CODE (arg) == COMPLEX_CST)
    {
      const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      if (real_isinf (real) || real_isinf (imag))
	return build_complex_cproj (type, imag->sign);
      else
	return arg;
    }
  else if (TREE_CODE (arg) == COMPLEX_EXPR)
    {
      tree real = TREE_OPERAND (arg, 0);
      tree imag = TREE_OPERAND (arg, 1);

      STRIP_NOPS (real);
      STRIP_NOPS (imag);

      /* If the real part is inf and the imag part is known to be
	 nonnegative, return (inf + 0i).  Remember side-effects are
	 possible in the imag part.  */
      if (TREE_CODE (real) == REAL_CST
	  && real_isinf (TREE_REAL_CST_PTR (real))
	  && tree_expr_nonnegative_p (imag))
	return omit_one_operand_loc (loc, type,
				     build_complex_cproj (type, false),
				     arg);

      /* If the imag part is inf, return (inf+I*copysign(0,imag)).
	 Remember side-effects are possible in the real part.  */
      if (TREE_CODE (imag) == REAL_CST
	  && real_isinf (TREE_REAL_CST_PTR (imag)))
	return
	  omit_one_operand_loc (loc, type,
				build_complex_cproj (type, TREE_REAL_CST_PTR
						     (imag)->sign), arg);
    }

  return NULL_TREE;
}
/* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_sqrt (location_t loc, tree arg, tree type)
{
  tree res;
  enum built_in_function fcode;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
    return res;

  /* Optimize sqrt(expN(x)) = expN(x*0.5).  */
  fcode = builtin_mathfn_code (arg);
  if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
    {
      tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      arg = fold_build2_loc (loc, MULT_EXPR, type,
			     CALL_EXPR_ARG (arg, 0),
			     build_real (type, dconsthalf));
      return build_call_expr_loc (loc, expfn, 1, arg);
    }

  /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)).  */
  if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
    {
      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

      if (powfn)
	{
	  tree arg0 = CALL_EXPR_ARG (arg, 0);
	  tree tree_root;
	  /* The inner root was either sqrt or cbrt.  */
	  /* This was a conditional expression but it triggered a bug
	     in Sun C 5.5.  */
	  REAL_VALUE_TYPE dconstroot;
	  if (BUILTIN_SQRT_P (fcode))
	    dconstroot = dconsthalf;
	  else
	    dconstroot = dconst_third ();

	  /* Adjust for the outer root.  */
	  SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
	  dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
	  tree_root = build_real (type, dconstroot);
	  return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
	}
    }

  /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5).  */
  if (flag_unsafe_math_optimizations
      && (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL))
    {
      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      tree arg0 = CALL_EXPR_ARG (arg, 0);
      tree arg1 = CALL_EXPR_ARG (arg, 1);
      tree narg1;
      if (!tree_expr_nonnegative_p (arg0))
	arg0 = build1 (ABS_EXPR, type, arg0);
      narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
			       build_real (type, dconsthalf));
      return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
    }

  return NULL_TREE;
}
7874 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7875 Return NULL_TREE if no simplification can be made. */
7878 fold_builtin_cbrt (location_t loc
, tree arg
, tree type
)
7880 const enum built_in_function fcode
= builtin_mathfn_code (arg
);
7883 if (!validate_arg (arg
, REAL_TYPE
))
7886 /* Calculate the result when the argument is a constant. */
7887 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_cbrt
, NULL
, NULL
, 0)))
7890 if (flag_unsafe_math_optimizations
)
7892 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7893 if (BUILTIN_EXPONENT_P (fcode
))
7895 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg
), 0);
7896 const REAL_VALUE_TYPE third_trunc
=
7897 real_value_truncate (TYPE_MODE (type
), dconst_third ());
7898 arg
= fold_build2_loc (loc
, MULT_EXPR
, type
,
7899 CALL_EXPR_ARG (arg
, 0),
7900 build_real (type
, third_trunc
));
7901 return build_call_expr_loc (loc
, expfn
, 1, arg
);
7904 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7905 if (BUILTIN_SQRT_P (fcode
))
7907 tree powfn
= mathfn_built_in (type
, BUILT_IN_POW
);
7911 tree arg0
= CALL_EXPR_ARG (arg
, 0);
7913 REAL_VALUE_TYPE dconstroot
= dconst_third ();
7915 SET_REAL_EXP (&dconstroot
, REAL_EXP (&dconstroot
) - 1);
7916 dconstroot
= real_value_truncate (TYPE_MODE (type
), dconstroot
);
7917 tree_root
= build_real (type
, dconstroot
);
7918 return build_call_expr_loc (loc
, powfn
, 2, arg0
, tree_root
);
7922 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7923 if (BUILTIN_CBRT_P (fcode
))
7925 tree arg0
= CALL_EXPR_ARG (arg
, 0);
7926 if (tree_expr_nonnegative_p (arg0
))
7928 tree powfn
= mathfn_built_in (type
, BUILT_IN_POW
);
7933 REAL_VALUE_TYPE dconstroot
;
7935 real_arithmetic (&dconstroot
, MULT_EXPR
,
7936 dconst_third_ptr (), dconst_third_ptr ());
7937 dconstroot
= real_value_truncate (TYPE_MODE (type
), dconstroot
);
7938 tree_root
= build_real (type
, dconstroot
);
7939 return build_call_expr_loc (loc
, powfn
, 2, arg0
, tree_root
);
7944 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7945 if (fcode
== BUILT_IN_POW
7946 || fcode
== BUILT_IN_POWF
7947 || fcode
== BUILT_IN_POWL
)
7949 tree arg00
= CALL_EXPR_ARG (arg
, 0);
7950 tree arg01
= CALL_EXPR_ARG (arg
, 1);
7951 if (tree_expr_nonnegative_p (arg00
))
7953 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg
), 0);
7954 const REAL_VALUE_TYPE dconstroot
7955 = real_value_truncate (TYPE_MODE (type
), dconst_third ());
7956 tree narg01
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg01
,
7957 build_real (type
, dconstroot
));
7958 return build_call_expr_loc (loc
, powfn
, 2, arg00
, narg01
);
/* Fold function call to builtin cos, cosf, or cosl with argument ARG.
   TYPE is the type of the return value.  Return NULL_TREE if no
   simplification can be made.  */

static tree
fold_builtin_cos (location_t loc,
		  tree arg, tree type, tree fndecl)
{
  tree res, narg;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
    return res;

  /* Optimize cos(-x) into cos (x).  */
  if ((narg = fold_strip_sign_ops (arg)))
    return build_call_expr_loc (loc, fndecl, 1, narg);

  return NULL_TREE;
}
/* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree res, narg;

      /* Calculate the result when the argument is a constant.  */
      if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
	return res;

      /* Optimize cosh(-x) into cosh (x).  */
      if ((narg = fold_strip_sign_ops (arg)))
	return build_call_expr_loc (loc, fndecl, 1, narg);
    }

  return NULL_TREE;
}
/* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
   argument ARG.  TYPE is the type of the return value.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
		   bool hyper)
{
  if (validate_arg (arg, COMPLEX_TYPE)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
    {
      tree tmp;

      /* Calculate the result when the argument is a constant.  */
      if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
	return tmp;

      /* Optimize fn(-x) into fn(x).  */
      if ((tmp = fold_strip_sign_ops (arg)))
	return build_call_expr_loc (loc, fndecl, 1, tmp);
    }

  return NULL_TREE;
}
/* Fold function call to builtin tan, tanf, or tanl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_tan (tree arg, tree type)
{
  enum built_in_function fcode;
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
    return res;

  /* Optimize tan(atan(x)) = x.  */
  fcode = builtin_mathfn_code (arg);
  if (flag_unsafe_math_optimizations
      && (fcode == BUILT_IN_ATAN
	  || fcode == BUILT_IN_ATANF
	  || fcode == BUILT_IN_ATANL))
    return CALL_EXPR_ARG (arg, 0);

  return NULL_TREE;
}
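/* Illustrative sketch (not part of GCC): why tan(atan(x)) -> x is only done
   under -funsafe-math-optimizations.  The identity is exact in real
   arithmetic, but the folded form skips the rounding of the two library
   calls, so results can differ in the last ulp and FP exception flags are
   no longer raised.  Kept under #if 0; assumes <math.h>.  */
#if 0
#include <math.h>

double tan_atan_unfused (double x) { return tan (atan (x)); }
double tan_atan_folded  (double x) { return x; }
#endif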
8063 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
8064 NULL_TREE if no simplification can be made. */
8067 fold_builtin_sincos (location_t loc
,
8068 tree arg0
, tree arg1
, tree arg2
)
8073 if (!validate_arg (arg0
, REAL_TYPE
)
8074 || !validate_arg (arg1
, POINTER_TYPE
)
8075 || !validate_arg (arg2
, POINTER_TYPE
))
8078 type
= TREE_TYPE (arg0
);
8080 /* Calculate the result when the argument is a constant. */
8081 if ((res
= do_mpfr_sincos (arg0
, arg1
, arg2
)))
8084 /* Canonicalize sincos to cexpi. */
8085 if (!targetm
.libc_has_function (function_c99_math_complex
))
8087 fn
= mathfn_built_in (type
, BUILT_IN_CEXPI
);
8091 call
= build_call_expr_loc (loc
, fn
, 1, arg0
);
8092 call
= builtin_save_expr (call
);
8094 return build2 (COMPOUND_EXPR
, void_type_node
,
8095 build2 (MODIFY_EXPR
, void_type_node
,
8096 build_fold_indirect_ref_loc (loc
, arg1
),
8097 build1 (IMAGPART_EXPR
, type
, call
)),
8098 build2 (MODIFY_EXPR
, void_type_node
,
8099 build_fold_indirect_ref_loc (loc
, arg2
),
8100 build1 (REALPART_EXPR
, type
, call
)));
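/* Illustrative sketch (not part of GCC): the shape of the tree built above
   when sincos is canonicalized to cexpi.  Both results come from a single
   evaluation of cexpi (x) == cos (x) + i*sin (x); cexpi itself is a
   GCC-internal builtin, so the standard-C approximation below uses
   cexp (I*x) instead.  Kept under #if 0; assumes <complex.h>.  */
#if 0
#include <complex.h>

static void
sincos_via_cexpi (double x, double *sinp, double *cosp)
{
  double _Complex tmp = cexp (I * x);	/* stands in for cexpi (x) */
  *sinp = cimag (tmp);
  *cosp = creal (tmp);
}
#endif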
8103 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
8104 NULL_TREE if no simplification can be made. */
8107 fold_builtin_cexp (location_t loc
, tree arg0
, tree type
)
8110 tree realp
, imagp
, ifn
;
8113 if (!validate_arg (arg0
, COMPLEX_TYPE
)
8114 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) != REAL_TYPE
)
8117 /* Calculate the result when the argument is a constant. */
8118 if ((res
= do_mpc_arg1 (arg0
, type
, mpc_exp
)))
8121 rtype
= TREE_TYPE (TREE_TYPE (arg0
));
8123 /* In case we can figure out the real part of arg0 and it is constant zero
8125 if (!targetm
.libc_has_function (function_c99_math_complex
))
8127 ifn
= mathfn_built_in (rtype
, BUILT_IN_CEXPI
);
8131 if ((realp
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg0
))
8132 && real_zerop (realp
))
8134 tree narg
= fold_build1_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
8135 return build_call_expr_loc (loc
, ifn
, 1, narg
);
8138 /* In case we can easily decompose real and imaginary parts split cexp
8139 to exp (r) * cexpi (i). */
8140 if (flag_unsafe_math_optimizations
8143 tree rfn
, rcall
, icall
;
8145 rfn
= mathfn_built_in (rtype
, BUILT_IN_EXP
);
8149 imagp
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
8153 icall
= build_call_expr_loc (loc
, ifn
, 1, imagp
);
8154 icall
= builtin_save_expr (icall
);
8155 rcall
= build_call_expr_loc (loc
, rfn
, 1, realp
);
8156 rcall
= builtin_save_expr (rcall
);
8157 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
8158 fold_build2_loc (loc
, MULT_EXPR
, rtype
,
8160 fold_build1_loc (loc
, REALPART_EXPR
,
8162 fold_build2_loc (loc
, MULT_EXPR
, rtype
,
8164 fold_build1_loc (loc
, IMAGPART_EXPR
,
/* Fold function call to builtin trunc, truncf or truncl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize trunc of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE r, x;
      tree type = TREE_TYPE (TREE_TYPE (fndecl));

      x = TREE_REAL_CST (arg);
      real_trunc (&r, TYPE_MODE (type), &x);
      return build_real (type, r);
    }

  return fold_trunc_transparent_mathfn (loc, fndecl, arg);
}
/* Fold function call to builtin floor, floorf or floorl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_floor (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize floor of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE x;

      x = TREE_REAL_CST (arg);
      if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
	{
	  tree type = TREE_TYPE (TREE_TYPE (fndecl));
	  REAL_VALUE_TYPE r;

	  real_floor (&r, TYPE_MODE (type), &x);
	  return build_real (type, r);
	}
    }

  /* Fold floor (x) where x is nonnegative to trunc (x).  */
  if (tree_expr_nonnegative_p (arg))
    {
      tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
      if (truncfn)
	return build_call_expr_loc (loc, truncfn, 1, arg);
    }

  return fold_trunc_transparent_mathfn (loc, fndecl, arg);
}
/* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize ceil of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE x;

      x = TREE_REAL_CST (arg);
      if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
	{
	  tree type = TREE_TYPE (TREE_TYPE (fndecl));
	  REAL_VALUE_TYPE r;

	  real_ceil (&r, TYPE_MODE (type), &x);
	  return build_real (type, r);
	}
    }

  return fold_trunc_transparent_mathfn (loc, fndecl, arg);
}
/* Fold function call to builtin round, roundf or roundl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_round (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize round of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE x;

      x = TREE_REAL_CST (arg);
      if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
	{
	  tree type = TREE_TYPE (TREE_TYPE (fndecl));
	  REAL_VALUE_TYPE r;

	  real_round (&r, TYPE_MODE (type), &x);
	  return build_real (type, r);
	}
    }

  return fold_trunc_transparent_mathfn (loc, fndecl, arg);
}
8286 /* Fold function call to builtin lround, lroundf or lroundl (or the
8287 corresponding long long versions) and other rounding functions. ARG
8288 is the argument to the call. Return NULL_TREE if no simplification
8292 fold_builtin_int_roundingfn (location_t loc
, tree fndecl
, tree arg
)
8294 if (!validate_arg (arg
, REAL_TYPE
))
8297 /* Optimize lround of constant value. */
8298 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
8300 const REAL_VALUE_TYPE x
= TREE_REAL_CST (arg
);
8302 if (real_isfinite (&x
))
8304 tree itype
= TREE_TYPE (TREE_TYPE (fndecl
));
8305 tree ftype
= TREE_TYPE (arg
);
8309 switch (DECL_FUNCTION_CODE (fndecl
))
8311 CASE_FLT_FN (BUILT_IN_IFLOOR
):
8312 CASE_FLT_FN (BUILT_IN_LFLOOR
):
8313 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
8314 real_floor (&r
, TYPE_MODE (ftype
), &x
);
8317 CASE_FLT_FN (BUILT_IN_ICEIL
):
8318 CASE_FLT_FN (BUILT_IN_LCEIL
):
8319 CASE_FLT_FN (BUILT_IN_LLCEIL
):
8320 real_ceil (&r
, TYPE_MODE (ftype
), &x
);
8323 CASE_FLT_FN (BUILT_IN_IROUND
):
8324 CASE_FLT_FN (BUILT_IN_LROUND
):
8325 CASE_FLT_FN (BUILT_IN_LLROUND
):
8326 real_round (&r
, TYPE_MODE (ftype
), &x
);
8333 wide_int val
= real_to_integer (&r
, &fail
, TYPE_PRECISION (itype
));
8335 return wide_int_to_tree (itype
, val
);
8339 switch (DECL_FUNCTION_CODE (fndecl
))
8341 CASE_FLT_FN (BUILT_IN_LFLOOR
):
8342 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
8343 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8344 if (tree_expr_nonnegative_p (arg
))
8345 return fold_build1_loc (loc
, FIX_TRUNC_EXPR
,
8346 TREE_TYPE (TREE_TYPE (fndecl
)), arg
);
8351 return fold_fixed_mathfn (loc
, fndecl
, arg
);
8354 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8355 and their long and long long variants (i.e. ffsl and ffsll). ARG is
8356 the argument to the call. Return NULL_TREE if no simplification can
8360 fold_builtin_bitop (tree fndecl
, tree arg
)
8362 if (!validate_arg (arg
, INTEGER_TYPE
))
8365 /* Optimize for constant argument. */
8366 if (TREE_CODE (arg
) == INTEGER_CST
&& !TREE_OVERFLOW (arg
))
8368 tree type
= TREE_TYPE (arg
);
8371 switch (DECL_FUNCTION_CODE (fndecl
))
8373 CASE_INT_FN (BUILT_IN_FFS
):
8374 result
= wi::ffs (arg
);
8377 CASE_INT_FN (BUILT_IN_CLZ
):
8378 if (wi::ne_p (arg
, 0))
8379 result
= wi::clz (arg
);
8380 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type
), result
))
8381 result
= TYPE_PRECISION (type
);
8384 CASE_INT_FN (BUILT_IN_CTZ
):
8385 if (wi::ne_p (arg
, 0))
8386 result
= wi::ctz (arg
);
8387 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type
), result
))
8388 result
= TYPE_PRECISION (type
);
8391 CASE_INT_FN (BUILT_IN_CLRSB
):
8392 result
= wi::clrsb (arg
);
8395 CASE_INT_FN (BUILT_IN_POPCOUNT
):
8396 result
= wi::popcount (arg
);
8399 CASE_INT_FN (BUILT_IN_PARITY
):
8400 result
= wi::parity (arg
);
8407 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl
)), result
);
8413 /* Fold function call to builtin_bswap and the short, long and long long
8414 variants. Return NULL_TREE if no simplification can be made. */
8416 fold_builtin_bswap (tree fndecl
, tree arg
)
8418 if (! validate_arg (arg
, INTEGER_TYPE
))
8421 /* Optimize constant value. */
8422 if (TREE_CODE (arg
) == INTEGER_CST
&& !TREE_OVERFLOW (arg
))
8424 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8426 switch (DECL_FUNCTION_CODE (fndecl
))
8428 case BUILT_IN_BSWAP16
:
8429 case BUILT_IN_BSWAP32
:
8430 case BUILT_IN_BSWAP64
:
8432 signop sgn
= TYPE_SIGN (type
);
8434 wide_int_to_tree (type
,
8435 wide_int::from (arg
, TYPE_PRECISION (type
),
/* Fold a builtin function call to hypot, hypotf, or hypotl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_hypot (location_t loc, tree fndecl,
		    tree arg0, tree arg1, tree type)
{
  tree res, narg0, narg1;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
    return res;

  /* If either argument to hypot has a negate or abs, strip that off.
     E.g. hypot(-x,fabs(y)) -> hypot(x,y).  */
  narg0 = fold_strip_sign_ops (arg0);
  narg1 = fold_strip_sign_ops (arg1);
  if (narg0 || narg1)
    return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
				narg1 ? narg1 : arg1);

  /* If either argument is zero, hypot is fabs of the other.  */
  if (real_zerop (arg0))
    return fold_build1_loc (loc, ABS_EXPR, type, arg1);
  else if (real_zerop (arg1))
    return fold_build1_loc (loc, ABS_EXPR, type, arg0);

  /* hypot(x,x) -> fabs(x)*sqrt(2).  */
  if (flag_unsafe_math_optimizations
      && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
    {
      const REAL_VALUE_TYPE sqrt2_trunc
	= real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
      return fold_build2_loc (loc, MULT_EXPR, type,
			      fold_build1_loc (loc, ABS_EXPR, type, arg0),
			      build_real (type, sqrt2_trunc));
    }

  return NULL_TREE;
}
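/* Illustrative sketch (not part of GCC): the unsafe-math identity used
   above.  hypot (x, x) == fabs (x) * sqrt (2) for finite x, but the
   rewrite can differ in the last ulp and in overflow behaviour, hence
   the flag_unsafe_math_optimizations guard.  Kept under #if 0; assumes
   <math.h>.  */
#if 0
#include <math.h>

double hypot_same_unfused (double x) { return hypot (x, x); }
double hypot_same_folded  (double x) { return fabs (x) * sqrt (2.0); }
#endif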
8495 /* Fold a builtin function call to pow, powf, or powl. Return
8496 NULL_TREE if no simplification can be made. */
8498 fold_builtin_pow (location_t loc
, tree fndecl
, tree arg0
, tree arg1
, tree type
)
8502 if (!validate_arg (arg0
, REAL_TYPE
)
8503 || !validate_arg (arg1
, REAL_TYPE
))
8506 /* Calculate the result when the argument is a constant. */
8507 if ((res
= do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_pow
)))
8510 /* Optimize pow(1.0,y) = 1.0. */
8511 if (real_onep (arg0
))
8512 return omit_one_operand_loc (loc
, type
, build_real (type
, dconst1
), arg1
);
8514 if (TREE_CODE (arg1
) == REAL_CST
8515 && !TREE_OVERFLOW (arg1
))
8517 REAL_VALUE_TYPE cint
;
8521 c
= TREE_REAL_CST (arg1
);
8523 /* Optimize pow(x,0.0) = 1.0. */
8524 if (REAL_VALUES_EQUAL (c
, dconst0
))
8525 return omit_one_operand_loc (loc
, type
, build_real (type
, dconst1
),
8528 /* Optimize pow(x,1.0) = x. */
8529 if (REAL_VALUES_EQUAL (c
, dconst1
))
8532 /* Optimize pow(x,-1.0) = 1.0/x. */
8533 if (REAL_VALUES_EQUAL (c
, dconstm1
))
8534 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
8535 build_real (type
, dconst1
), arg0
);
8537 /* Optimize pow(x,0.5) = sqrt(x). */
8538 if (flag_unsafe_math_optimizations
8539 && REAL_VALUES_EQUAL (c
, dconsthalf
))
8541 tree sqrtfn
= mathfn_built_in (type
, BUILT_IN_SQRT
);
8543 if (sqrtfn
!= NULL_TREE
)
8544 return build_call_expr_loc (loc
, sqrtfn
, 1, arg0
);
8547 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8548 if (flag_unsafe_math_optimizations
)
8550 const REAL_VALUE_TYPE dconstroot
8551 = real_value_truncate (TYPE_MODE (type
), dconst_third ());
8553 if (REAL_VALUES_EQUAL (c
, dconstroot
))
8555 tree cbrtfn
= mathfn_built_in (type
, BUILT_IN_CBRT
);
8556 if (cbrtfn
!= NULL_TREE
)
8557 return build_call_expr_loc (loc
, cbrtfn
, 1, arg0
);
8561 /* Check for an integer exponent. */
8562 n
= real_to_integer (&c
);
8563 real_from_integer (&cint
, VOIDmode
, n
, SIGNED
);
8564 if (real_identical (&c
, &cint
))
8566 /* Attempt to evaluate pow at compile-time, unless this should
8567 raise an exception. */
8568 if (TREE_CODE (arg0
) == REAL_CST
8569 && !TREE_OVERFLOW (arg0
)
8571 || (!flag_trapping_math
&& !flag_errno_math
)
8572 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0
), dconst0
)))
8577 x
= TREE_REAL_CST (arg0
);
8578 inexact
= real_powi (&x
, TYPE_MODE (type
), &x
, n
);
8579 if (flag_unsafe_math_optimizations
|| !inexact
)
8580 return build_real (type
, x
);
8583 /* Strip sign ops from even integer powers. */
8584 if ((n
& 1) == 0 && flag_unsafe_math_optimizations
)
8586 tree narg0
= fold_strip_sign_ops (arg0
);
8588 return build_call_expr_loc (loc
, fndecl
, 2, narg0
, arg1
);
8593 if (flag_unsafe_math_optimizations
)
8595 const enum built_in_function fcode
= builtin_mathfn_code (arg0
);
8597 /* Optimize pow(expN(x),y) = expN(x*y). */
8598 if (BUILTIN_EXPONENT_P (fcode
))
8600 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
8601 tree arg
= CALL_EXPR_ARG (arg0
, 0);
8602 arg
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg
, arg1
);
8603 return build_call_expr_loc (loc
, expfn
, 1, arg
);
8606 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8607 if (BUILTIN_SQRT_P (fcode
))
8609 tree narg0
= CALL_EXPR_ARG (arg0
, 0);
8610 tree narg1
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg1
,
8611 build_real (type
, dconsthalf
));
8612 return build_call_expr_loc (loc
, fndecl
, 2, narg0
, narg1
);
8615 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8616 if (BUILTIN_CBRT_P (fcode
))
8618 tree arg
= CALL_EXPR_ARG (arg0
, 0);
8619 if (tree_expr_nonnegative_p (arg
))
8621 const REAL_VALUE_TYPE dconstroot
8622 = real_value_truncate (TYPE_MODE (type
), dconst_third ());
8623 tree narg1
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg1
,
8624 build_real (type
, dconstroot
));
8625 return build_call_expr_loc (loc
, fndecl
, 2, arg
, narg1
);
8629 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8630 if (fcode
== BUILT_IN_POW
8631 || fcode
== BUILT_IN_POWF
8632 || fcode
== BUILT_IN_POWL
)
8634 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
8635 if (tree_expr_nonnegative_p (arg00
))
8637 tree arg01
= CALL_EXPR_ARG (arg0
, 1);
8638 tree narg1
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg01
, arg1
);
8639 return build_call_expr_loc (loc
, fndecl
, 2, arg00
, narg1
);
8647 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8648 Return NULL_TREE if no simplification can be made. */
8650 fold_builtin_powi (location_t loc
, tree fndecl ATTRIBUTE_UNUSED
,
8651 tree arg0
, tree arg1
, tree type
)
8653 if (!validate_arg (arg0
, REAL_TYPE
)
8654 || !validate_arg (arg1
, INTEGER_TYPE
))
8657 /* Optimize pow(1.0,y) = 1.0. */
8658 if (real_onep (arg0
))
8659 return omit_one_operand_loc (loc
, type
, build_real (type
, dconst1
), arg1
);
8661 if (tree_fits_shwi_p (arg1
))
8663 HOST_WIDE_INT c
= tree_to_shwi (arg1
);
8665 /* Evaluate powi at compile-time. */
8666 if (TREE_CODE (arg0
) == REAL_CST
8667 && !TREE_OVERFLOW (arg0
))
8670 x
= TREE_REAL_CST (arg0
);
8671 real_powi (&x
, TYPE_MODE (type
), &x
, c
);
8672 return build_real (type
, x
);
8675 /* Optimize pow(x,0) = 1.0. */
8677 return omit_one_operand_loc (loc
, type
, build_real (type
, dconst1
),
8680 /* Optimize pow(x,1) = x. */
8684 /* Optimize pow(x,-1) = 1.0/x. */
8686 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
8687 build_real (type
, dconst1
), arg0
);
8693 /* A subroutine of fold_builtin to fold the various exponent
8694 functions. Return NULL_TREE if no simplification can be made.
8695 FUNC is the corresponding MPFR exponent function. */
8698 fold_builtin_exponent (location_t loc
, tree fndecl
, tree arg
,
8699 int (*func
)(mpfr_ptr
, mpfr_srcptr
, mp_rnd_t
))
8701 if (validate_arg (arg
, REAL_TYPE
))
8703 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8706 /* Calculate the result when the argument is a constant. */
8707 if ((res
= do_mpfr_arg1 (arg
, type
, func
, NULL
, NULL
, 0)))
8710 /* Optimize expN(logN(x)) = x. */
8711 if (flag_unsafe_math_optimizations
)
8713 const enum built_in_function fcode
= builtin_mathfn_code (arg
);
8715 if ((func
== mpfr_exp
8716 && (fcode
== BUILT_IN_LOG
8717 || fcode
== BUILT_IN_LOGF
8718 || fcode
== BUILT_IN_LOGL
))
8719 || (func
== mpfr_exp2
8720 && (fcode
== BUILT_IN_LOG2
8721 || fcode
== BUILT_IN_LOG2F
8722 || fcode
== BUILT_IN_LOG2L
))
8723 || (func
== mpfr_exp10
8724 && (fcode
== BUILT_IN_LOG10
8725 || fcode
== BUILT_IN_LOG10F
8726 || fcode
== BUILT_IN_LOG10L
)))
8727 return fold_convert_loc (loc
, type
, CALL_EXPR_ARG (arg
, 0));
8734 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8735 arguments to the call, and TYPE is its return type.
8736 Return NULL_TREE if no simplification can be made. */
8739 fold_builtin_memchr (location_t loc
, tree arg1
, tree arg2
, tree len
, tree type
)
8741 if (!validate_arg (arg1
, POINTER_TYPE
)
8742 || !validate_arg (arg2
, INTEGER_TYPE
)
8743 || !validate_arg (len
, INTEGER_TYPE
))
8749 if (TREE_CODE (arg2
) != INTEGER_CST
8750 || !tree_fits_uhwi_p (len
))
8753 p1
= c_getstr (arg1
);
8754 if (p1
&& compare_tree_int (len
, strlen (p1
) + 1) <= 0)
8760 if (target_char_cast (arg2
, &c
))
8763 r
= (const char *) memchr (p1
, c
, tree_to_uhwi (len
));
8766 return build_int_cst (TREE_TYPE (arg1
), 0);
8768 tem
= fold_build_pointer_plus_hwi_loc (loc
, arg1
, r
- p1
);
8769 return fold_convert_loc (loc
, type
, tem
);
8775 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8776 Return NULL_TREE if no simplification can be made. */
8779 fold_builtin_memcmp (location_t loc
, tree arg1
, tree arg2
, tree len
)
8781 const char *p1
, *p2
;
8783 if (!validate_arg (arg1
, POINTER_TYPE
)
8784 || !validate_arg (arg2
, POINTER_TYPE
)
8785 || !validate_arg (len
, INTEGER_TYPE
))
8788 /* If the LEN parameter is zero, return zero. */
8789 if (integer_zerop (len
))
8790 return omit_two_operands_loc (loc
, integer_type_node
, integer_zero_node
,
8793 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8794 if (operand_equal_p (arg1
, arg2
, 0))
8795 return omit_one_operand_loc (loc
, integer_type_node
, integer_zero_node
, len
);
8797 p1
= c_getstr (arg1
);
8798 p2
= c_getstr (arg2
);
8800 /* If all arguments are constant, and the value of len is not greater
8801 than the lengths of arg1 and arg2, evaluate at compile-time. */
8802 if (tree_fits_uhwi_p (len
) && p1
&& p2
8803 && compare_tree_int (len
, strlen (p1
) + 1) <= 0
8804 && compare_tree_int (len
, strlen (p2
) + 1) <= 0)
8806 const int r
= memcmp (p1
, p2
, tree_to_uhwi (len
));
8809 return integer_one_node
;
8811 return integer_minus_one_node
;
8813 return integer_zero_node
;
8816 /* If len parameter is one, return an expression corresponding to
8817 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8818 if (tree_fits_uhwi_p (len
) && tree_to_uhwi (len
) == 1)
8820 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
8821 tree cst_uchar_ptr_node
8822 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
8825 = fold_convert_loc (loc
, integer_type_node
,
8826 build1 (INDIRECT_REF
, cst_uchar_node
,
8827 fold_convert_loc (loc
,
8831 = fold_convert_loc (loc
, integer_type_node
,
8832 build1 (INDIRECT_REF
, cst_uchar_node
,
8833 fold_convert_loc (loc
,
8836 return fold_build2_loc (loc
, MINUS_EXPR
, integer_type_node
, ind1
, ind2
);
8842 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8843 Return NULL_TREE if no simplification can be made. */
8846 fold_builtin_strcmp (location_t loc
, tree arg1
, tree arg2
)
8848 const char *p1
, *p2
;
8850 if (!validate_arg (arg1
, POINTER_TYPE
)
8851 || !validate_arg (arg2
, POINTER_TYPE
))
8854 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8855 if (operand_equal_p (arg1
, arg2
, 0))
8856 return integer_zero_node
;
8858 p1
= c_getstr (arg1
);
8859 p2
= c_getstr (arg2
);
8863 const int i
= strcmp (p1
, p2
);
8865 return integer_minus_one_node
;
8867 return integer_one_node
;
8869 return integer_zero_node
;
8872 /* If the second arg is "", return *(const unsigned char*)arg1. */
8873 if (p2
&& *p2
== '\0')
8875 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
8876 tree cst_uchar_ptr_node
8877 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
8879 return fold_convert_loc (loc
, integer_type_node
,
8880 build1 (INDIRECT_REF
, cst_uchar_node
,
8881 fold_convert_loc (loc
,
8886 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8887 if (p1
&& *p1
== '\0')
8889 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
8890 tree cst_uchar_ptr_node
8891 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
8894 = fold_convert_loc (loc
, integer_type_node
,
8895 build1 (INDIRECT_REF
, cst_uchar_node
,
8896 fold_convert_loc (loc
,
8899 return fold_build1_loc (loc
, NEGATE_EXPR
, integer_type_node
, temp
);
8905 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8906 Return NULL_TREE if no simplification can be made. */
8909 fold_builtin_strncmp (location_t loc
, tree arg1
, tree arg2
, tree len
)
8911 const char *p1
, *p2
;
8913 if (!validate_arg (arg1
, POINTER_TYPE
)
8914 || !validate_arg (arg2
, POINTER_TYPE
)
8915 || !validate_arg (len
, INTEGER_TYPE
))
8918 /* If the LEN parameter is zero, return zero. */
8919 if (integer_zerop (len
))
8920 return omit_two_operands_loc (loc
, integer_type_node
, integer_zero_node
,
8923 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8924 if (operand_equal_p (arg1
, arg2
, 0))
8925 return omit_one_operand_loc (loc
, integer_type_node
, integer_zero_node
, len
);
8927 p1
= c_getstr (arg1
);
8928 p2
= c_getstr (arg2
);
8930 if (tree_fits_uhwi_p (len
) && p1
&& p2
)
8932 const int i
= strncmp (p1
, p2
, tree_to_uhwi (len
));
8934 return integer_one_node
;
8936 return integer_minus_one_node
;
8938 return integer_zero_node
;
8941 /* If the second arg is "", and the length is greater than zero,
8942 return *(const unsigned char*)arg1. */
8943 if (p2
&& *p2
== '\0'
8944 && TREE_CODE (len
) == INTEGER_CST
8945 && tree_int_cst_sgn (len
) == 1)
8947 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
8948 tree cst_uchar_ptr_node
8949 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
8951 return fold_convert_loc (loc
, integer_type_node
,
8952 build1 (INDIRECT_REF
, cst_uchar_node
,
8953 fold_convert_loc (loc
,
8958 /* If the first arg is "", and the length is greater than zero,
8959 return -*(const unsigned char*)arg2. */
8960 if (p1
&& *p1
== '\0'
8961 && TREE_CODE (len
) == INTEGER_CST
8962 && tree_int_cst_sgn (len
) == 1)
8964 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
8965 tree cst_uchar_ptr_node
8966 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
8968 tree temp
= fold_convert_loc (loc
, integer_type_node
,
8969 build1 (INDIRECT_REF
, cst_uchar_node
,
8970 fold_convert_loc (loc
,
8973 return fold_build1_loc (loc
, NEGATE_EXPR
, integer_type_node
, temp
);
8976 /* If len parameter is one, return an expression corresponding to
8977 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8978 if (tree_fits_uhwi_p (len
) && tree_to_uhwi (len
) == 1)
8980 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
8981 tree cst_uchar_ptr_node
8982 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
8984 tree ind1
= fold_convert_loc (loc
, integer_type_node
,
8985 build1 (INDIRECT_REF
, cst_uchar_node
,
8986 fold_convert_loc (loc
,
8989 tree ind2
= fold_convert_loc (loc
, integer_type_node
,
8990 build1 (INDIRECT_REF
, cst_uchar_node
,
8991 fold_convert_loc (loc
,
8994 return fold_build2_loc (loc
, MINUS_EXPR
, integer_type_node
, ind1
, ind2
);
/* Fold function call to builtin signbit, signbitf or signbitl with argument
   ARG.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_signbit (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* If ARG is a compile-time constant, determine the result.  */
  if (TREE_CODE (arg) == REAL_CST
      && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE c;

      c = TREE_REAL_CST (arg);
      return (REAL_VALUE_NEGATIVE (c)
	      ? build_one_cst (type)
	      : build_zero_cst (type));
    }

  /* If ARG is non-negative, the result is always zero.  */
  if (tree_expr_nonnegative_p (arg))
    return omit_one_operand_loc (loc, type, integer_zero_node, arg);

  /* If ARG's format doesn't have signed zeros, return "arg < 0.0".  */
  if (!HONOR_SIGNED_ZEROS (arg))
    return fold_convert (type,
			 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
					  build_real (TREE_TYPE (arg),
						      dconst0)));

  return NULL_TREE;
}
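/* Illustrative sketch (not part of GCC): why the "arg < 0.0" rewrite above
   is guarded by !HONOR_SIGNED_ZEROS.  With IEEE signed zeros the two forms
   disagree for -0.0: signbit (-0.0) is nonzero while (-0.0 < 0.0) is false.
   Kept under #if 0; assumes <math.h>.  */
#if 0
#include <math.h>

int signbit_exact  (double x) { return signbit (x) != 0; }
int signbit_folded (double x) { return x < 0.0; }	/* wrong for -0.0 */
#endif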
/* Fold function call to builtin copysign, copysignf or copysignl with
   arguments ARG1 and ARG2.  Return NULL_TREE if no simplification can
   be made.  */

static tree
fold_builtin_copysign (location_t loc, tree fndecl,
		       tree arg1, tree arg2, tree type)
{
  tree tem;

  if (!validate_arg (arg1, REAL_TYPE)
      || !validate_arg (arg2, REAL_TYPE))
    return NULL_TREE;

  /* copysign(X,X) is X.  */
  if (operand_equal_p (arg1, arg2, 0))
    return fold_convert_loc (loc, type, arg1);

  /* If ARG1 and ARG2 are compile-time constants, determine the result.  */
  if (TREE_CODE (arg1) == REAL_CST
      && TREE_CODE (arg2) == REAL_CST
      && !TREE_OVERFLOW (arg1)
      && !TREE_OVERFLOW (arg2))
    {
      REAL_VALUE_TYPE c1, c2;

      c1 = TREE_REAL_CST (arg1);
      c2 = TREE_REAL_CST (arg2);
      /* c1.sign := c2.sign.  */
      real_copysign (&c1, &c2);
      return build_real (type, c1);
    }

  /* copysign(X, Y) is fabs(X) when Y is always non-negative.
     Remember to evaluate Y for side-effects.  */
  if (tree_expr_nonnegative_p (arg2))
    return omit_one_operand_loc (loc, type,
				 fold_build1_loc (loc, ABS_EXPR, type, arg1),
				 arg2);

  /* Strip sign changing operations for the first argument.  */
  tem = fold_strip_sign_ops (arg1);
  if (tem)
    return build_call_expr_loc (loc, fndecl, 2, tem, arg2);

  return NULL_TREE;
}
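/* Illustrative sketch (not part of GCC): the fold used above.  When the
   second operand is known to be non-negative, copysign (x, y) is just
   fabs (x); y must still be evaluated for side effects, which is what
   omit_one_operand_loc arranges.  Kept under #if 0; assumes <math.h>.  */
#if 0
#include <math.h>

double
copysign_folded (double x, double y_nonneg)
{
  (void) y_nonneg;		/* evaluated only for side effects */
  return fabs (x);
}
#endif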
/* Fold a call to builtin isascii with argument ARG.  */

static tree
fold_builtin_isascii (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      /* Transform isascii(c) -> ((c & ~0x7f) == 0).  */
      arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
			 build_int_cst (integer_type_node,
					~ (unsigned HOST_WIDE_INT) 0x7f));
      return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
			      arg, integer_zero_node);
    }
}
/* Fold a call to builtin toascii with argument ARG.  */

static tree
fold_builtin_toascii (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Transform toascii(c) -> (c & 0x7f).  */
  return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
			  build_int_cst (integer_type_node, 0x7f));
}
/* Fold a call to builtin isdigit with argument ARG.  */

static tree
fold_builtin_isdigit (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9.  */
      /* According to the C standard, isdigit is unaffected by locale.
	 However, it definitely is affected by the target character set.  */
      unsigned HOST_WIDE_INT target_digit0
	= lang_hooks.to_target_charset ('0');

      if (target_digit0 == 0)
	return NULL_TREE;

      arg = fold_convert_loc (loc, unsigned_type_node, arg);
      arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
			 build_int_cst (unsigned_type_node, target_digit0));
      return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
			      build_int_cst (unsigned_type_node, 9));
    }
}
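/* Illustrative sketch (not part of GCC): the range-check trick used above.
   For a character set whose digits are contiguous starting at '0',
   isdigit (c) reduces to one unsigned comparison, because the subtraction
   wraps values below '0' around to large unsigned numbers.  Kept under
   #if 0.  */
#if 0
static int
isdigit_folded (int c)
{
  return (unsigned int) c - '0' <= 9u;
}
#endif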
/* Fold a call to fabs, fabsf or fabsl with argument ARG.  */

static tree
fold_builtin_fabs (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  arg = fold_convert_loc (loc, type, arg);
  if (TREE_CODE (arg) == REAL_CST)
    return fold_abs_const (arg, type);
  return fold_build1_loc (loc, ABS_EXPR, type, arg);
}
/* Fold a call to abs, labs, llabs or imaxabs with argument ARG.  */

static tree
fold_builtin_abs (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  arg = fold_convert_loc (loc, type, arg);
  if (TREE_CODE (arg) == INTEGER_CST)
    return fold_abs_const (arg, type);
  return fold_build1_loc (loc, ABS_EXPR, type, arg);
}
/* Fold a fma operation with arguments ARG[012].  */

tree
fold_fma (location_t loc ATTRIBUTE_UNUSED,
	  tree type, tree arg0, tree arg1, tree arg2)
{
  if (TREE_CODE (arg0) == REAL_CST
      && TREE_CODE (arg1) == REAL_CST
      && TREE_CODE (arg2) == REAL_CST)
    return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);

  return NULL_TREE;
}
/* Fold a call to fma, fmaf, or fmal with arguments ARG[012].  */

static tree
fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
{
  if (validate_arg (arg0, REAL_TYPE)
      && validate_arg (arg1, REAL_TYPE)
      && validate_arg (arg2, REAL_TYPE))
    {
      tree tem = fold_fma (loc, type, arg0, arg1, arg2);
      if (tem)
	return tem;

      /* ??? Only expand to FMA_EXPR if it's directly supported.  */
      if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
	return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
    }
  return NULL_TREE;
}
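/* Illustrative sketch (not part of GCC): what FMA_EXPR buys.  fma (a, b, c)
   rounds only once, so it can differ from a * b + c, which rounds the
   product before the addition; that is why the fold above is only done when
   the target has a real fma instruction (fma_optab).  Kept under #if 0;
   assumes <math.h>.  */
#if 0
#include <math.h>

double fused   (double a, double b, double c) { return fma (a, b, c); }
double unfused (double a, double b, double c) { return a * b + c; }
#endif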
9201 /* Fold a call to builtin fmin or fmax. */
9204 fold_builtin_fmin_fmax (location_t loc
, tree arg0
, tree arg1
,
9205 tree type
, bool max
)
9207 if (validate_arg (arg0
, REAL_TYPE
) && validate_arg (arg1
, REAL_TYPE
))
9209 /* Calculate the result when the argument is a constant. */
9210 tree res
= do_mpfr_arg2 (arg0
, arg1
, type
, (max
? mpfr_max
: mpfr_min
));
9215 /* If either argument is NaN, return the other one. Avoid the
9216 transformation if we get (and honor) a signalling NaN. Using
9217 omit_one_operand() ensures we create a non-lvalue. */
9218 if (TREE_CODE (arg0
) == REAL_CST
9219 && real_isnan (&TREE_REAL_CST (arg0
))
9220 && (! HONOR_SNANS (arg0
)
9221 || ! TREE_REAL_CST (arg0
).signalling
))
9222 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
9223 if (TREE_CODE (arg1
) == REAL_CST
9224 && real_isnan (&TREE_REAL_CST (arg1
))
9225 && (! HONOR_SNANS (arg1
)
9226 || ! TREE_REAL_CST (arg1
).signalling
))
9227 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
9229 /* Transform fmin/fmax(x,x) -> x. */
9230 if (operand_equal_p (arg0
, arg1
, OEP_PURE_SAME
))
9231 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
9233 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9234 functions to return the numeric arg if the other one is NaN.
9235 These tree codes don't honor that, so only transform if
9236 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9237 handled, so we don't have to worry about it either. */
9238 if (flag_finite_math_only
)
9239 return fold_build2_loc (loc
, (max
? MAX_EXPR
: MIN_EXPR
), type
,
9240 fold_convert_loc (loc
, type
, arg0
),
9241 fold_convert_loc (loc
, type
, arg1
));
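/* Illustrative sketch (not part of GCC): why the MIN_EXPR/MAX_EXPR rewrite
   above needs -ffinite-math-only.  C99 fmin/fmax return the numeric operand
   when the other one is a NaN, whereas a plain comparison-based minimum
   does not, because any comparison with NaN is false.  Kept under #if 0;
   assumes <math.h>.  */
#if 0
#include <math.h>

double fmin_c99    (double x, double y) { return fmin (x, y); }   /* fmin (NAN, 1.0) == 1.0 */
double fmin_folded (double x, double y) { return x < y ? x : y; } /* returns y when x is NaN */
#endif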
/* Fold a call to builtin carg(a+bi) -> atan2(b,a).  */

static tree
fold_builtin_carg (location_t loc, tree arg, tree type)
{
  if (validate_arg (arg, COMPLEX_TYPE)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
    {
      tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);

      if (atan2_fn)
	{
	  tree new_arg = builtin_save_expr (arg);
	  tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
	  tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
	  return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
	}
    }

  return NULL_TREE;
}
9268 /* Fold a call to builtin logb/ilogb. */
9271 fold_builtin_logb (location_t loc
, tree arg
, tree rettype
)
9273 if (! validate_arg (arg
, REAL_TYPE
))
9278 if (TREE_CODE (arg
) == REAL_CST
&& ! TREE_OVERFLOW (arg
))
9280 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg
);
9286 /* If arg is Inf or NaN and we're logb, return it. */
9287 if (TREE_CODE (rettype
) == REAL_TYPE
)
9289 /* For logb(-Inf) we have to return +Inf. */
9290 if (real_isinf (value
) && real_isneg (value
))
9292 REAL_VALUE_TYPE tem
;
9294 return build_real (rettype
, tem
);
9296 return fold_convert_loc (loc
, rettype
, arg
);
9298 /* Fall through... */
9300 /* Zero may set errno and/or raise an exception for logb, also
9301 for ilogb we don't know FP_ILOGB0. */
9304 /* For normal numbers, proceed iff radix == 2. In GCC,
9305 normalized significands are in the range [0.5, 1.0). We
9306 want the exponent as if they were [1.0, 2.0) so get the
9307 exponent and subtract 1. */
9308 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg
)))->b
== 2)
9309 return fold_convert_loc (loc
, rettype
,
9310 build_int_cst (integer_type_node
,
9311 REAL_EXP (value
)-1));
9319 /* Fold a call to builtin significand, if radix == 2. */
9322 fold_builtin_significand (location_t loc
, tree arg
, tree rettype
)
9324 if (! validate_arg (arg
, REAL_TYPE
))
9329 if (TREE_CODE (arg
) == REAL_CST
&& ! TREE_OVERFLOW (arg
))
9331 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg
);
9338 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9339 return fold_convert_loc (loc
, rettype
, arg
);
9341 /* For normal numbers, proceed iff radix == 2. */
9342 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg
)))->b
== 2)
9344 REAL_VALUE_TYPE result
= *value
;
9345 /* In GCC, normalized significands are in the range [0.5,
9346 1.0). We want them to be [1.0, 2.0) so set the
9348 SET_REAL_EXP (&result
, 1);
9349 return build_real (rettype
, result
);
9358 /* Fold a call to builtin frexp, we can assume the base is 2. */
9361 fold_builtin_frexp (location_t loc
, tree arg0
, tree arg1
, tree rettype
)
9363 if (! validate_arg (arg0
, REAL_TYPE
) || ! validate_arg (arg1
, POINTER_TYPE
))
9368 if (!(TREE_CODE (arg0
) == REAL_CST
&& ! TREE_OVERFLOW (arg0
)))
9371 arg1
= build_fold_indirect_ref_loc (loc
, arg1
);
9373 /* Proceed if a valid pointer type was passed in. */
9374 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1
)) == integer_type_node
)
9376 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg0
);
9382 /* For +-0, return (*exp = 0, +-0). */
9383 exp
= integer_zero_node
;
9388 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9389 return omit_one_operand_loc (loc
, rettype
, arg0
, arg1
);
9392 /* Since the frexp function always expects base 2, and in
9393 GCC normalized significands are already in the range
9394 [0.5, 1.0), we have exactly what frexp wants. */
9395 REAL_VALUE_TYPE frac_rvt
= *value
;
9396 SET_REAL_EXP (&frac_rvt
, 0);
9397 frac
= build_real (rettype
, frac_rvt
);
9398 exp
= build_int_cst (integer_type_node
, REAL_EXP (value
));
9405 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9406 arg1
= fold_build2_loc (loc
, MODIFY_EXPR
, rettype
, arg1
, exp
);
9407 TREE_SIDE_EFFECTS (arg1
) = 1;
9408 return fold_build2_loc (loc
, COMPOUND_EXPR
, rettype
, arg1
, frac
);
9414 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9415 then we can assume the base is two. If it's false, then we have to
9416 check the mode of the TYPE parameter in certain cases. */
9419 fold_builtin_load_exponent (location_t loc
, tree arg0
, tree arg1
,
9420 tree type
, bool ldexp
)
9422 if (validate_arg (arg0
, REAL_TYPE
) && validate_arg (arg1
, INTEGER_TYPE
))
9427 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9428 if (real_zerop (arg0
) || integer_zerop (arg1
)
9429 || (TREE_CODE (arg0
) == REAL_CST
9430 && !real_isfinite (&TREE_REAL_CST (arg0
))))
9431 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
9433 /* If both arguments are constant, then try to evaluate it. */
9434 if ((ldexp
|| REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2)
9435 && TREE_CODE (arg0
) == REAL_CST
&& !TREE_OVERFLOW (arg0
)
9436 && tree_fits_shwi_p (arg1
))
9438 /* Bound the maximum adjustment to twice the range of the
9439 mode's valid exponents. Use abs to ensure the range is
9440 positive as a sanity check. */
9441 const long max_exp_adj
= 2 *
9442 labs (REAL_MODE_FORMAT (TYPE_MODE (type
))->emax
9443 - REAL_MODE_FORMAT (TYPE_MODE (type
))->emin
);
9445 /* Get the user-requested adjustment. */
9446 const HOST_WIDE_INT req_exp_adj
= tree_to_shwi (arg1
);
9448 /* The requested adjustment must be inside this range. This
9449 is a preliminary cap to avoid things like overflow, we
9450 may still fail to compute the result for other reasons. */
9451 if (-max_exp_adj
< req_exp_adj
&& req_exp_adj
< max_exp_adj
)
9453 REAL_VALUE_TYPE initial_result
;
9455 real_ldexp (&initial_result
, &TREE_REAL_CST (arg0
), req_exp_adj
);
9457 /* Ensure we didn't overflow. */
9458 if (! real_isinf (&initial_result
))
9460 const REAL_VALUE_TYPE trunc_result
9461 = real_value_truncate (TYPE_MODE (type
), initial_result
);
9463 /* Only proceed if the target mode can hold the
9465 if (REAL_VALUES_EQUAL (initial_result
, trunc_result
))
9466 return build_real (type
, trunc_result
);
9475 /* Fold a call to builtin modf. */
9478 fold_builtin_modf (location_t loc
, tree arg0
, tree arg1
, tree rettype
)
9480 if (! validate_arg (arg0
, REAL_TYPE
) || ! validate_arg (arg1
, POINTER_TYPE
))
9485 if (!(TREE_CODE (arg0
) == REAL_CST
&& ! TREE_OVERFLOW (arg0
)))
9488 arg1
= build_fold_indirect_ref_loc (loc
, arg1
);
9490 /* Proceed if a valid pointer type was passed in. */
9491 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1
)) == TYPE_MAIN_VARIANT (rettype
))
9493 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg0
);
9494 REAL_VALUE_TYPE trunc
, frac
;
9500 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9501 trunc
= frac
= *value
;
9504 /* For +-Inf, return (*arg1 = arg0, +-0). */
9506 frac
.sign
= value
->sign
;
9510 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9511 real_trunc (&trunc
, VOIDmode
, value
);
9512 real_arithmetic (&frac
, MINUS_EXPR
, value
, &trunc
);
9513 /* If the original number was negative and already
9514 integral, then the fractional part is -0.0. */
9515 if (value
->sign
&& frac
.cl
== rvc_zero
)
9516 frac
.sign
= value
->sign
;
9520 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9521 arg1
= fold_build2_loc (loc
, MODIFY_EXPR
, rettype
, arg1
,
9522 build_real (rettype
, trunc
));
9523 TREE_SIDE_EFFECTS (arg1
) = 1;
9524 return fold_build2_loc (loc
, COMPOUND_EXPR
, rettype
, arg1
,
9525 build_real (rettype
, frac
));
9531 /* Given a location LOC, an interclass builtin function decl FNDECL
9532 and its single argument ARG, return an folded expression computing
9533 the same, or NULL_TREE if we either couldn't or didn't want to fold
9534 (the latter happen if there's an RTL instruction available). */
9537 fold_builtin_interclass_mathfn (location_t loc
, tree fndecl
, tree arg
)
9541 if (!validate_arg (arg
, REAL_TYPE
))
9544 if (interclass_mathfn_icode (arg
, fndecl
) != CODE_FOR_nothing
)
9547 mode
= TYPE_MODE (TREE_TYPE (arg
));
9549 /* If there is no optab, try generic code. */
9550 switch (DECL_FUNCTION_CODE (fndecl
))
9554 CASE_FLT_FN (BUILT_IN_ISINF
):
9556 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9557 tree
const isgr_fn
= builtin_decl_explicit (BUILT_IN_ISGREATER
);
9558 tree
const type
= TREE_TYPE (arg
);
9562 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
9563 real_from_string (&r
, buf
);
9564 result
= build_call_expr (isgr_fn
, 2,
9565 fold_build1_loc (loc
, ABS_EXPR
, type
, arg
),
9566 build_real (type
, r
));
9569 CASE_FLT_FN (BUILT_IN_FINITE
):
9570 case BUILT_IN_ISFINITE
:
9572 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9573 tree
const isle_fn
= builtin_decl_explicit (BUILT_IN_ISLESSEQUAL
);
9574 tree
const type
= TREE_TYPE (arg
);
9578 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
9579 real_from_string (&r
, buf
);
9580 result
= build_call_expr (isle_fn
, 2,
9581 fold_build1_loc (loc
, ABS_EXPR
, type
, arg
),
9582 build_real (type
, r
));
9583 /*result = fold_build2_loc (loc, UNGT_EXPR,
9584 TREE_TYPE (TREE_TYPE (fndecl)),
9585 fold_build1_loc (loc, ABS_EXPR, type, arg),
9586 build_real (type, r));
9587 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9588 TREE_TYPE (TREE_TYPE (fndecl)),
9592 case BUILT_IN_ISNORMAL
:
9594 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9595 islessequal(fabs(x),DBL_MAX). */
9596 tree
const isle_fn
= builtin_decl_explicit (BUILT_IN_ISLESSEQUAL
);
9597 tree
const isge_fn
= builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL
);
9598 tree
const type
= TREE_TYPE (arg
);
9599 REAL_VALUE_TYPE rmax
, rmin
;
9602 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
9603 real_from_string (&rmax
, buf
);
9604 sprintf (buf
, "0x1p%d", REAL_MODE_FORMAT (mode
)->emin
- 1);
9605 real_from_string (&rmin
, buf
);
9606 arg
= builtin_save_expr (fold_build1_loc (loc
, ABS_EXPR
, type
, arg
));
9607 result
= build_call_expr (isle_fn
, 2, arg
,
9608 build_real (type
, rmax
));
9609 result
= fold_build2 (BIT_AND_EXPR
, integer_type_node
, result
,
9610 build_call_expr (isge_fn
, 2, arg
,
9611 build_real (type
, rmin
)));
9621 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9622 ARG is the argument for the call. */
9625 fold_builtin_classify (location_t loc
, tree fndecl
, tree arg
, int builtin_index
)
9627 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9630 if (!validate_arg (arg
, REAL_TYPE
))
9633 switch (builtin_index
)
9635 case BUILT_IN_ISINF
:
9636 if (!HONOR_INFINITIES (arg
))
9637 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
9639 if (TREE_CODE (arg
) == REAL_CST
)
9641 r
= TREE_REAL_CST (arg
);
9642 if (real_isinf (&r
))
9643 return real_compare (GT_EXPR
, &r
, &dconst0
)
9644 ? integer_one_node
: integer_minus_one_node
;
9646 return integer_zero_node
;
9651 case BUILT_IN_ISINF_SIGN
:
9653 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9654 /* In a boolean context, GCC will fold the inner COND_EXPR to
9655 1. So e.g. "if (isinf_sign(x))" would be folded to just
9656 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9657 tree signbit_fn
= mathfn_built_in_1 (TREE_TYPE (arg
), BUILT_IN_SIGNBIT
, 0);
9658 tree isinf_fn
= builtin_decl_explicit (BUILT_IN_ISINF
);
9659 tree tmp
= NULL_TREE
;
9661 arg
= builtin_save_expr (arg
);
9663 if (signbit_fn
&& isinf_fn
)
9665 tree signbit_call
= build_call_expr_loc (loc
, signbit_fn
, 1, arg
);
9666 tree isinf_call
= build_call_expr_loc (loc
, isinf_fn
, 1, arg
);
9668 signbit_call
= fold_build2_loc (loc
, NE_EXPR
, integer_type_node
,
9669 signbit_call
, integer_zero_node
);
9670 isinf_call
= fold_build2_loc (loc
, NE_EXPR
, integer_type_node
,
9671 isinf_call
, integer_zero_node
);
9673 tmp
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, signbit_call
,
9674 integer_minus_one_node
, integer_one_node
);
9675 tmp
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
,
9683 case BUILT_IN_ISFINITE
:
9684 if (!HONOR_NANS (arg
)
9685 && !HONOR_INFINITIES (arg
))
9686 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg
);
9688 if (TREE_CODE (arg
) == REAL_CST
)
9690 r
= TREE_REAL_CST (arg
);
9691 return real_isfinite (&r
) ? integer_one_node
: integer_zero_node
;
9696 case BUILT_IN_ISNAN
:
9697 if (!HONOR_NANS (arg
))
9698 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
9700 if (TREE_CODE (arg
) == REAL_CST
)
9702 r
= TREE_REAL_CST (arg
);
9703 return real_isnan (&r
) ? integer_one_node
: integer_zero_node
;
9706 arg
= builtin_save_expr (arg
);
9707 return fold_build2_loc (loc
, UNORDERED_EXPR
, type
, arg
, arg
);
9714 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9715 This builtin will generate code to return the appropriate floating
9716 point classification depending on the value of the floating point
9717 number passed in. The possible return values must be supplied as
9718 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9719 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
9720 one floating point argument which is "type generic". */
9723 fold_builtin_fpclassify (location_t loc
, tree
*args
, int nargs
)
9725 tree fp_nan
, fp_infinite
, fp_normal
, fp_subnormal
, fp_zero
,
9726 arg
, type
, res
, tmp
;
9731 /* Verify the required arguments in the original call. */
9733 || !validate_arg (args
[0], INTEGER_TYPE
)
9734 || !validate_arg (args
[1], INTEGER_TYPE
)
9735 || !validate_arg (args
[2], INTEGER_TYPE
)
9736 || !validate_arg (args
[3], INTEGER_TYPE
)
9737 || !validate_arg (args
[4], INTEGER_TYPE
)
9738 || !validate_arg (args
[5], REAL_TYPE
))
9742 fp_infinite
= args
[1];
9743 fp_normal
= args
[2];
9744 fp_subnormal
= args
[3];
9747 type
= TREE_TYPE (arg
);
9748 mode
= TYPE_MODE (type
);
9749 arg
= builtin_save_expr (fold_build1_loc (loc
, ABS_EXPR
, type
, arg
));
9753 (fabs(x) == Inf ? FP_INFINITE :
9754 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9755 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9757 tmp
= fold_build2_loc (loc
, EQ_EXPR
, integer_type_node
, arg
,
9758 build_real (type
, dconst0
));
9759 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
,
9760 tmp
, fp_zero
, fp_subnormal
);
9762 sprintf (buf
, "0x1p%d", REAL_MODE_FORMAT (mode
)->emin
- 1);
9763 real_from_string (&r
, buf
);
9764 tmp
= fold_build2_loc (loc
, GE_EXPR
, integer_type_node
,
9765 arg
, build_real (type
, r
));
9766 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, tmp
, fp_normal
, res
);
9768 if (HONOR_INFINITIES (mode
))
9771 tmp
= fold_build2_loc (loc
, EQ_EXPR
, integer_type_node
, arg
,
9772 build_real (type
, r
));
9773 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, tmp
,
9777 if (HONOR_NANS (mode
))
9779 tmp
= fold_build2_loc (loc
, ORDERED_EXPR
, integer_type_node
, arg
, arg
);
9780 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, tmp
, res
, fp_nan
);
9786 /* Fold a call to an unordered comparison function such as
9787 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9788 being called and ARG0 and ARG1 are the arguments for the call.
9789 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9790 the opposite of the desired result. UNORDERED_CODE is used
9791 for modes that can hold NaNs and ORDERED_CODE is used for
9795 fold_builtin_unordered_cmp (location_t loc
, tree fndecl
, tree arg0
, tree arg1
,
9796 enum tree_code unordered_code
,
9797 enum tree_code ordered_code
)
9799 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9800 enum tree_code code
;
9802 enum tree_code code0
, code1
;
9803 tree cmp_type
= NULL_TREE
;
9805 type0
= TREE_TYPE (arg0
);
9806 type1
= TREE_TYPE (arg1
);
9808 code0
= TREE_CODE (type0
);
9809 code1
= TREE_CODE (type1
);
9811 if (code0
== REAL_TYPE
&& code1
== REAL_TYPE
)
9812 /* Choose the wider of two real types. */
9813 cmp_type
= TYPE_PRECISION (type0
) >= TYPE_PRECISION (type1
)
9815 else if (code0
== REAL_TYPE
&& code1
== INTEGER_TYPE
)
9817 else if (code0
== INTEGER_TYPE
&& code1
== REAL_TYPE
)
9820 arg0
= fold_convert_loc (loc
, cmp_type
, arg0
);
9821 arg1
= fold_convert_loc (loc
, cmp_type
, arg1
);
9823 if (unordered_code
== UNORDERED_EXPR
)
9825 if (!HONOR_NANS (arg0
))
9826 return omit_two_operands_loc (loc
, type
, integer_zero_node
, arg0
, arg1
);
9827 return fold_build2_loc (loc
, UNORDERED_EXPR
, type
, arg0
, arg1
);
9830 code
= HONOR_NANS (arg0
) ? unordered_code
: ordered_code
;
9831 return fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
,
9832 fold_build2_loc (loc
, code
, type
, arg0
, arg1
));
9835 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9836 arithmetics if it can never overflow, or into internal functions that
9837 return both result of arithmetics and overflowed boolean flag in
9838 a complex integer result, or some other check for overflow. */
9841 fold_builtin_arith_overflow (location_t loc
, enum built_in_function fcode
,
9842 tree arg0
, tree arg1
, tree arg2
)
9844 enum internal_fn ifn
= IFN_LAST
;
9845 tree type
= TREE_TYPE (TREE_TYPE (arg2
));
9846 tree mem_arg2
= build_fold_indirect_ref_loc (loc
, arg2
);
9849 case BUILT_IN_ADD_OVERFLOW
:
9850 case BUILT_IN_SADD_OVERFLOW
:
9851 case BUILT_IN_SADDL_OVERFLOW
:
9852 case BUILT_IN_SADDLL_OVERFLOW
:
9853 case BUILT_IN_UADD_OVERFLOW
:
9854 case BUILT_IN_UADDL_OVERFLOW
:
9855 case BUILT_IN_UADDLL_OVERFLOW
:
9856 ifn
= IFN_ADD_OVERFLOW
;
9858 case BUILT_IN_SUB_OVERFLOW
:
9859 case BUILT_IN_SSUB_OVERFLOW
:
9860 case BUILT_IN_SSUBL_OVERFLOW
:
9861 case BUILT_IN_SSUBLL_OVERFLOW
:
9862 case BUILT_IN_USUB_OVERFLOW
:
9863 case BUILT_IN_USUBL_OVERFLOW
:
9864 case BUILT_IN_USUBLL_OVERFLOW
:
9865 ifn
= IFN_SUB_OVERFLOW
;
9867 case BUILT_IN_MUL_OVERFLOW
:
9868 case BUILT_IN_SMUL_OVERFLOW
:
9869 case BUILT_IN_SMULL_OVERFLOW
:
9870 case BUILT_IN_SMULLL_OVERFLOW
:
9871 case BUILT_IN_UMUL_OVERFLOW
:
9872 case BUILT_IN_UMULL_OVERFLOW
:
9873 case BUILT_IN_UMULLL_OVERFLOW
:
9874 ifn
= IFN_MUL_OVERFLOW
;
9879 tree ctype
= build_complex_type (type
);
9880 tree call
= build_call_expr_internal_loc (loc
, ifn
, ctype
,
9882 tree tgt
= save_expr (call
);
9883 tree intres
= build1_loc (loc
, REALPART_EXPR
, type
, tgt
);
9884 tree ovfres
= build1_loc (loc
, IMAGPART_EXPR
, type
, tgt
);
9885 ovfres
= fold_convert_loc (loc
, boolean_type_node
, ovfres
);
9887 = fold_build2_loc (loc
, MODIFY_EXPR
, void_type_node
, mem_arg2
, intres
);
9888 return build2_loc (loc
, COMPOUND_EXPR
, boolean_type_node
, store
, ovfres
);
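/* Illustrative sketch (not part of GCC): how the overflow builtins folded
   above are used from C.  Each returns the overflow flag and stores the
   (wrapped) arithmetic result through the last argument; that result/flag
   pair is what the IFN_*_OVERFLOW internal functions model as a complex
   integer value.  Kept under #if 0; assumes <stdbool.h>.  */
#if 0
#include <stdbool.h>

static bool
checked_add (int a, int b, int *sum)
{
  return __builtin_add_overflow (a, b, sum);	/* true iff a + b overflowed */
}
#endif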
9891 /* Fold a call to built-in function FNDECL with 0 arguments.
9892 This function returns NULL_TREE if no simplification was possible. */
9895 fold_builtin_0 (location_t loc
, tree fndecl
)
9897 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9898 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
9901 CASE_FLT_FN (BUILT_IN_INF
):
9902 case BUILT_IN_INFD32
:
9903 case BUILT_IN_INFD64
:
9904 case BUILT_IN_INFD128
:
9905 return fold_builtin_inf (loc
, type
, true);
9907 CASE_FLT_FN (BUILT_IN_HUGE_VAL
):
9908 return fold_builtin_inf (loc
, type
, false);
9910 case BUILT_IN_CLASSIFY_TYPE
:
9911 return fold_builtin_classify_type (NULL_TREE
);
/* Fold a call to built-in function FNDECL with 1 argument, ARG0.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    case BUILT_IN_CONSTANT_P:
      {
        tree val = fold_builtin_constant_p (arg0);

        /* Gimplification will pull the CALL_EXPR for the builtin out of
           an if condition.  When not optimizing, we'll not CSE it back.
           To avoid link error types of regressions, return false now.  */
        if (!val && !optimize)
          val = integer_zero_node;

        return val;
      }

    case BUILT_IN_CLASSIFY_TYPE:
      return fold_builtin_classify_type (arg0);

    case BUILT_IN_STRLEN:
      return fold_builtin_strlen (loc, type, arg0);

    CASE_FLT_FN (BUILT_IN_FABS):
    case BUILT_IN_FABSD32:
    case BUILT_IN_FABSD64:
    case BUILT_IN_FABSD128:
      return fold_builtin_fabs (loc, arg0, type);

    case BUILT_IN_LLABS:
    case BUILT_IN_IMAXABS:
      return fold_builtin_abs (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_CONJ):
      if (validate_arg (arg0, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
        return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
      break;

    CASE_FLT_FN (BUILT_IN_CREAL):
      if (validate_arg (arg0, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
        return non_lvalue_loc (loc,
                               fold_build1_loc (loc, REALPART_EXPR, type, arg0));
      break;

    CASE_FLT_FN (BUILT_IN_CIMAG):
      if (validate_arg (arg0, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
        return non_lvalue_loc (loc,
                               fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
      break;

    CASE_FLT_FN (BUILT_IN_CCOS):
      return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);

    CASE_FLT_FN (BUILT_IN_CCOSH):
      return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);

    CASE_FLT_FN (BUILT_IN_CPROJ):
      return fold_builtin_cproj (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_CSIN):
      if (validate_arg (arg0, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
        return do_mpc_arg1 (arg0, type, mpc_sin);
      break;

    CASE_FLT_FN (BUILT_IN_CSINH):
      if (validate_arg (arg0, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
        return do_mpc_arg1 (arg0, type, mpc_sinh);
      break;

    CASE_FLT_FN (BUILT_IN_CTAN):
      if (validate_arg (arg0, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
        return do_mpc_arg1 (arg0, type, mpc_tan);
      break;

    CASE_FLT_FN (BUILT_IN_CTANH):
      if (validate_arg (arg0, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
        return do_mpc_arg1 (arg0, type, mpc_tanh);
      break;

    CASE_FLT_FN (BUILT_IN_CLOG):
      if (validate_arg (arg0, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
        return do_mpc_arg1 (arg0, type, mpc_log);
      break;

    CASE_FLT_FN (BUILT_IN_CSQRT):
      if (validate_arg (arg0, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
        return do_mpc_arg1 (arg0, type, mpc_sqrt);
      break;

    CASE_FLT_FN (BUILT_IN_CASIN):
      if (validate_arg (arg0, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
        return do_mpc_arg1 (arg0, type, mpc_asin);
      break;

    CASE_FLT_FN (BUILT_IN_CACOS):
      if (validate_arg (arg0, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
        return do_mpc_arg1 (arg0, type, mpc_acos);
      break;

    CASE_FLT_FN (BUILT_IN_CATAN):
      if (validate_arg (arg0, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
        return do_mpc_arg1 (arg0, type, mpc_atan);
      break;

    CASE_FLT_FN (BUILT_IN_CASINH):
      if (validate_arg (arg0, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
        return do_mpc_arg1 (arg0, type, mpc_asinh);
      break;

    CASE_FLT_FN (BUILT_IN_CACOSH):
      if (validate_arg (arg0, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
        return do_mpc_arg1 (arg0, type, mpc_acosh);
      break;

    CASE_FLT_FN (BUILT_IN_CATANH):
      if (validate_arg (arg0, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
        return do_mpc_arg1 (arg0, type, mpc_atanh);
      break;

    CASE_FLT_FN (BUILT_IN_CABS):
      return fold_builtin_cabs (loc, arg0, type, fndecl);

    CASE_FLT_FN (BUILT_IN_CARG):
      return fold_builtin_carg (loc, arg0, type);
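
    /* Note (added for exposition, not in the original sources): the
       do_mpc_arg1 cases above constant-fold calls whose argument is a
       complex constant with REAL_CST parts, e.g. csin (1.0 + 2.0i) is
       evaluated with MPC at the precision of TYPE and replaced by the
       resulting COMPLEX_CST.  */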
    CASE_FLT_FN (BUILT_IN_SQRT):
      return fold_builtin_sqrt (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_CBRT):
      return fold_builtin_cbrt (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_ASIN):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_asin,
                             &dconstm1, &dconst1, true);
      break;

    CASE_FLT_FN (BUILT_IN_ACOS):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_acos,
                             &dconstm1, &dconst1, true);
      break;

    CASE_FLT_FN (BUILT_IN_ATAN):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_ASINH):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_ACOSH):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_acosh,
                             &dconst1, NULL, true);
      break;

    CASE_FLT_FN (BUILT_IN_ATANH):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_atanh,
                             &dconstm1, &dconst1, false);
      break;

    CASE_FLT_FN (BUILT_IN_SIN):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_COS):
      return fold_builtin_cos (loc, arg0, type, fndecl);

    CASE_FLT_FN (BUILT_IN_TAN):
      return fold_builtin_tan (arg0, type);

    CASE_FLT_FN (BUILT_IN_CEXP):
      return fold_builtin_cexp (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_CEXPI):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
      break;

    CASE_FLT_FN (BUILT_IN_SINH):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_COSH):
      return fold_builtin_cosh (loc, arg0, type, fndecl);

    CASE_FLT_FN (BUILT_IN_TANH):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_ERF):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_ERFC):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_TGAMMA):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_EXP):
      return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);

    CASE_FLT_FN (BUILT_IN_EXP2):
      return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);

    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
      return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);

    CASE_FLT_FN (BUILT_IN_EXPM1):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_LOG):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_log, &dconst0, NULL, false);
      break;

    CASE_FLT_FN (BUILT_IN_LOG2):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_log2, &dconst0, NULL, false);
      break;

    CASE_FLT_FN (BUILT_IN_LOG10):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_log10, &dconst0, NULL, false);
      break;

    CASE_FLT_FN (BUILT_IN_LOG1P):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_log1p,
                             &dconstm1, NULL, false);
      break;

    CASE_FLT_FN (BUILT_IN_J0):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_j0,
                             NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_J1):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_j1,
                             NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_Y0):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_y0,
                             &dconst0, NULL, false);
      break;

    CASE_FLT_FN (BUILT_IN_Y1):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_y1,
                             &dconst0, NULL, false);
      break;

    CASE_FLT_FN (BUILT_IN_NAN):
    case BUILT_IN_NAND32:
    case BUILT_IN_NAND64:
    case BUILT_IN_NAND128:
      return fold_builtin_nan (arg0, type, true);

    CASE_FLT_FN (BUILT_IN_NANS):
      return fold_builtin_nan (arg0, type, false);

    CASE_FLT_FN (BUILT_IN_FLOOR):
      return fold_builtin_floor (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_CEIL):
      return fold_builtin_ceil (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_TRUNC):
      return fold_builtin_trunc (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_ROUND):
      return fold_builtin_round (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return fold_trunc_transparent_mathfn (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
    CASE_FLT_FN (BUILT_IN_IROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      return fold_builtin_int_roundingfn (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_IRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      return fold_fixed_mathfn (loc, fndecl, arg0);

    case BUILT_IN_BSWAP16:
    case BUILT_IN_BSWAP32:
    case BUILT_IN_BSWAP64:
      return fold_builtin_bswap (fndecl, arg0);

    CASE_INT_FN (BUILT_IN_FFS):
    CASE_INT_FN (BUILT_IN_CLZ):
    CASE_INT_FN (BUILT_IN_CTZ):
    CASE_INT_FN (BUILT_IN_CLRSB):
    CASE_INT_FN (BUILT_IN_POPCOUNT):
    CASE_INT_FN (BUILT_IN_PARITY):
      return fold_builtin_bitop (fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_SIGNBIT):
      return fold_builtin_signbit (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
      return fold_builtin_significand (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_ILOGB):
    CASE_FLT_FN (BUILT_IN_LOGB):
      return fold_builtin_logb (loc, arg0, type);

    case BUILT_IN_ISASCII:
      return fold_builtin_isascii (loc, arg0);

    case BUILT_IN_TOASCII:
      return fold_builtin_toascii (loc, arg0);

    case BUILT_IN_ISDIGIT:
      return fold_builtin_isdigit (loc, arg0);

    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISFINITE:
      {
        tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
        if (ret)
          return ret;
        return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
      }

    CASE_FLT_FN (BUILT_IN_ISINF):
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      {
        tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
        if (ret)
          return ret;
        return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
      }

    case BUILT_IN_ISNORMAL:
      return fold_builtin_interclass_mathfn (loc, fndecl, arg0);

    case BUILT_IN_ISINF_SIGN:
      return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);

    CASE_FLT_FN (BUILT_IN_ISNAN):
    case BUILT_IN_ISNAND32:
    case BUILT_IN_ISNAND64:
    case BUILT_IN_ISNAND128:
      return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);

    case BUILT_IN_FREE:
      if (integer_zerop (arg0))
        return build_empty_stmt (loc);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
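
/* Illustrative example (not from the original sources): through the cases
   above, fabs (-2.5) folds to the REAL_CST 2.5, free ((void *) 0) becomes
   an empty statement, and sin (1.0) is evaluated with MPFR at the precision
   of the call's type when the argument is a constant.  */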
/* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
   This function returns NULL_TREE if no simplification was possible.  */
static tree
fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_JN):
      if (validate_arg (arg0, INTEGER_TYPE)
          && validate_arg (arg1, REAL_TYPE))
        return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_YN):
      if (validate_arg (arg0, INTEGER_TYPE)
          && validate_arg (arg1, REAL_TYPE))
        return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
                                 &dconst0, false);
      break;

    CASE_FLT_FN (BUILT_IN_DREM):
    CASE_FLT_FN (BUILT_IN_REMAINDER):
      if (validate_arg (arg0, REAL_TYPE)
          && validate_arg (arg1, REAL_TYPE))
        return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
      break;

    CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
    CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
      if (validate_arg (arg0, REAL_TYPE)
          && validate_arg (arg1, POINTER_TYPE))
        return do_mpfr_lgamma_r (arg0, arg1, type);
      break;

    CASE_FLT_FN (BUILT_IN_ATAN2):
      if (validate_arg (arg0, REAL_TYPE)
          && validate_arg (arg1, REAL_TYPE))
        return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
      break;

    CASE_FLT_FN (BUILT_IN_FDIM):
      if (validate_arg (arg0, REAL_TYPE)
          && validate_arg (arg1, REAL_TYPE))
        return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
      break;

    CASE_FLT_FN (BUILT_IN_HYPOT):
      return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_CPOW):
      if (validate_arg (arg0, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
          && validate_arg (arg1, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
        return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
      break;

    CASE_FLT_FN (BUILT_IN_LDEXP):
      return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      return fold_builtin_load_exponent (loc, arg0, arg1,
                                         type, /*ldexp=*/false);

    CASE_FLT_FN (BUILT_IN_FREXP):
      return fold_builtin_frexp (loc, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_MODF):
      return fold_builtin_modf (loc, arg0, arg1, type);

    case BUILT_IN_STRSTR:
      return fold_builtin_strstr (loc, arg0, arg1, type);

    case BUILT_IN_STRSPN:
      return fold_builtin_strspn (loc, arg0, arg1);

    case BUILT_IN_STRCSPN:
      return fold_builtin_strcspn (loc, arg0, arg1);

    case BUILT_IN_STRCHR:
    case BUILT_IN_INDEX:
      return fold_builtin_strchr (loc, arg0, arg1, type);

    case BUILT_IN_STRRCHR:
    case BUILT_IN_RINDEX:
      return fold_builtin_strrchr (loc, arg0, arg1, type);

    case BUILT_IN_STRCMP:
      return fold_builtin_strcmp (loc, arg0, arg1);

    case BUILT_IN_STRPBRK:
      return fold_builtin_strpbrk (loc, arg0, arg1, type);

    case BUILT_IN_EXPECT:
      return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);

    CASE_FLT_FN (BUILT_IN_POW):
      return fold_builtin_pow (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_POWI):
      return fold_builtin_powi (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_COPYSIGN):
      return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_FMIN):
      return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);

    CASE_FLT_FN (BUILT_IN_FMAX):
      return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);

    case BUILT_IN_ISGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNLE_EXPR, LE_EXPR);
    case BUILT_IN_ISGREATEREQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNLT_EXPR, LT_EXPR);
    case BUILT_IN_ISLESS:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNGE_EXPR, GE_EXPR);
    case BUILT_IN_ISLESSEQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNGT_EXPR, GT_EXPR);
    case BUILT_IN_ISLESSGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNEQ_EXPR, EQ_EXPR);
    case BUILT_IN_ISUNORDERED:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNORDERED_EXPR,
                                         NOP_EXPR);

    /* We do the folding for va_start in the expander.  */
    case BUILT_IN_VA_START:
      break;

    case BUILT_IN_OBJECT_SIZE:
      return fold_builtin_object_size (arg0, arg1);

    case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
      return fold_builtin_atomic_always_lock_free (arg0, arg1);

    case BUILT_IN_ATOMIC_IS_LOCK_FREE:
      return fold_builtin_atomic_is_lock_free (arg0, arg1);

    default:
      break;
    }
  return NULL_TREE;
}
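
/* Worked example (added for exposition, not in the original sources): for
   BUILT_IN_ISLESS the pair UNGE_EXPR/GE_EXPR is passed above, so
   isless (x, y) is folded to !(x unge y) when NaNs must be honored and to
   !(x >= y) otherwise; see fold_builtin_unordered_cmp earlier in this file.  */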
/* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
   and ARG2.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_3 (location_t loc, tree fndecl,
                tree arg0, tree arg1, tree arg2)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_SINCOS):
      return fold_builtin_sincos (loc, arg0, arg1, arg2);

    CASE_FLT_FN (BUILT_IN_FMA):
      return fold_builtin_fma (loc, arg0, arg1, arg2, type);

    CASE_FLT_FN (BUILT_IN_REMQUO):
      if (validate_arg (arg0, REAL_TYPE)
          && validate_arg (arg1, REAL_TYPE)
          && validate_arg (arg2, POINTER_TYPE))
        return do_mpfr_remquo (arg0, arg1, arg2);
      break;

    case BUILT_IN_STRNCMP:
      return fold_builtin_strncmp (loc, arg0, arg1, arg2);

    case BUILT_IN_MEMCHR:
      return fold_builtin_memchr (loc, arg0, arg1, arg2, type);

    case BUILT_IN_BCMP:
    case BUILT_IN_MEMCMP:
      return fold_builtin_memcmp (loc, arg0, arg1, arg2);

    case BUILT_IN_EXPECT:
      return fold_builtin_expect (loc, arg0, arg1, arg2);

    case BUILT_IN_ADD_OVERFLOW:
    case BUILT_IN_SUB_OVERFLOW:
    case BUILT_IN_MUL_OVERFLOW:
    case BUILT_IN_SADD_OVERFLOW:
    case BUILT_IN_SADDL_OVERFLOW:
    case BUILT_IN_SADDLL_OVERFLOW:
    case BUILT_IN_SSUB_OVERFLOW:
    case BUILT_IN_SSUBL_OVERFLOW:
    case BUILT_IN_SSUBLL_OVERFLOW:
    case BUILT_IN_SMUL_OVERFLOW:
    case BUILT_IN_SMULL_OVERFLOW:
    case BUILT_IN_SMULLL_OVERFLOW:
    case BUILT_IN_UADD_OVERFLOW:
    case BUILT_IN_UADDL_OVERFLOW:
    case BUILT_IN_UADDLL_OVERFLOW:
    case BUILT_IN_USUB_OVERFLOW:
    case BUILT_IN_USUBL_OVERFLOW:
    case BUILT_IN_USUBLL_OVERFLOW:
    case BUILT_IN_UMUL_OVERFLOW:
    case BUILT_IN_UMULL_OVERFLOW:
    case BUILT_IN_UMULLL_OVERFLOW:
      return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);

    default:
      break;
    }
  return NULL_TREE;
}
/* Fold a call to built-in function FNDECL.  ARGS is an array of NARGS
   arguments.  IGNORE is true if the result of the
   function call is ignored.  This function returns NULL_TREE if no
   simplification was possible.  */
static tree
fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
{
  tree ret = NULL_TREE;

  switch (nargs)
    {
    case 0:
      ret = fold_builtin_0 (loc, fndecl);
      break;
    case 1:
      ret = fold_builtin_1 (loc, fndecl, args[0]);
      break;
    case 2:
      ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
      break;
    case 3:
      ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
      break;
    default:
      ret = fold_builtin_varargs (loc, fndecl, args, nargs);
      break;
    }
  if (ret)
    {
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}
/* Construct a new CALL_EXPR to FNDECL using the tail of the argument
   list ARGS along with N new arguments in NEWARGS.  SKIP is the number
   of arguments in ARGS to be omitted.  OLDNARGS is the number of
   elements in ARGS.  */
static tree
rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
                          int skip, tree fndecl, int n, va_list newargs)
{
  int nargs = oldnargs - skip + n;
  tree *buffer;

  if (n > 0)
    {
      int i, j;

      buffer = XALLOCAVEC (tree, nargs);
      for (i = 0; i < n; i++)
        buffer[i] = va_arg (newargs, tree);
      for (j = skip; j < oldnargs; j++, i++)
        buffer[i] = args[j];
    }
  else
    buffer = args + skip;

  return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
}
/* Return true if FNDECL shouldn't be folded right now.
   If a built-in function has an inline attribute always_inline
   wrapper, defer folding it after always_inline functions have
   been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
   might not be performed.  */
static bool
avoid_folding_inline_builtin (tree fndecl)
{
  return (DECL_DECLARED_INLINE_P (fndecl)
          && DECL_DISREGARD_INLINE_LIMITS (fndecl)
          && cfun
          && !cfun->always_inline_functions_inlined
          && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
}
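
/* Background note (added for exposition, not in the original sources):
   glibc's _FORTIFY_SOURCE headers wrap functions such as memcpy in
   always_inline wrappers that call __builtin___memcpy_chk; folding the
   builtin before those wrappers are inlined would bypass the object-size
   check they exist to perform, which is why folding is deferred here.  */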
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */
tree
fold_call_expr (location_t loc, tree exp, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = get_callee_fndecl (exp);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
         yet.  Defer folding until we see all the arguments
         (after inlining).  */
      && !CALL_EXPR_VA_ARG_PACK (exp))
    {
      int nargs = call_expr_nargs (exp);

      /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
         instead last argument is __builtin_va_arg_pack ().  Defer folding
         even in that case, until arguments are finalized.  */
      if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
        {
          tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
          if (fndecl2
              && TREE_CODE (fndecl2) == FUNCTION_DECL
              && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
              && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
            return NULL_TREE;
        }

      if (avoid_folding_inline_builtin (fndecl))
        return NULL_TREE;

      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
        return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
                                     CALL_EXPR_ARGP (exp), ignore);
      else
        {
          tree *args = CALL_EXPR_ARGP (exp);
          ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
          if (ret)
            return ret;
        }
    }
  return NULL_TREE;
}
/* Fold a CALL_EXPR with type TYPE with FN as the function expression.
   N arguments are passed in the array ARGARRAY.  Return a folded
   expression or NULL_TREE if no simplification was possible.  */
tree
fold_builtin_call_array (location_t loc, tree,
                         tree fn,
                         int n,
                         tree *argarray)
{
  if (TREE_CODE (fn) != ADDR_EXPR)
    return NULL_TREE;

  tree fndecl = TREE_OPERAND (fn, 0);
  if (TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl))
    {
      /* If last argument is __builtin_va_arg_pack (), arguments to this
         function are not finalized yet.  Defer folding until they are.  */
      if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
        {
          tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
          if (fndecl2
              && TREE_CODE (fndecl2) == FUNCTION_DECL
              && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
              && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
            return NULL_TREE;
        }
      if (avoid_folding_inline_builtin (fndecl))
        return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
        return targetm.fold_builtin (fndecl, n, argarray, false);
      else
        return fold_builtin_n (loc, fndecl, argarray, n, false);
    }

  return NULL_TREE;
}
/* Construct a new CALL_EXPR using the tail of the argument list of EXP
   along with N new arguments specified as the "..." parameters.  SKIP
   is the number of arguments in EXP to be omitted.  This function is used
   to do varargs-to-varargs transformations.  */
static tree
rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
{
  va_list ap;
  tree t;

  va_start (ap, n);
  t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
                                CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
  va_end (ap);

  return t;
}
/* Validate a single argument ARG against a tree code CODE representing
   a type.  */

static bool
validate_arg (const_tree arg, enum tree_code code)
{
  if (!arg)
    return false;
  else if (code == POINTER_TYPE)
    return POINTER_TYPE_P (TREE_TYPE (arg));
  else if (code == INTEGER_TYPE)
    return INTEGRAL_TYPE_P (TREE_TYPE (arg));
  return code == TREE_CODE (TREE_TYPE (arg));
}
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.

   This is the GIMPLE version of validate_arglist.  Eventually we want to
   completely convert builtins.c to work from GIMPLEs and the tree based
   validate_arglist will then be removed.  */

bool
validate_gimple_arglist (const gcall *call, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_tree arg;
  size_t i;

  va_start (ap, call);
  i = 0;

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
        {
        case 0:
          /* This signifies an ellipsis, any further arguments are all ok.  */
          res = true;
          goto end;
        case VOID_TYPE:
          /* This signifies an endlink, if no arguments remain, return
             true, otherwise return false.  */
          res = (i == gimple_call_num_args (call));
          goto end;
        default:
          /* If no parameters remain or the parameter's code does not
             match the specified code, return false.  Otherwise continue
             checking any remaining arguments.  */
          arg = gimple_call_arg (call, i++);
          if (!validate_arg (arg, code))
            goto end;
          break;
        }
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}
/* Default target-specific builtin expander that does nothing.  */

rtx
default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
                        rtx target ATTRIBUTE_UNUSED,
                        rtx subtarget ATTRIBUTE_UNUSED,
                        machine_mode mode ATTRIBUTE_UNUSED,
                        int ignore ATTRIBUTE_UNUSED)
{
  return NULL_RTX;
}
/* Returns true if EXP represents data that would potentially reside
   in a readonly section.  */

static bool
readonly_data_expr (tree exp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) != ADDR_EXPR)
    return false;

  exp = get_base_address (TREE_OPERAND (exp, 0));
  if (!exp)
    return false;

  /* Make sure we call decl_readonly_section only for trees it
     can handle (since it returns true for everything it doesn't
     understand).  */
  if (TREE_CODE (exp) == STRING_CST
      || TREE_CODE (exp) == CONSTRUCTOR
      || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
    return decl_readonly_section (exp, 0);
  else
    return false;
}
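
/* Illustrative note (not from the original sources): string literals,
   constant CONSTRUCTORs and static variables placed in a readonly section
   count as readonly here; this is what lets __memmove_chk with a readonly
   source be rewritten as __memcpy_chk further down in this file.  */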
/* Simplify a call to the strstr builtin.  S1 and S2 are the arguments
   to the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */
static tree
fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1, *p2;

      p2 = c_getstr (s2);
      if (p2 == NULL)
        return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
        {
          const char *r = strstr (p1, p2);
          tree tem;

          if (r == NULL)
            return build_int_cst (TREE_TYPE (s1), 0);

          /* Return an offset into the constant string argument.  */
          tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
          return fold_convert_loc (loc, type, tem);
        }

      /* The argument is const char *, and the result is char *, so we need
         a type conversion here to avoid a warning.  */
      if (p2[0] == '\0')
        return fold_convert_loc (loc, type, s1);

      if (p2[1] != '\0')
        return NULL_TREE;

      fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (!fn)
        return NULL_TREE;

      /* New argument list transforming strstr(s1, s2) to
         strchr(s1, s2[0]).  */
      return build_call_expr_loc (loc, fn, 2, s1,
                                  build_int_cst (integer_type_node, p2[0]));
    }
}
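
/* For example (added for exposition, not in the original sources):
     strstr (x, "")          -> (char *) x
     strstr ("hello", "ell") -> &"hello"[1]
     strstr (x, "c")         -> strchr (x, 'c')
   while a non-constant second argument leaves the call alone.  */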
/* Simplify a call to the strchr builtin.  S1 and S2 are the arguments to
   the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */
static tree
fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1;

      if (TREE_CODE (s2) != INTEGER_CST)
        return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
        {
          char c;
          const char *r;
          tree tem;

          if (target_char_cast (s2, &c))
            return NULL_TREE;

          r = strchr (p1, c);

          if (r == NULL)
            return build_int_cst (TREE_TYPE (s1), 0);

          /* Return an offset into the constant string argument.  */
          tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
          return fold_convert_loc (loc, type, tem);
        }
      return NULL_TREE;
    }
}
/* Simplify a call to the strrchr builtin.  S1 and S2 are the arguments to
   the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1;

      if (TREE_CODE (s2) != INTEGER_CST)
        return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
        {
          char c;
          const char *r;
          tree tem;

          if (target_char_cast (s2, &c))
            return NULL_TREE;

          r = strrchr (p1, c);

          if (r == NULL)
            return build_int_cst (TREE_TYPE (s1), 0);

          /* Return an offset into the constant string argument.  */
          tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
          return fold_convert_loc (loc, type, tem);
        }

      if (! integer_zerop (s2))
        return NULL_TREE;

      fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (!fn)
        return NULL_TREE;

      /* Transform strrchr(s1, '\0') to strchr(s1, '\0').  */
      return build_call_expr_loc (loc, fn, 2, s1, s2);
    }
}
/* Simplify a call to the strpbrk builtin.  S1 and S2 are the arguments
   to the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1, *p2;

      p2 = c_getstr (s2);
      if (p2 == NULL)
        return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
        {
          const char *r = strpbrk (p1, p2);
          tree tem;

          if (r == NULL)
            return build_int_cst (TREE_TYPE (s1), 0);

          /* Return an offset into the constant string argument.  */
          tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
          return fold_convert_loc (loc, type, tem);
        }

      if (p2[0] == '\0')
        /* strpbrk(x, "") == NULL.
           Evaluate and ignore s1 in case it had side-effects.  */
        return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);

      if (p2[1] != '\0')
        return NULL_TREE;  /* Really call strpbrk.  */

      fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (!fn)
        return NULL_TREE;

      /* New argument list transforming strpbrk(s1, s2) to
         strchr(s1, s2[0]).  */
      return build_call_expr_loc (loc, fn, 2, s1,
                                  build_int_cst (integer_type_node, p2[0]));
    }
}
/* Simplify a call to the strspn builtin.  S1 and S2 are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strspn (location_t loc, tree s1, tree s2)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);

      /* If both arguments are constants, evaluate at compile-time.  */
      if (p1 && p2)
        {
          const size_t r = strspn (p1, p2);
          return build_int_cst (size_type_node, r);
        }

      /* If either argument is "", return NULL_TREE.  */
      if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
        /* Evaluate and ignore both arguments in case either one has
           side-effects.  */
        return omit_two_operands_loc (loc, size_type_node, size_zero_node,
                                      s1, s2);
      return NULL_TREE;
    }
}
/* Simplify a call to the strcspn builtin.  S1 and S2 are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strcspn (location_t loc, tree s1, tree s2)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);

      /* If both arguments are constants, evaluate at compile-time.  */
      if (p1 && p2)
        {
          const size_t r = strcspn (p1, p2);
          return build_int_cst (size_type_node, r);
        }

      /* If the first argument is "", return NULL_TREE.  */
      if (p1 && *p1 == '\0')
        {
          /* Evaluate and ignore argument s2 in case it has
             side-effects.  */
          return omit_one_operand_loc (loc, size_type_node,
                                       size_zero_node, s2);
        }

      /* If the second argument is "", return __builtin_strlen(s1).  */
      if (p2 && *p2 == '\0')
        {
          tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);

          /* If the replacement _DECL isn't initialized, don't do the
             transformation.  */
          if (!fn)
            return NULL_TREE;

          return build_call_expr_loc (loc, fn, 1, s1);
        }
      return NULL_TREE;
    }
}
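
/* Worked example (added for exposition, not in the original sources):
     strcspn ("abcde", "cx") -> 2            (both strings constant)
     strcspn ("", s2)        -> (s2, 0)      (s2 still evaluated)
     strcspn (s1, "")        -> strlen (s1)
   anything else is left for the library call.  */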
/* Fold the next_arg or va_start call EXP.  Returns true if there was an error
   produced.  False otherwise.  This is done so that we don't output the error
   or warning twice or three times.  */

bool
fold_builtin_next_arg (tree exp, bool va_start_p)
{
  tree fntype = TREE_TYPE (current_function_decl);
  int nargs = call_expr_nargs (exp);
  tree arg;
  /* There is good chance the current input_location points inside the
     definition of the va_start macro (perhaps on the token for
     builtin) in a system header, so warnings will not be emitted.
     Use the location in real source code.  */
  source_location current_location =
    linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
                                              NULL);

  if (!stdarg_p (fntype))
    {
      error ("%<va_start%> used in function with fixed args");
      return true;
    }

  if (va_start_p)
    {
      if (va_start_p && (nargs != 2))
        {
          error ("wrong number of arguments to function %<va_start%>");
          return true;
        }
      arg = CALL_EXPR_ARG (exp, 1);
    }
  /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
     when we checked the arguments and if needed issued a warning.  */
  else
    {
      if (nargs == 0)
        {
          /* Evidently an out of date version of <stdarg.h>; can't validate
             va_start's second argument, but can still work as intended.  */
          warning_at (current_location,
                      OPT_Wvarargs,
                      "%<__builtin_next_arg%> called without an argument");
          return true;
        }
      else if (nargs > 1)
        {
          error ("wrong number of arguments to function %<__builtin_next_arg%>");
          return true;
        }
      arg = CALL_EXPR_ARG (exp, 0);
    }

  if (TREE_CODE (arg) == SSA_NAME)
    arg = SSA_NAME_VAR (arg);

  /* We destructively modify the call to be __builtin_va_start (ap, 0)
     or __builtin_next_arg (0) the first time we see it, after checking
     the arguments and if needed issuing a warning.  */
  if (!integer_zerop (arg))
    {
      tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));

      /* Strip off all nops for the sake of the comparison.  This
         is not quite the same as STRIP_NOPS.  It does more.
         We must also strip off INDIRECT_EXPR for C++ reference
         parameters.  */
      while (CONVERT_EXPR_P (arg)
             || TREE_CODE (arg) == INDIRECT_REF)
        arg = TREE_OPERAND (arg, 0);
      if (arg != last_parm)
        {
          /* FIXME: Sometimes with the tree optimizers we can get the
             not the last argument even though the user used the last
             argument.  We just warn and set the arg to be the last
             argument so that we will get wrong-code because of
             it.  */
          warning_at (current_location,
                      OPT_Wvarargs,
                      "second parameter of %<va_start%> not last named argument");
        }

      /* Undefined by C99 7.15.1.4p4 (va_start):
         "If the parameter parmN is declared with the register storage
         class, with a function or array type, or with a type that is
         not compatible with the type that results after application of
         the default argument promotions, the behavior is undefined."
      */
      else if (DECL_REGISTER (arg))
        {
          warning_at (current_location,
                      OPT_Wvarargs,
                      "undefined behaviour when second parameter of "
                      "%<va_start%> is declared with %<register%> storage");
        }

      /* We want to verify the second parameter just once before the tree
         optimizers are run and then avoid keeping it in the tree,
         as otherwise we could warn even for correct code like:
         void foo (int i, ...)
         { va_list ap; i++; va_start (ap, i); va_end (ap); } */
      if (va_start_p)
        CALL_EXPR_ARG (exp, 1) = integer_zero_node;
      else
        CALL_EXPR_ARG (exp, 0) = integer_zero_node;
    }
  return false;
}
/* Expand a call EXP to __builtin_object_size.  */

static rtx
expand_builtin_object_size (tree exp)
{
  tree ost;
  int object_size_type;
  tree fndecl = get_callee_fndecl (exp);

  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      error ("%Kfirst argument of %D must be a pointer, second integer constant",
             exp, fndecl);
      expand_builtin_trap ();
      return const0_rtx;
    }

  ost = CALL_EXPR_ARG (exp, 1);
  STRIP_NOPS (ost);

  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    {
      error ("%Klast argument of %D is not integer constant between 0 and 3",
             exp, fndecl);
      expand_builtin_trap ();
      return const0_rtx;
    }

  object_size_type = tree_to_shwi (ost);

  return object_size_type < 2 ? constm1_rtx : const0_rtx;
}
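
/* Note (added for exposition, not in the original sources): this expander
   only runs when no object-size information could be determined earlier, so
   it returns the documented "unknown" answers: (size_t) -1 for types 0 and 1
   and (size_t) 0 for types 2 and 3.  */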
/* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   FCODE is the BUILT_IN_* to use.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
                           enum built_in_function fcode)
{
  tree dest, src, len, size;

  if (!validate_arglist (exp,
                         POINTER_TYPE,
                         fcode == BUILT_IN_MEMSET_CHK
                         ? INTEGER_TYPE : POINTER_TYPE,
                         INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dest = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);
  len = CALL_EXPR_ARG (exp, 2);
  size = CALL_EXPR_ARG (exp, 3);

  if (! tree_fits_uhwi_p (size))
    return NULL_RTX;

  if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
    {
      tree fn;

      if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
        {
          warning_at (tree_nonartificial_location (exp),
                      0, "%Kcall to %D will always overflow destination buffer",
                      exp, get_callee_fndecl (exp));
          return NULL_RTX;
        }

      fn = NULL_TREE;
      /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
         mem{cpy,pcpy,move,set} is available.  */
      switch (fcode)
        {
        case BUILT_IN_MEMCPY_CHK:
          fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
          break;
        case BUILT_IN_MEMPCPY_CHK:
          fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
          break;
        case BUILT_IN_MEMMOVE_CHK:
          fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
          break;
        case BUILT_IN_MEMSET_CHK:
          fn = builtin_decl_explicit (BUILT_IN_MEMSET);
          break;
        default:
          break;
        }

      if (! fn)
        return NULL_RTX;

      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_expr (fn, target, mode, EXPAND_NORMAL);
    }
  else if (fcode == BUILT_IN_MEMSET_CHK)
    return NULL_RTX;
  else
    {
      unsigned int dest_align = get_pointer_alignment (dest);

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
        return NULL_RTX;

      /* If SRC and DEST are the same (and not volatile), do nothing.  */
      if (operand_equal_p (src, dest, 0))
        {
          tree expr;

          if (fcode != BUILT_IN_MEMPCPY_CHK)
            {
              /* Evaluate and ignore LEN in case it has side-effects.  */
              expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
              return expand_expr (dest, target, mode, EXPAND_NORMAL);
            }

          expr = fold_build_pointer_plus (dest, len);
          return expand_expr (expr, target, mode, EXPAND_NORMAL);
        }

      /* __memmove_chk special case.  */
      if (fcode == BUILT_IN_MEMMOVE_CHK)
        {
          unsigned int src_align = get_pointer_alignment (src);

          if (src_align == 0)
            return NULL_RTX;

          /* If src is categorized for a readonly section we can use
             normal __memcpy_chk.  */
          if (readonly_data_expr (src))
            {
              tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
              if (!fn)
                return NULL_RTX;
              fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
                                          dest, src, len, size);
              gcc_assert (TREE_CODE (fn) == CALL_EXPR);
              CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
              return expand_expr (fn, target, mode, EXPAND_NORMAL);
            }
        }
      return NULL_RTX;
    }
}
/* Emit warning if a buffer overflow is detected at compile time.  */

static void
maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
{
  int is_strlen = 0;
  tree len, size;
  location_t loc = tree_nonartificial_location (exp);

  switch (fcode)
    {
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
    /* For __strcat_chk the warning will be emitted only if overflowing
       by at least strlen (dest) + 1 bytes.  */
    case BUILT_IN_STRCAT_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 2);
      is_strlen = 1;
      break;
    case BUILT_IN_STRNCAT_CHK:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      len = CALL_EXPR_ARG (exp, 2);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    default:
      gcc_unreachable ();
    }

  if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
    return;

  if (is_strlen)
    {
      len = c_strlen (len, 1);
      if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
        return;
    }
  else if (fcode == BUILT_IN_STRNCAT_CHK)
    {
      tree src = CALL_EXPR_ARG (exp, 1);
      if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
        return;
      src = c_strlen (src, 1);
      if (! src || ! tree_fits_uhwi_p (src))
        {
          warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
                      exp, get_callee_fndecl (exp));
          return;
        }
      else if (tree_int_cst_lt (src, size))
        return;
    }
  else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
    return;

  warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
              exp, get_callee_fndecl (exp));
}
/* Emit warning if a buffer overflow is detected at compile time
   in __sprintf_chk/__vsprintf_chk calls.  */

static void
maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
{
  tree size, len, fmt;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return;
  size = CALL_EXPR_ARG (exp, 2);
  fmt = CALL_EXPR_ARG (exp, 3);

  if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
    return;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return;

  if (!init_target_chars ())
    return;

  /* If the format doesn't contain % args or %%, we know its size.  */
  if (strchr (fmt_str, target_percent) == 0)
    len = build_int_cstu (size_type_node, strlen (fmt_str));
  /* If the format is "%s" and first ... argument is a string literal,
     we know it too.  */
  else if (fcode == BUILT_IN_SPRINTF_CHK
           && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree arg;

      if (nargs < 5)
        return;
      arg = CALL_EXPR_ARG (exp, 4);
      if (! POINTER_TYPE_P (TREE_TYPE (arg)))
        return;

      len = c_strlen (arg, 1);
      if (!len || ! tree_fits_uhwi_p (len))
        return;
    }
  else
    return;

  if (! tree_int_cst_lt (len, size))
    warning_at (tree_nonartificial_location (exp),
                0, "%Kcall to %D will always overflow destination buffer",
                exp, get_callee_fndecl (exp));
}
/* Emit warning if a free is called with address of a variable.  */

static void
maybe_emit_free_warning (tree exp)
{
  tree arg = CALL_EXPR_ARG (exp, 0);

  STRIP_NOPS (arg);
  if (TREE_CODE (arg) != ADDR_EXPR)
    return;

  arg = get_base_address (TREE_OPERAND (arg, 0));
  if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
    return;

  if (SSA_VAR_P (arg))
    warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
                "%Kattempt to free a non-heap object %qD", exp, arg);
  else
    warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
                "%Kattempt to free a non-heap object", exp);
}
/* Fold a call to __builtin_object_size with arguments PTR and OST,
   if possible.  */

static tree
fold_builtin_object_size (tree ptr, tree ost)
{
  unsigned HOST_WIDE_INT bytes;
  int object_size_type;

  if (!validate_arg (ptr, POINTER_TYPE)
      || !validate_arg (ost, INTEGER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (ost);

  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    return NULL_TREE;

  object_size_type = tree_to_shwi (ost);

  /* __builtin_object_size doesn't evaluate side-effects in its arguments;
     if there are any side-effects, it returns (size_t) -1 for types 0 and 1
     and (size_t) 0 for types 2 and 3.  */
  if (TREE_SIDE_EFFECTS (ptr))
    return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      bytes = compute_builtin_object_size (ptr, object_size_type);
      if (wi::fits_to_tree_p (bytes, size_type_node))
        return build_int_cstu (size_type_node, bytes);
    }
  else if (TREE_CODE (ptr) == SSA_NAME)
    {
      /* If object size is not known yet, delay folding until
         later.  Maybe subsequent passes will help determining
         it.  */
      bytes = compute_builtin_object_size (ptr, object_size_type);
      if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
          && wi::fits_to_tree_p (bytes, size_type_node))
        return build_int_cstu (size_type_node, bytes);
    }

  return NULL_TREE;
}
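
/* Worked example (added for exposition, not in the original sources): given
     char buf[8];
   the call __builtin_object_size (&buf[2], 0) folds to 6 here, while a call
   on an unknown pointer is kept so later passes, or the expander above, can
   supply the (size_t) -1 / 0 defaults.  */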
/* Builtins with folding operations that operate on "..." arguments
   need special handling; we need to store the arguments in a convenient
   data structure before attempting any folding.  Fortunately there are
   only a few builtins that fall into this category.  FNDECL is the
   function, EXP is the CALL_EXPR for the call.  */

static tree
fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  tree ret = NULL_TREE;

  switch (fcode)
    {
    case BUILT_IN_FPCLASSIFY:
      ret = fold_builtin_fpclassify (loc, args, nargs);
      break;

    default:
      break;
    }
  if (ret)
    {
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}
/* Initialize format string characters in the target charset.  */

bool
init_target_chars (void)
{
  static bool init;
  if (!init)
    {
      target_newline = lang_hooks.to_target_charset ('\n');
      target_percent = lang_hooks.to_target_charset ('%');
      target_c = lang_hooks.to_target_charset ('c');
      target_s = lang_hooks.to_target_charset ('s');
      if (target_newline == 0 || target_percent == 0 || target_c == 0
          || target_s == 0)
        return false;

      target_percent_c[0] = target_percent;
      target_percent_c[1] = target_c;
      target_percent_c[2] = '\0';

      target_percent_s[0] = target_percent;
      target_percent_s[1] = target_s;
      target_percent_s[2] = '\0';

      target_percent_s_newline[0] = target_percent;
      target_percent_s_newline[1] = target_s;
      target_percent_s_newline[2] = target_newline;
      target_percent_s_newline[3] = '\0';

      init = true;
    }
  return true;
}
/* Helper function for do_mpfr_arg*().  Ensure M is a normal number
   and no overflow/underflow occurred.  INEXACT is true if M was not
   exactly calculated.  TYPE is the tree type for the result.  This
   function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail.  */

static tree
do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
      && (!flag_rounding_math || !inexact))
    {
      REAL_VALUE_TYPE rr;

      real_from_mpfr (&rr, m, type, GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
         check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
         but the mpfr_t is not, then we underflowed in the
         conversion.  */
      if (real_isfinite (&rr)
          && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
        {
          REAL_VALUE_TYPE rmode;

          real_convert (&rmode, TYPE_MODE (type), &rr);
          /* Proceed iff the specified mode can hold the value.  */
          if (real_identical (&rmode, &rr))
            return build_real (type, rmode);
        }
    }
  return NULL_TREE;
}
/* Helper function for do_mpc_arg*().  Ensure M is a normal complex
   number and no overflow/underflow occurred.  INEXACT is true if M
   was not exactly calculated.  TYPE is the tree type for the result.
   This function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail; if
   FORCE_CONVERT is true, then bypass the checks.  */

static tree
do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (force_convert
      || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
          && !mpfr_overflow_p () && !mpfr_underflow_p ()
          && (!flag_rounding_math || !inexact)))
    {
      REAL_VALUE_TYPE re, im;

      real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
      real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
         check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
         but the mpfr_t is not, then we underflowed in the
         conversion.  */
      if (force_convert
          || (real_isfinite (&re) && real_isfinite (&im)
              && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
              && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
        {
          REAL_VALUE_TYPE re_mode, im_mode;

          real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
          real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
          /* Proceed iff the specified mode can hold the value.  */
          if (force_convert
              || (real_identical (&re_mode, &re)
                  && real_identical (&im_mode, &im)))
            return build_complex (type, build_real (TREE_TYPE (type), re_mode),
                                  build_real (TREE_TYPE (type), im_mode));
        }
    }
  return NULL_TREE;
}
/* If argument ARG is a REAL_CST, call the one-argument mpfr function
   FUNC on it and return the resulting value as a tree with type TYPE.
   If MIN and/or MAX are not NULL, then the supplied ARG must be
   within those bounds.  If INCLUSIVE is true, then MIN/MAX are
   acceptable values, otherwise they are not.  The mpfr precision is
   set to the precision of TYPE.  We assume that function FUNC returns
   zero if the result could be calculated exactly within the requested
   precision.  */

static tree
do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
              const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
              bool inclusive)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      if (real_isfinite (ra)
          && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min))
          && (!max || real_compare (inclusive ? LE_EXPR : LT_EXPR, ra, max)))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          int inexact;
          mpfr_t m;

          mpfr_init2 (m, prec);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = func (m, m, rnd);
          result = do_mpfr_ckconv (m, type, inexact);
          mpfr_clear (m);
        }
    }

  return result;
}
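
/* Illustrative example (not from the original sources): folding sin (1.0)
   in double calls this helper with mpfr_sin; MPFR evaluates the value at
   the precision of DFmode and do_mpfr_ckconv above turns it into a
   REAL_CST, unless -frounding-math is in effect and the result was
   inexact.  */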
/* If arguments ARG1 and ARG2 are REAL_CSTs, call the two-argument mpfr
   function FUNC on them and return the resulting value as a tree with
   type TYPE.  The mpfr precision is set to the precision of TYPE.  We
   assume that function FUNC returns zero if the result could be
   calculated exactly within the requested precision.  */

static tree
do_mpfr_arg2 (tree arg1, tree arg2, tree type,
	      int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
    {
      const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
      const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);

      if (real_isfinite (ra1) && real_isfinite (ra2))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  int inexact;
	  mpfr_t m1, m2;

	  mpfr_inits2 (prec, m1, m2, NULL);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_from_real (m2, ra2, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = func (m1, m1, m2, rnd);
	  result = do_mpfr_ckconv (m1, type, inexact);
	  mpfr_clears (m1, m2, NULL);
	}
    }

  return result;
}
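
/* Illustrative use: two-argument builtins are folded the same way,
   e.g. a caller would typically fold atan2 with

     return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);

   provided both arguments are REAL_CSTs.  */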
/* If arguments ARG1, ARG2 and ARG3 are REAL_CSTs, call the
   three-argument mpfr function FUNC on them and return the resulting
   value as a tree with type TYPE.  The mpfr precision is set to the
   precision of TYPE.  We assume that function FUNC returns zero if the
   result could be calculated exactly within the requested precision.  */

static tree
do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
	      int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr,
			  mp_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);
  STRIP_NOPS (arg3);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
      && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
    {
      const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
      const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
      const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);

      if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  int inexact;
	  mpfr_t m1, m2, m3;

	  mpfr_inits2 (prec, m1, m2, m3, NULL);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_from_real (m2, ra2, GMP_RNDN);
	  mpfr_from_real (m3, ra3, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = func (m1, m1, m2, m3, rnd);
	  result = do_mpfr_ckconv (m1, type, inexact);
	  mpfr_clears (m1, m2, m3, NULL);
	}
    }

  return result;
}
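
/* Illustrative use: the typical three-argument client is fma, which a
   caller would fold with

     return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);

   when all three arguments are REAL_CSTs.  */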
/* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
   the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
   If ARG_SINP and ARG_COSP are NULL then the result is returned
   as a complex value.
   The type is taken from the type of ARG and is used for setting the
   precision of the calculation and results.  */

static tree
do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
{
  tree const type = TREE_TYPE (arg);
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST
      && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      if (real_isfinite (ra))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  tree result_s, result_c;
	  int inexact;
	  mpfr_t m, ms, mc;

	  mpfr_inits2 (prec, m, ms, mc, NULL);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = mpfr_sin_cos (ms, mc, m, rnd);
	  result_s = do_mpfr_ckconv (ms, type, inexact);
	  result_c = do_mpfr_ckconv (mc, type, inexact);
	  mpfr_clears (m, ms, mc, NULL);
	  if (result_s && result_c)
	    {
	      /* If we are to return in a complex value do so.  */
	      if (!arg_sinp && !arg_cosp)
		return build_complex (build_complex_type (type),
				      result_c, result_s);

	      /* Dereference the sin/cos pointer arguments.  */
	      arg_sinp = build_fold_indirect_ref (arg_sinp);
	      arg_cosp = build_fold_indirect_ref (arg_cosp);
	      /* Proceed if valid pointer types were passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
		  && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
		{
		  /* Set the values.  */
		  result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
					  result_s);
		  TREE_SIDE_EFFECTS (result_s) = 1;
		  result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
					  result_c);
		  TREE_SIDE_EFFECTS (result_c) = 1;
		  /* Combine the assignments into a compound expr.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_s, result_c));
		}
	    }
	}
    }

  return result;
}
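
/* Illustrative use: a caller folding sincos (x, &s, &c) would typically
   pass the two output pointers, while a cexpi fold omits them so the
   result comes back as a complex value:

     return do_mpfr_sincos (arg0, arg1, arg2);
     return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);  */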
/* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
   two-argument mpfr order N Bessel function FUNC on them and return
   the resulting value as a tree with type TYPE.  The mpfr precision
   is set to the precision of TYPE.  We assume that function FUNC
   returns zero if the result could be calculated exactly within the
   requested precision.  */

static tree
do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
		  int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
		  const REAL_VALUE_TYPE *min, bool inclusive)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && tree_fits_shwi_p (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
    {
      const HOST_WIDE_INT n = tree_to_shwi (arg1);
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);

      if (n == (long) n
	  && real_isfinite (ra)
	  && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min)))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  int inexact;
	  mpfr_t m;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = func (m, n, m, rnd);
	  result = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	}
    }

  return result;
}
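
/* Illustrative use: a caller would typically fold the Bessel builtins
   jn and yn with

     return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
     return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn, &dconst0, false);

   where yn additionally requires a strictly positive argument.  */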
/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  tree result_rem;
	  long integer_quo;
	  mpfr_t m0, m1;

	  mpfr_inits2 (prec, m0, m1, NULL);
	  mpfr_from_real (m0, ra0, GMP_RNDN);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_clear_flags ();
	  mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
	  /* Remquo is independent of the rounding mode, so pass
	     inexact=0 to do_mpfr_ckconv().  */
	  result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
	  mpfr_clears (m0, m1, NULL);
	  if (result_rem)
	    {
	      /* MPFR calculates quo in the host's long so it may
		 return more bits in quo than the target int can hold
		 if sizeof(host long) > sizeof(target int).  This can
		 happen even for native compilers in LP64 mode.  In
		 these cases, modulo the quo value with the largest
		 number that the target int can hold while leaving one
		 bit for the sign.  */
	      if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
		integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

	      /* Dereference the quo pointer argument.  */
	      arg_quo = build_fold_indirect_ref (arg_quo);
	      /* Proceed iff a valid pointer type was passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
		{
		  /* Set the value.  */
		  tree result_quo
		    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
				   build_int_cst (TREE_TYPE (arg_quo),
						  integer_quo));
		  TREE_SIDE_EFFECTS (result_quo) = 1;
		  /* Combine the quo assignment with the rem.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_quo, result_rem));
		}
	    }
	}
    }

  return result;
}
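
/* Illustrative use: a caller would typically fold remquo (x, y, &q) with

     return do_mpfr_remquo (arg0, arg1, arg2);

   yielding a COMPOUND_EXPR that stores the quotient bits through ARG2
   and evaluates to the remainder.  */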
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
	 negative integer.  */
      if (real_isfinite (ra)
	  && ra->cl != rvc_zero
	  && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  int inexact, sg;
	  mpfr_t m;
	  tree result_lg;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = mpfr_lgamma (m, &sg, m, rnd);
	  result_lg = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	  if (result_lg)
	    {
	      tree result_sg;

	      /* Dereference the arg_sg pointer argument.  */
	      arg_sg = build_fold_indirect_ref (arg_sg);
	      /* Assign the signgam value into *arg_sg.  */
	      result_sg = fold_build2 (MODIFY_EXPR,
				       TREE_TYPE (arg_sg), arg_sg,
				       build_int_cst (TREE_TYPE (arg_sg), sg));
	      TREE_SIDE_EFFECTS (result_sg) = 1;
	      /* Combine the signgam assignment with the lgamma result.  */
	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						result_sg, result_lg));
	    }
	}
    }

  return result;
}
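
/* Illustrative use: a caller would typically fold lgamma_r (x, &signp)
   with

     return do_mpfr_lgamma_r (arg0, arg1, type);

   yielding a COMPOUND_EXPR that stores the sign through ARG1 and
   evaluates to the lgamma value.  */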
/* If argument ARG is a COMPLEX_CST, call the one-argument mpc
   function FUNC on it and return the resulting value as a tree with
   type TYPE.  The mpfr precision is set to the precision of TYPE.  We
   assume that function FUNC returns zero if the result could be
   calculated exactly within the requested precision.  */

static tree
do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      if (real_isfinite (re) && real_isfinite (im))
	{
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m;

	  mpc_init2 (m, prec);
	  mpfr_from_real (mpc_realref (m), re, rnd);
	  mpfr_from_real (mpc_imagref (m), im, rnd);
	  mpfr_clear_flags ();
	  inexact = func (m, m, crnd);
	  result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
	  mpc_clear (m);
	}
    }

  return result;
}
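
/* Illustrative use: complex one-argument builtins are folded by passing
   the matching mpc entry point, e.g. a caller would typically fold ccos
   with

     return do_mpc_arg1 (arg0, type, mpc_cos);

   when ARG0 is a COMPLEX_CST whose parts have REAL_TYPE.  */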
/* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
   mpc function FUNC on them and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
	     int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
	  || (real_isfinite (re0) && real_isfinite (im0)
	      && real_isfinite (re1) && real_isfinite (im1)))
	{
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m0, m1;

	  mpc_init2 (m0, prec);
	  mpc_init2 (m1, prec);
	  mpfr_from_real (mpc_realref (m0), re0, rnd);
	  mpfr_from_real (mpc_imagref (m0), im0, rnd);
	  mpfr_from_real (mpc_realref (m1), re1, rnd);
	  mpfr_from_real (mpc_imagref (m1), im1, rnd);
	  mpfr_clear_flags ();
	  inexact = func (m0, m0, m1, crnd);
	  result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
	  mpc_clear (m0);
	  mpc_clear (m1);
	}
    }

  return result;
}
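
/* Illustrative use: a caller would typically fold cpow with

     return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);

   while complex multiplication and division folding may pass a nonzero
   DO_NONFINITE so that operands containing Inf or NaN are still
   folded.  */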
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
		    ? gimple_call_arg_ptr (stmt, 0)
		    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	{
	  return targetm.fold_builtin (fndecl, nargs, args, ignore);
	}
      else
	{
	  ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	  if (ret)
	    {
	      /* Propagate location information from original call to
		 expansion of builtin.  Otherwise things like
		 maybe_emit_chk_warning, that operate on the expansion
		 of a builtin, will use the wrong location information.  */
	      if (gimple_has_location (stmt))
		{
		  tree realret = ret;
		  if (TREE_CODE (ret) == NOP_EXPR)
		    realret = TREE_OPERAND (ret, 0);
		  if (CAN_HAVE_LOCATION_P (realret)
		      && !EXPR_HAS_LOCATION (realret))
		    SET_EXPR_LOCATION (realret, loc);
		  return realret;
		}
	      return ret;
	    }
	}
    }
  return NULL_TREE;
}
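
/* Illustrative effect: given a GIMPLE call such as

     _1 = __builtin_sin (0.0);

   this wrapper would typically return the folded REAL_CST 0.0, with the
   call's location copied onto the replacement so later diagnostics
   still point at the original call.  */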
/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  tree builtin;
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
	      && asmspec != 0);

  builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);
  switch (DECL_FUNCTION_CODE (decl))
    {
    case BUILT_IN_MEMCPY:
      init_block_move_fn (asmspec);
      memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
      break;
    case BUILT_IN_MEMSET:
      init_block_clear_fn (asmspec);
      memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
      break;
    case BUILT_IN_MEMMOVE:
      memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
      break;
    case BUILT_IN_MEMCMP:
      memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
      break;
    case BUILT_IN_ABORT:
      abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
      break;
    case BUILT_IN_FFS:
      if (INT_TYPE_SIZE < BITS_PER_WORD)
	{
	  set_user_assembler_libfunc ("ffs", asmspec);
	  set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
						       MODE_INT, 0), "ffs");
	}
      break;
    default:
      break;
    }
}
/* Return true if DECL is a builtin that expands to a constant or
   similarly simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
	/* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
	/* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
	/* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
	return true;

      default:
	return false;
      }

  return false;
}
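
/* For example, __builtin_constant_p and __builtin_expect expand to
   constants, and __builtin_return_address (0) to a simple load, so all
   of them count as simple; a libcall-style builtin such as
   __builtin_memcpy does not.  */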
/* Return true if DECL is a builtin that is not expensive, i.e., one that
   is most probably expanded inline into reasonably simple code.  This is
   a superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_ACC_ON_DEVICE:
	return true;

      default:
	return is_simple_builtin (decl);
      }

  return false;
}
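
/* For example, __builtin_popcount and __builtin_bswap32 are classified
   as inexpensive here, while anything not listed falls back to
   is_simple_builtin, so a builtin like __builtin_memcpy is still
   reported as potentially expensive.  */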