/* Expand builtin functions.
   Copyright (C) 1988-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
22 #include "coretypes.h"
28 #include "fold-const.h"
29 #include "stringpool.h"
30 #include "stor-layout.h"
33 #include "tree-object-size.h"
36 #include "hard-reg-set.h"
39 #include "basic-block.h"
40 #include "tree-ssa-alias.h"
41 #include "internal-fn.h"
42 #include "gimple-expr.h"
47 #include "insn-config.h"
54 #include "insn-codes.h"
59 #include "typeclass.h"
62 #include "langhooks.h"
63 #include "tree-ssanames.h"
65 #include "value-prof.h"
66 #include "diagnostic-core.h"
70 #include "lto-streamer.h"
72 #include "tree-chkp.h"
74 #include "gomp-constants.h"
static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif
/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Set up an array of builtin_info_type, making sure each element's decl is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];
/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;
static rtx c_readstr (const char *, machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
static rtx result_vector (int, rtx);
#endif
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static void expand_errno_check (tree, rtx);
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_memcmp (tree, rtx, machine_mode);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
					machine_mode, int, tree);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree, bool);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree fold_builtin_nan (tree, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static bool integer_valued_real_p (tree);
static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_sqrt (location_t, tree, tree);
static tree fold_builtin_cbrt (location_t, tree, tree);
static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
static tree fold_builtin_cos (location_t, tree, tree, tree);
static tree fold_builtin_cosh (location_t, tree, tree, tree);
static tree fold_builtin_tan (tree, tree);
static tree fold_builtin_trunc (location_t, tree, tree);
static tree fold_builtin_floor (location_t, tree, tree);
static tree fold_builtin_ceil (location_t, tree, tree);
static tree fold_builtin_round (location_t, tree, tree);
static tree fold_builtin_int_roundingfn (location_t, tree, tree);
static tree fold_builtin_bitop (tree, tree);
static tree fold_builtin_strchr (location_t, tree, tree, tree);
static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_strcmp (location_t, tree, tree);
static tree fold_builtin_strncmp (location_t, tree, tree, tree);
static tree fold_builtin_signbit (location_t, tree, tree);
static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree,
					enum tree_code, enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strstr (location_t, tree, tree, tree);
static tree fold_builtin_strrchr (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
			  const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_arg2 (tree, tree, tree,
			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_arg3 (tree, tree, tree, tree,
			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_sincos (tree, tree, tree);
static tree do_mpfr_bessel_n (tree, tree, tree,
			      int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
			      const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);
/* Return true if NAME starts with __builtin_, __sync_ or __atomic_.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  if (flag_cilkplus
      && (!strcmp (name, "__cilkrts_detach")
	  || !strcmp (name, "__cilkrts_pop_frame")))
    return true;
  return false;
}
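
/* For example, is_builtin_name ("__builtin_memcpy") and
   is_builtin_name ("__atomic_load_n") return true, while a plain
   "memcpy" matches none of the prefixes above and yields false.
   (Illustrative examples, not from the original sources.)  */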
/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}
/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

static bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}
/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in alignp and N in
   *BITPOSP and return true.  Otherwise return false and store BITS_PER_UNIT to
   *alignp and any bit-offset to *bitposp.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
			     &mode, &unsignedp, &volatilep, true);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    {
      align = TYPE_ALIGN (TREE_TYPE (exp));
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = ptr_bitmask & -ptr_bitmask;
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, (step & -step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      if (!addr_p && !known_alignment
	  && TYPE_ALIGN (TREE_TYPE (exp)) > align)
	align = TYPE_ALIGN (TREE_TYPE (exp));
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}
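
/* A worked example of the M/N contract above (illustrative numbers, not
   from the original sources): if EXP is known to live at a 16-byte
   aligned base plus 3 bytes, *alignp is set to 128 (bits) and *bitposp
   to 24 (bits), so the known low bits of the address satisfy
   ptr & (*alignp - 1) == *bitposp in the sense used by
   get_object_alignment below.  */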
/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be determined,
   store M in alignp and N in *BITPOSP and return true.  Otherwise return false
   and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}
/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);
  return align;
}
/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in alignp and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* We cannot really tell whether this result is an approximation.  */
	  return true;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}
/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);

  return align;
}
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_LOC (src, input_location);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! tree_fits_shwi_p (offset_node))
    offset = -1;
  else
    offset = tree_to_shwi (offset_node);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset outside bounds of constant string");
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
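
/* For example, given the constant "foo\0bar" and a known offset of 0,
   c_strlen yields ssize_int (3): the scan stops at the embedded NUL.
   With a non-constant offset it yields NULL_TREE, since the length
   would depend on where the scan starts.  (Illustrative example, not
   from the original sources.)  */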
/* Return a char pointer for a C string if it is a string constant
   or sum of string constant and integer constant.  */

static const char *
c_getstr (tree src)
{
  tree offset_node;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  if (offset_node == 0)
    return TREE_STRING_POINTER (src);
  else if (!tree_fits_uhwi_p (offset_node)
	   || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
    return 0;

  return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
}
/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, machine_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
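
/* For example, on a target where both bytes and words are
   little-endian, c_readstr ("abcd", SImode) packs 'a' (0x61) into the
   least significant byte, producing the constant 0x64636261.
   (Illustrative example, not from the original sources.)  */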
/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
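
/* For instance (illustrative, not from the original sources), when
   expanding __builtin_memset (s, 'x', n) the fill byte arrives as an
   INTEGER_CST; target_char_cast stores the host value of 'x' through P
   and returns zero, while a constant that does not survive the
   truncation to host char makes it return 1 so callers fall back to a
   library call.  */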
/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
#ifdef HAVE_builtin_setjmp_setup
  if (HAVE_builtin_setjmp_setup)
    emit_insn (gen_builtin_setjmp_setup (buf_addr));
#endif

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
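
/* To summarize the stores emitted above: word 0 of the setjmp buffer
   receives the frame pointer value, word 1 the address of
   RECEIVER_LABEL, and everything from offset 2 * GET_MODE_SIZE (Pmode)
   onward is the SAVE_NONLOCAL stack save area.  */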
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

#if !HARD_FRAME_POINTER_IS_ARG_POINTER
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }
#endif

#ifdef HAVE_builtin_setjmp_receiver
  if (receiver_label != NULL && HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
#ifdef HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
/* Return true if the argument iterator ITER has further arguments.  */

static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis; otherwise the last specifier must be a
   VOID_TYPE.  */

bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis: any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink: if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}
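
/* Example usage (this exact pattern appears in
   expand_builtin_nonlocal_goto below):

     if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
       return NULL_RTX;

   i.e. require exactly two pointer arguments; ending the list with 0
   instead of VOID_TYPE would additionally accept trailing arguments of
   any type.  */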
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

#ifdef HAVE_nonlocal_goto
  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (HAVE_nonlocal_goto)
    emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
#endif
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}
/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

#ifdef HAVE_prefetch
  if (HAVE_prefetch)
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
	return;
    }
#endif

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}
/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = VOIDmode;
	  }
    }
  return size;
}
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  machine_mode mode;
  rtx struct_incoming_value
    = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
				      crtl->args.pretend_args_size),
		       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    rtx_insn *seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  machine_mode mode;
  rtx incoming_args, result, reg, dest, src;
  rtx_call_insn *call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value
    = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
  if (!STACK_GROWS_DOWNWARD)
    incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
					 incoming_args, 0, OPTAB_LIB_WIDEN);

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
  else
#endif
    emit_stack_save (SAVE_BLOCK, &old_stack_level);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
  if (!STACK_GROWS_DOWNWARD)
    {
      if (CONST_INT_P (argsize))
	dest = plus_constant (Pmode, dest, -INTVAL (argsize));
      else
	dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
    }
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
				      result, result_vector (1, result)));
  else
#endif
#ifdef HAVE_call_value
  if (HAVE_call_value)
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    gcc_assert (!valreg); /* HAVE_untyped_call required.  */

	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_call_insn (GEN_CALL_VALUE (valreg,
				      gen_rtx_MEM (FUNCTION_MODE, function),
				      const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
#endif
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
  else
#endif
    emit_stack_restore (SAVE_BLOCK, old_stack_level);
  fixup_args_size_notes (call_insn, get_last_insn (), 0);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
/* Perform an untyped return.  */

static void
expand_builtin_return (rtx result)
{
  int size, align, regno;
  machine_mode mode;
  rtx reg;
  rtx_insn *call_fusage = 0;

  result = convert_memory_address (Pmode, result);

  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

#ifdef HAVE_untyped_return
  if (HAVE_untyped_return)
    {
      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
      emit_barrier ();
      return;
    }
#endif

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
	emit_move_insn (reg, adjust_address (result, mode, size));

	push_to_sequence (call_fusage);
	emit_use (reg);
	call_fusage = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insn (call_fusage);

  /* Return whatever value was restored by jumping directly to the end
     of the function.  */
  expand_naked_return ();
}
/* Used by expand_builtin_classify_type and fold_builtin_classify_type.  */

static enum type_class
type_to_class (tree type)
{
  switch (TREE_CODE (type))
    {
    case VOID_TYPE:        return void_type_class;
    case INTEGER_TYPE:     return integer_type_class;
    case ENUMERAL_TYPE:    return enumeral_type_class;
    case BOOLEAN_TYPE:     return boolean_type_class;
    case POINTER_TYPE:     return pointer_type_class;
    case REFERENCE_TYPE:   return reference_type_class;
    case OFFSET_TYPE:      return offset_type_class;
    case REAL_TYPE:        return real_type_class;
    case COMPLEX_TYPE:     return complex_type_class;
    case FUNCTION_TYPE:    return function_type_class;
    case METHOD_TYPE:      return method_type_class;
    case RECORD_TYPE:      return record_type_class;
    case UNION_TYPE:
    case QUAL_UNION_TYPE:  return union_type_class;
    case ARRAY_TYPE:       return (TYPE_STRING_FLAG (type)
                                   ? string_type_class : array_type_class);
    case LANG_TYPE:        return lang_type_class;
    default:               return no_type_class;
    }
}

/* Expand a call EXP to __builtin_classify_type.  */

static rtx
expand_builtin_classify_type (tree exp)
{
  if (call_expr_nargs (exp))
    return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
  return GEN_INT (no_type_class);
}
/* This helper macro, meant to be used in mathfn_built_in below,
   determines which among a set of three builtin math functions is
   appropriate for a given type mode.  The `F' and `L' cases are
   automatically generated from the `double' case.  */
#define CASE_MATHFN(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
  fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
  fcodel = BUILT_IN_MATHFN##L ; break;
/* Similar to above, but appends _R after any F/L suffix.  */
#define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
  fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
  fcodel = BUILT_IN_MATHFN##L_R ; break;
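/* For example, CASE_MATHFN (BUILT_IN_SIN) expands to

     case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
       fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
       fcodel = BUILT_IN_SINL; break;

   so one macro invocation covers the double, float and long double
   variants of each function.  */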
/* Return a mathematical function equivalent to FN but operating directly on
   TYPE, if available.  If IMPLICIT is true use the implicit builtin
   declaration, otherwise use the explicit declaration.  If we can't do the
   conversion, return zero.  */

static tree
mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
{
  enum built_in_function fcode, fcodef, fcodel, fcode2;

  switch (fn)
    {
      CASE_MATHFN (BUILT_IN_ACOS)
      CASE_MATHFN (BUILT_IN_ACOSH)
      CASE_MATHFN (BUILT_IN_ASIN)
      CASE_MATHFN (BUILT_IN_ASINH)
      CASE_MATHFN (BUILT_IN_ATAN)
      CASE_MATHFN (BUILT_IN_ATAN2)
      CASE_MATHFN (BUILT_IN_ATANH)
      CASE_MATHFN (BUILT_IN_CBRT)
      CASE_MATHFN (BUILT_IN_CEIL)
      CASE_MATHFN (BUILT_IN_CEXPI)
      CASE_MATHFN (BUILT_IN_COPYSIGN)
      CASE_MATHFN (BUILT_IN_COS)
      CASE_MATHFN (BUILT_IN_COSH)
      CASE_MATHFN (BUILT_IN_DREM)
      CASE_MATHFN (BUILT_IN_ERF)
      CASE_MATHFN (BUILT_IN_ERFC)
      CASE_MATHFN (BUILT_IN_EXP)
      CASE_MATHFN (BUILT_IN_EXP10)
      CASE_MATHFN (BUILT_IN_EXP2)
      CASE_MATHFN (BUILT_IN_EXPM1)
      CASE_MATHFN (BUILT_IN_FABS)
      CASE_MATHFN (BUILT_IN_FDIM)
      CASE_MATHFN (BUILT_IN_FLOOR)
      CASE_MATHFN (BUILT_IN_FMA)
      CASE_MATHFN (BUILT_IN_FMAX)
      CASE_MATHFN (BUILT_IN_FMIN)
      CASE_MATHFN (BUILT_IN_FMOD)
      CASE_MATHFN (BUILT_IN_FREXP)
      CASE_MATHFN (BUILT_IN_GAMMA)
      CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
      CASE_MATHFN (BUILT_IN_HUGE_VAL)
      CASE_MATHFN (BUILT_IN_HYPOT)
      CASE_MATHFN (BUILT_IN_ILOGB)
      CASE_MATHFN (BUILT_IN_ICEIL)
      CASE_MATHFN (BUILT_IN_IFLOOR)
      CASE_MATHFN (BUILT_IN_INF)
      CASE_MATHFN (BUILT_IN_IRINT)
      CASE_MATHFN (BUILT_IN_IROUND)
      CASE_MATHFN (BUILT_IN_ISINF)
      CASE_MATHFN (BUILT_IN_J0)
      CASE_MATHFN (BUILT_IN_J1)
      CASE_MATHFN (BUILT_IN_JN)
      CASE_MATHFN (BUILT_IN_LCEIL)
      CASE_MATHFN (BUILT_IN_LDEXP)
      CASE_MATHFN (BUILT_IN_LFLOOR)
      CASE_MATHFN (BUILT_IN_LGAMMA)
      CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
      CASE_MATHFN (BUILT_IN_LLCEIL)
      CASE_MATHFN (BUILT_IN_LLFLOOR)
      CASE_MATHFN (BUILT_IN_LLRINT)
      CASE_MATHFN (BUILT_IN_LLROUND)
      CASE_MATHFN (BUILT_IN_LOG)
      CASE_MATHFN (BUILT_IN_LOG10)
      CASE_MATHFN (BUILT_IN_LOG1P)
      CASE_MATHFN (BUILT_IN_LOG2)
      CASE_MATHFN (BUILT_IN_LOGB)
      CASE_MATHFN (BUILT_IN_LRINT)
      CASE_MATHFN (BUILT_IN_LROUND)
      CASE_MATHFN (BUILT_IN_MODF)
      CASE_MATHFN (BUILT_IN_NAN)
      CASE_MATHFN (BUILT_IN_NANS)
      CASE_MATHFN (BUILT_IN_NEARBYINT)
      CASE_MATHFN (BUILT_IN_NEXTAFTER)
      CASE_MATHFN (BUILT_IN_NEXTTOWARD)
      CASE_MATHFN (BUILT_IN_POW)
      CASE_MATHFN (BUILT_IN_POWI)
      CASE_MATHFN (BUILT_IN_POW10)
      CASE_MATHFN (BUILT_IN_REMAINDER)
      CASE_MATHFN (BUILT_IN_REMQUO)
      CASE_MATHFN (BUILT_IN_RINT)
      CASE_MATHFN (BUILT_IN_ROUND)
      CASE_MATHFN (BUILT_IN_SCALB)
      CASE_MATHFN (BUILT_IN_SCALBLN)
      CASE_MATHFN (BUILT_IN_SCALBN)
      CASE_MATHFN (BUILT_IN_SIGNBIT)
      CASE_MATHFN (BUILT_IN_SIGNIFICAND)
      CASE_MATHFN (BUILT_IN_SIN)
      CASE_MATHFN (BUILT_IN_SINCOS)
      CASE_MATHFN (BUILT_IN_SINH)
      CASE_MATHFN (BUILT_IN_SQRT)
      CASE_MATHFN (BUILT_IN_TAN)
      CASE_MATHFN (BUILT_IN_TANH)
      CASE_MATHFN (BUILT_IN_TGAMMA)
      CASE_MATHFN (BUILT_IN_TRUNC)
      CASE_MATHFN (BUILT_IN_Y0)
      CASE_MATHFN (BUILT_IN_Y1)
      CASE_MATHFN (BUILT_IN_YN)

      default:
        return NULL_TREE;
    }

  if (TYPE_MAIN_VARIANT (type) == double_type_node)
    fcode2 = fcode;
  else if (TYPE_MAIN_VARIANT (type) == float_type_node)
    fcode2 = fcodef;
  else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
    fcode2 = fcodel;
  else
    return NULL_TREE;

  if (implicit_p && !builtin_decl_implicit_p (fcode2))
    return NULL_TREE;

  return builtin_decl_explicit (fcode2);
}
/* Like mathfn_built_in_1 (), but always use the implicit array.  */

tree
mathfn_built_in (tree type, enum built_in_function fn)
{
  return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
}
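/* Usage sketch:

     tree fn = mathfn_built_in (float_type_node, BUILT_IN_SQRT);

   returns the decl for sqrtf when the implicit declaration is
   available on the target, and NULL_TREE otherwise.  */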
/* If errno must be maintained, expand the RTL to check if the result,
   TARGET, of a built-in function call, EXP, is NaN, and if so set
   errno to EDOM.  */

static void
expand_errno_check (tree exp, rtx target)
{
  rtx_code_label *lab = gen_label_rtx ();

  /* Test the result; if it is NaN, set errno=EDOM because
     the argument was not in the domain.  */
  do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
                           NULL_RTX, NULL, lab,
                           /* The jump is very likely.  */
                           REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));

#ifdef TARGET_EDOM
  /* If this built-in doesn't throw an exception, set errno directly.  */
  if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
    {
#ifdef GEN_ERRNO_RTX
      rtx errno_rtx = GEN_ERRNO_RTX;
#else
      rtx errno_rtx
          = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
#endif
      emit_move_insn (errno_rtx,
                      gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
      emit_label (lab);
      return;
    }
#endif

  /* Make sure the library call isn't expanded as a tail call.  */
  CALL_EXPR_TAILCALL (exp) = 0;

  /* We can't set errno=EDOM directly; let the library call do it.
     Pop the arguments right away in case the call gets deleted.  */
  NO_DEFER_POP;
  expand_call (exp, target, 0);
  OK_DEFER_POP;
  emit_label (lab);
}
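/* The self-comparison above exploits the IEEE rule that only a NaN
   compares unequal to itself.  Schematically, for r = sqrt (x) the
   emitted RTL behaves like

     if (r == r) goto lab;      // very likely: result is not NaN
     errno = EDOM;              // or re-issue the library call
   lab:;

   (an illustrative sketch, not literal code from this file).  */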
/* Expand a call to one of the builtin math functions (sqrt, exp, or log).
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  machine_mode mode;
  bool errno_set = false;
  bool try_widening = false;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SQRT):
      errno_set = ! tree_expr_nonnegative_p (arg);
      try_widening = true;
      builtin_optab = sqrt_optab;
      break;
    CASE_FLT_FN (BUILT_IN_EXP):
      errno_set = true; builtin_optab = exp_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
      errno_set = true; builtin_optab = exp10_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP2):
      errno_set = true; builtin_optab = exp2_optab; break;
    CASE_FLT_FN (BUILT_IN_EXPM1):
      errno_set = true; builtin_optab = expm1_optab; break;
    CASE_FLT_FN (BUILT_IN_LOGB):
      errno_set = true; builtin_optab = logb_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG):
      errno_set = true; builtin_optab = log_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG10):
      errno_set = true; builtin_optab = log10_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG2):
      errno_set = true; builtin_optab = log2_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG1P):
      errno_set = true; builtin_optab = log1p_optab; break;
    CASE_FLT_FN (BUILT_IN_ASIN):
      builtin_optab = asin_optab; break;
    CASE_FLT_FN (BUILT_IN_ACOS):
      builtin_optab = acos_optab; break;
    CASE_FLT_FN (BUILT_IN_TAN):
      builtin_optab = tan_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN):
      builtin_optab = atan_optab; break;
    CASE_FLT_FN (BUILT_IN_FLOOR):
      builtin_optab = floor_optab; break;
    CASE_FLT_FN (BUILT_IN_CEIL):
      builtin_optab = ceil_optab; break;
    CASE_FLT_FN (BUILT_IN_TRUNC):
      builtin_optab = btrunc_optab; break;
    CASE_FLT_FN (BUILT_IN_ROUND):
      builtin_optab = round_optab; break;
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
      builtin_optab = nearbyint_optab;
      if (flag_trapping_math)
        break;
      /* Else fallthrough and expand as rint.  */
    CASE_FLT_FN (BUILT_IN_RINT):
      builtin_optab = rint_optab; break;
    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
      builtin_optab = significand_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  /* Before working hard, check whether the instruction is available, but try
     to widen the mode for specific operations.  */
  if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
       || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
      && (!errno_set || !optimize_insn_for_size_p ()))
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
         need to expand the argument again.  This way, we will not perform
         side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into RESULT.
         Set RESULT to wherever the result comes back.  */
      result = expand_unop (mode, builtin_optab, op0, result, 0);

      if (result != 0)
        {
          if (errno_set)
            expand_errno_check (exp, result);

          /* Output the entire sequence.  */
          insns = get_insns ();
          end_sequence ();
          emit_insn (insns);
          return result;
        }

      /* If we were unable to expand via the builtin, stop the sequence
         (without outputting the insns) and call to the library function
         with the stabilized argument list.  */
      end_sequence ();
    }

  return expand_call (exp, target, target == const0_rtx);
}
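/* For instance, on a target whose machine description provides a
   "sqrtdf2" pattern, optab_handler (sqrt_optab, DFmode) yields its
   insn code and sqrt expands to that single insn; when no pattern
   exists, the expand_call fallback above emits the ordinary libm call
   instead.  */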
/* Expand a call to the builtin binary math functions (pow and atan2).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, result;
  rtx_insn *insns;
  int op1_type = REAL_TYPE;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1;
  machine_mode mode;
  bool errno_set = true;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
    CASE_FLT_FN (BUILT_IN_LDEXP):
      op1_type = INTEGER_TYPE;
    default:
      break;
    }

  if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_POW):
      builtin_optab = pow_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN2):
      builtin_optab = atan2_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALB):
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
        return 0;
      builtin_optab = scalb_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
        return 0;
      /* Fall through...  */
    CASE_FLT_FN (BUILT_IN_LDEXP):
      builtin_optab = ldexp_optab; break;
    CASE_FLT_FN (BUILT_IN_FMOD):
      builtin_optab = fmod_optab; break;
    CASE_FLT_FN (BUILT_IN_REMAINDER):
    CASE_FLT_FN (BUILT_IN_DREM):
      builtin_optab = remainder_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  result = gen_reg_rtx (mode);

  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  if (errno_set && optimize_insn_for_size_p ())
    return 0;

  /* Always stabilize the argument list.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);

  start_sequence ();

  /* Compute into RESULT.
     Set RESULT to wherever the result comes back.  */
  result = expand_binop (mode, builtin_optab, op0, op1,
                         result, 0, OPTAB_DIRECT);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (result == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  if (errno_set)
    expand_errno_check (exp, result);

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return result;
}
/* Expand a call to the builtin trinary math functions (fma).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, op2, result;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1, arg2;
  machine_mode mode;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  arg2 = CALL_EXPR_ARG (exp, 2);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_FMA):
      builtin_optab = fma_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  result = gen_reg_rtx (mode);

  /* Always stabilize the argument list.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
  CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);
  op2 = expand_normal (arg2);

  start_sequence ();

  /* Compute into RESULT.
     Set RESULT to wherever the result comes back.  */
  result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
                              result, 0);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (result == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return result;
}
/* Expand a call to the builtin sin and cos math functions.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      builtin_optab = sincos_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Check if sincos insn is available, otherwise fallback
     to sin or cos insn.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_FLT_FN (BUILT_IN_SIN):
        builtin_optab = sin_optab; break;
      CASE_FLT_FN (BUILT_IN_COS):
        builtin_optab = cos_optab; break;
      default:
        gcc_unreachable ();
      }

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
         need to expand the argument again.  This way, we will not perform
         side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into RESULT.
         Set RESULT to wherever the result comes back.  */
      if (builtin_optab == sincos_optab)
        {
          int ok;

          switch (DECL_FUNCTION_CODE (fndecl))
            {
            CASE_FLT_FN (BUILT_IN_SIN):
              ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
              break;
            CASE_FLT_FN (BUILT_IN_COS):
              ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
              break;
            default:
              gcc_unreachable ();
            }
          gcc_assert (ok);
        }
      else
        result = expand_unop (mode, builtin_optab, op0, result, 0);

      if (result != 0)
        {
          /* Output the entire sequence.  */
          insns = get_insns ();
          end_sequence ();
          emit_insn (insns);
          return result;
        }

      /* If we were unable to expand via the builtin, stop the sequence
         (without outputting the insns) and call to the library function
         with the stabilized argument list.  */
      end_sequence ();
    }

  return expand_call (exp, target, target == const0_rtx);
}
/* Given an interclass math builtin decl FNDECL and its argument ARG
   return an RTL instruction code that implements the functionality.
   If that isn't possible or available return CODE_FOR_nothing.  */

static enum insn_code
interclass_mathfn_icode (tree arg, tree fndecl)
{
  bool errno_set = false;
  optab builtin_optab = unknown_optab;
  machine_mode mode;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ILOGB):
      errno_set = true; builtin_optab = ilogb_optab; break;
    CASE_FLT_FN (BUILT_IN_ISINF):
      builtin_optab = isinf_optab; break;
    case BUILT_IN_ISNORMAL:
    case BUILT_IN_ISFINITE:
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      /* These builtins have no optabs (yet).  */
      break;
    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && errno_set)
    return CODE_FOR_nothing;

  /* Optab mode depends on the mode of the input argument.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (builtin_optab)
    return optab_handler (builtin_optab, mode);
  return CODE_FOR_nothing;
}
/* Expand a call to one of the builtin math functions that operate on
   floating point argument and output an integer result (ilogb, isinf,
   isnan, etc).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_interclass_mathfn (tree exp, rtx target)
{
  enum insn_code icode = CODE_FOR_nothing;
  rtx op0;
  tree fndecl = get_callee_fndecl (exp);
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  icode = interclass_mathfn_icode (arg, fndecl);
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[1];
      rtx_insn *last = get_last_insn ();
      tree orig_arg = arg;

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
         need to expand the argument again.  This way, we will not perform
         side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      if (mode != GET_MODE (op0))
        op0 = convert_to_mode (mode, op0, 0);

      create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_legitimize_operands (icode, 0, 1, ops)
          && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
        return ops[0].value;

      delete_insns_since (last);
      CALL_EXPR_ARG (exp, 0) = orig_arg;
    }

  return NULL_RTX;
}
/* Expand a call to the builtin sincos math function.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function.  */

static rtx
expand_builtin_sincos (tree exp)
{
  rtx op0, op1, op2, target1, target2;
  machine_mode mode;
  tree arg, sinp, cosp;
  int result;
  location_t loc = EXPR_LOCATION (exp);
  tree alias_type, alias_off;

  if (!validate_arglist (exp, REAL_TYPE,
                         POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  sinp = CALL_EXPR_ARG (exp, 1);
  cosp = CALL_EXPR_ARG (exp, 2);

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Check if sincos insn is available, otherwise emit the call.  */
  if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  target1 = gen_reg_rtx (mode);
  target2 = gen_reg_rtx (mode);

  op0 = expand_normal (arg);
  alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
  alias_off = build_int_cst (alias_type, 0);
  op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
                                        sinp, alias_off));
  op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
                                        cosp, alias_off));

  /* Compute into target1 and target2.
     Set TARGET to wherever the result comes back.  */
  result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
  gcc_assert (result);

  /* Move target1 and target2 to the memory locations indicated
     by op1 and op2.  */
  emit_move_insn (op1, target1);
  emit_move_insn (op2, target2);

  return const0_rtx;
}
/* Expand a call to the internal cexpi builtin to the sincos math function.
   EXP is the expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  */

static rtx
expand_builtin_cexpi (tree exp, rtx target)
{
  tree fndecl = get_callee_fndecl (exp);
  tree arg, type;
  machine_mode mode;
  rtx op0, op1, op2;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Try expanding via a sincos optab, fall back to emitting a libcall
     to sincos or cexp.  We are sure we have sincos or cexp because cexpi
     is only generated from sincos, cexp or if we have either of them.  */
  if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
    {
      op1 = gen_reg_rtx (mode);
      op2 = gen_reg_rtx (mode);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      /* Compute into op1 and op2.  */
      expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
    }
  else if (targetm.libc_has_function (function_sincos))
    {
      tree call, fn = NULL_TREE;
      tree top1, top2;
      rtx op1a, op2a;

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
        fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
        fn = builtin_decl_explicit (BUILT_IN_SINCOS);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
        fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
      else
        gcc_unreachable ();

      op1 = assign_temp (TREE_TYPE (arg), 1, 1);
      op2 = assign_temp (TREE_TYPE (arg), 1, 1);
      op1a = copy_addr_to_reg (XEXP (op1, 0));
      op2a = copy_addr_to_reg (XEXP (op2, 0));
      top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
      top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);

      /* Make sure not to fold the sincos call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
                                      call, 3, arg, top1, top2));
    }
  else
    {
      tree call, fn = NULL_TREE, narg;
      tree ctype = build_complex_type (type);

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
        fn = builtin_decl_explicit (BUILT_IN_CEXPF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
        fn = builtin_decl_explicit (BUILT_IN_CEXP);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
        fn = builtin_decl_explicit (BUILT_IN_CEXPL);
      else
        gcc_unreachable ();

      /* If we don't have a decl for cexp create one.  This is the
         friendliest fallback if the user calls __builtin_cexpi
         without full target C99 function support.  */
      if (fn == NULL_TREE)
        {
          tree fntype;
          const char *name = NULL;

          if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
            name = "cexpf";
          else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
            name = "cexp";
          else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
            name = "cexpl";

          fntype = build_function_type_list (ctype, ctype, NULL_TREE);
          fn = build_fn_decl (name, fntype);
        }

      narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
                              build_real (type, dconst0), arg);

      /* Make sure not to fold the cexp call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      return expand_expr (build_call_nary (ctype, call, 1, narg),
                          target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now build the proper return type.  */
  return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
                              make_tree (TREE_TYPE (arg), op2),
                              make_tree (TREE_TYPE (arg), op1)),
                      target, VOIDmode, EXPAND_NORMAL);
}
/* Conveniently construct a function call expression.  FNDECL names the
   function to be called, N is the number of arguments, and the "..."
   parameters are the argument expressions.  Unlike build_call_expr
   this doesn't fold the call, hence it will always return a CALL_EXPR.  */

static tree
build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
{
  va_list ap;
  tree fntype = TREE_TYPE (fndecl);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);

  va_start (ap, n);
  fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
  va_end (ap);
  SET_EXPR_LOCATION (fn, loc);
  return fn;
}
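/* Usage sketch:

     tree call = build_call_nofold_loc (loc, fndecl, 2, dst, src);

   yields the unfolded CALL_EXPR "fndecl (dst, src)" carrying location
   LOC; the ordinary build_call_expr path might fold such a call away
   again.  */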
/* Expand a call to one of the builtin rounding functions gcc defines
   as an extension (lfloor and lceil).  As these are gcc extensions we
   do not need to worry about setting errno to EDOM.
   If expanding via optab fails, lower expression to (int)(floor(x)).
   EXP is the expression that is a call to the builtin function;
   if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, tmp;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fallback_fn;
  tree fallback_fndecl;
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
      builtin_optab = lceil_optab;
      fallback_fn = BUILT_IN_CEIL;
      break;

    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      builtin_optab = lfloor_optab;
      fallback_fn = BUILT_IN_FLOOR;
      break;

    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  /* Compute into TARGET.  */
  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns).  */
  end_sequence ();

  /* Fall back to floating point rounding optab.  */
  fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);

  /* For non-C99 targets we may end up without a fallback fndecl here
     if the user called __builtin_lfloor directly.  In this case emit
     a call to the floor/ceil variants nevertheless.  This should result
     in the best user experience for not full C99 targets.  */
  if (fallback_fndecl == NULL_TREE)
    {
      tree fntype;
      const char *name = NULL;

      switch (DECL_FUNCTION_CODE (fndecl))
        {
        case BUILT_IN_ICEIL:
        case BUILT_IN_LCEIL:
        case BUILT_IN_LLCEIL:
          name = "ceil";
          break;
        case BUILT_IN_ICEILF:
        case BUILT_IN_LCEILF:
        case BUILT_IN_LLCEILF:
          name = "ceilf";
          break;
        case BUILT_IN_ICEILL:
        case BUILT_IN_LCEILL:
        case BUILT_IN_LLCEILL:
          name = "ceill";
          break;
        case BUILT_IN_IFLOOR:
        case BUILT_IN_LFLOOR:
        case BUILT_IN_LLFLOOR:
          name = "floor";
          break;
        case BUILT_IN_IFLOORF:
        case BUILT_IN_LFLOORF:
        case BUILT_IN_LLFLOORF:
          name = "floorf";
          break;
        case BUILT_IN_IFLOORL:
        case BUILT_IN_LFLOORL:
        case BUILT_IN_LLFLOORL:
          name = "floorl";
          break;
        default:
          gcc_unreachable ();
        }

      fntype = build_function_type_list (TREE_TYPE (arg),
                                         TREE_TYPE (arg), NULL_TREE);
      fallback_fndecl = build_fn_decl (name, fntype);
    }

  exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);

  tmp = expand_normal (exp);
  tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));

  /* Truncate the result of floating point optab to integer
     via expand_fix ().  */
  target = gen_reg_rtx (mode);
  expand_fix (target, tmp, 0);

  return target;
}
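/* Illustratively, "long l = __builtin_lfloor (x);" uses the target's
   lfloor pattern when one is available; otherwise the fallback above
   lowers it to the equivalent of "long l = (long) floor (x);", using a
   floor decl built by hand if the target lacks C99 math.  */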
/* Expand a call to one of the builtin math functions doing integer
   conversion (lrint).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn_2 (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg;
  machine_mode mode;
  enum built_in_function fallback_fn = BUILT_IN_NONE;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_IRINT):
      fallback_fn = BUILT_IN_LRINT;
      /* FALLTHRU */
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      builtin_optab = lrint_optab;
      break;

    CASE_FLT_FN (BUILT_IN_IROUND):
      fallback_fn = BUILT_IN_LROUND;
      /* FALLTHRU */
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      builtin_optab = lround_optab;
      break;

    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
    return NULL_RTX;

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (!flag_errno_math)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
         need to expand the argument again.  This way, we will not perform
         side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      if (expand_sfix_optab (result, op0, builtin_optab))
        {
          /* Output the entire sequence.  */
          insns = get_insns ();
          end_sequence ();
          emit_insn (insns);
          return result;
        }

      /* If we were unable to expand via the builtin, stop the sequence
         (without outputting the insns) and call to the library function
         with the stabilized argument list.  */
      end_sequence ();
    }

  if (fallback_fn != BUILT_IN_NONE)
    {
      /* Fall back to rounding to long int.  Use implicit_p 0 - for non-C99
         targets, (int) round (x) should never be transformed into
         BUILT_IN_IROUND and if __builtin_iround is called directly, emit
         a call to lround in the hope that the target provides at least some
         C99 functions.  This should result in the best user experience for
         not full C99 targets.  */
      tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
                                                fallback_fn, 0);

      exp = build_call_nofold_loc (EXPR_LOCATION (exp),
                                   fallback_fndecl, 1, arg);

      target = expand_call (exp, NULL_RTX, target == const0_rtx);
      target = maybe_emit_group_store (target, TREE_TYPE (exp));
      return convert_to_mode (mode, target, 0);
    }

  return expand_call (exp, target, target == const0_rtx);
}
/* Expand a call to the powi built-in mathematical function.  Return NULL_RTX if
   a normal call should be emitted rather than expanding the function
   in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_powi (tree exp, rtx target)
{
  tree arg0, arg1;
  rtx op0, op1;
  machine_mode mode;
  machine_mode mode2;

  if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Emit a libcall to libgcc.  */

  /* Mode of the 2nd argument must match that of an int.  */
  mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);

  if (target == NULL_RTX)
    target = gen_reg_rtx (mode);

  op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != mode)
    op0 = convert_to_mode (mode, op0, 0);
  op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
  if (GET_MODE (op1) != mode2)
    op1 = convert_to_mode (mode2, op1, 0);

  target = emit_library_call_value (optab_libfunc (powi_optab, mode),
                                    target, LCT_CONST, mode, 2,
                                    op0, mode, op1, mode2);

  return target;
}
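/* For example, a __builtin_powi (x, n) call on doubles that reaches
   this expander becomes a call to the libgcc routine __powidf2,
   located through optab_libfunc (powi_optab, DFmode).  */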
/* Expand expression EXP which is a call to the strlen builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strlen (tree exp, rtx target,
                       machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      struct expand_operand ops[4];
      rtx pat;
      tree len;
      tree src = CALL_EXPR_ARG (exp, 0);
      rtx src_reg;
      rtx_insn *before_strlen;
      machine_mode insn_mode = target_mode;
      enum insn_code icode = CODE_FOR_nothing;
      unsigned int align;

      /* If the length can be computed at compile-time, return it.  */
      len = c_strlen (src, 0);
      if (len)
        return expand_expr (len, target, target_mode, EXPAND_NORMAL);

      /* If the length can be computed at compile-time and is constant
         integer, but there are side-effects in src, evaluate
         src for side-effects, then return len.
         E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
         can be optimized into: i++; x = 3;  */
      len = c_strlen (src, 1);
      if (len && TREE_CODE (len) == INTEGER_CST)
        {
          expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
          return expand_expr (len, target, target_mode, EXPAND_NORMAL);
        }

      align = get_pointer_alignment (src) / BITS_PER_UNIT;

      /* If SRC is not a pointer type, don't do this operation inline.  */
      if (align == 0)
        return NULL_RTX;

      /* Bail out if we can't compute strlen in the right mode.  */
      while (insn_mode != VOIDmode)
        {
          icode = optab_handler (strlen_optab, insn_mode);
          if (icode != CODE_FOR_nothing)
            break;

          insn_mode = GET_MODE_WIDER_MODE (insn_mode);
        }
      if (insn_mode == VOIDmode)
        return NULL_RTX;

      /* Make a place to hold the source address.  We will not expand
         the actual source until we are sure that the expansion will
         not fail -- there are trees that cannot be expanded twice.  */
      src_reg = gen_reg_rtx (Pmode);

      /* Mark the beginning of the strlen sequence so we can emit the
         source operand later.  */
      before_strlen = get_last_insn ();

      create_output_operand (&ops[0], target, insn_mode);
      create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
      create_integer_operand (&ops[2], 0);
      create_integer_operand (&ops[3], align);
      if (!maybe_expand_insn (icode, 4, ops))
        return NULL_RTX;

      /* Now that we are assured of success, expand the source.  */
      start_sequence ();
      pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
      if (pat != src_reg)
        {
#ifdef POINTERS_EXTEND_UNSIGNED
          if (GET_MODE (pat) != Pmode)
            pat = convert_to_mode (Pmode, pat,
                                   POINTERS_EXTEND_UNSIGNED);
#endif
          emit_move_insn (src_reg, pat);
        }
      pat = get_insns ();
      end_sequence ();

      if (before_strlen)
        emit_insn_after (pat, before_strlen);
      else
        emit_insn_before (pat, get_insns ());

      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (ops[0].value) == target_mode)
        target = ops[0].value;
      else if (target != 0)
        convert_move (target, ops[0].value, 0);
      else
        target = convert_to_mode (target_mode, ops[0].value, 0);

      return target;
    }
}
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

static rtx
builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
                         machine_mode mode)
{
  const char *str = (const char *) data;

  gcc_assert (offset >= 0
              && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
                  <= strlen (str) + 1));

  return c_readstr (str + offset, mode);
}
/* LEN specifies the length of the block of the memcpy/memset operation.
   Figure out its range and put it into MIN_SIZE/MAX_SIZE.
   In some cases we can make a very likely guess on the max size, then we
   set it into PROBABLE_MAX_SIZE.  */

static void
determine_block_size (tree len, rtx len_rtx,
                      unsigned HOST_WIDE_INT *min_size,
                      unsigned HOST_WIDE_INT *max_size,
                      unsigned HOST_WIDE_INT *probable_max_size)
{
  if (CONST_INT_P (len_rtx))
    {
      *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
      return;
    }
  else
    {
      wide_int min, max;
      enum value_range_type range_type = VR_UNDEFINED;

      /* Determine bounds from the type.  */
      if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
        *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
      else
        *min_size = 0;
      if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
        *probable_max_size = *max_size
          = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
      else
        *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));

      if (TREE_CODE (len) == SSA_NAME)
        range_type = get_range_info (len, &min, &max);
      if (range_type == VR_RANGE)
        {
          if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
            *min_size = min.to_uhwi ();
          if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
            *probable_max_size = *max_size = max.to_uhwi ();
        }
      else if (range_type == VR_ANTI_RANGE)
        {
          /* An anti range 0...N lets us determine that the minimal size
             is N+1.  */
          if (min == 0)
            {
              if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
                *min_size = max.to_uhwi () + 1;
            }
          /* Code like

             int n;
             if (n < 100)
               memcpy (a, b, n)

             produces an anti range allowing negative values of N.  We still
             can use the information and make a guess that N is not negative.
             */
          else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
            *probable_max_size = min.to_uhwi () - 1;
        }
    }
  gcc_checking_assert (*max_size <=
                       (unsigned HOST_WIDE_INT)
                          GET_MODE_MASK (GET_MODE (len_rtx)));
}
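/* Usage sketch:

     unsigned HOST_WIDE_INT min, max, probable_max;
     determine_block_size (len, len_rtx, &min, &max, &probable_max);

   For code such as "n = i & 255; memcpy (d, s, n);" the SSA range
   information lets this report min == 0 and max == 255, which the
   block-move expanders use to pick an inline strategy.  */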
/* Helper function to do the actual work for expand_builtin_memcpy.  */

static rtx
expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
{
  const char *src_str;
  unsigned int src_align = get_pointer_alignment (src);
  unsigned int dest_align = get_pointer_alignment (dest);
  rtx dest_mem, src_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;
  unsigned HOST_WIDE_INT min_size;
  unsigned HOST_WIDE_INT max_size;
  unsigned HOST_WIDE_INT probable_max_size;

  /* If DEST is not a pointer type, call the normal function.  */
  if (dest_align == 0)
    return NULL_RTX;

  /* If either SRC is not a pointer type, don't do this
     operation in-line.  */
  if (src_align == 0)
    return NULL_RTX;

  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
                            &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;
  dest_mem = get_memory_rtx (dest, len);
  set_mem_align (dest_mem, dest_align);
  len_rtx = expand_normal (len);
  determine_block_size (len, len_rtx, &min_size, &max_size,
                        &probable_max_size);
  src_str = c_getstr (src);

  /* If SRC is a string constant and block move would be done
     by pieces, we can avoid loading the string from memory
     and only store the computed constants.  */
  if (src_str
      && CONST_INT_P (len_rtx)
      && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
      && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
                              CONST_CAST (char *, src_str),
                              dest_align, false))
    {
      dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
                                  builtin_memcpy_read_str,
                                  CONST_CAST (char *, src_str),
                                  dest_align, false, 0);
      dest_mem = force_operand (XEXP (dest_mem, 0), target);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  src_mem = get_memory_rtx (src, len);
  set_mem_align (src_mem, src_align);

  /* Copy word part most expediently.  */
  dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
                                     CALL_EXPR_TAILCALL (exp)
                                     ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
                                     expected_align, expected_size,
                                     min_size, max_size, probable_max_size);

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), target);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  return dest_addr;
}
/* Expand a call EXP to the memcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call.
   Otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memcpy (tree exp, rtx target)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      return expand_builtin_memcpy_args (dest, src, len, target, exp);
    }
}
/* Expand an instrumented call EXP to the memcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call.
   Otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memcpy_with_bounds (tree exp, rtx target)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_BOUNDS_TYPE,
                         POINTER_TYPE, POINTER_BOUNDS_TYPE,
                         INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 2);
      tree len = CALL_EXPR_ARG (exp, 4);
      rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);

      /* Return src bounds with the result.  */
      if (res)
        {
          rtx bnd = force_reg (targetm.chkp_bound_mode (),
                               expand_normal (CALL_EXPR_ARG (exp, 1)));
          res = chkp_join_splitted_slot (res, bnd);
        }
      return res;
    }
}
/* Expand a call EXP to the mempcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call.
   Otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  If ENDP is 0 return the
   destination pointer, if ENDP is 1 return the end pointer ala
   mempcpy, and if ENDP is 2 return the end pointer minus one ala
   stpcpy.  */

static rtx
expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      return expand_builtin_mempcpy_args (dest, src, len,
                                          target, mode, /*endp=*/ 1,
                                          exp);
    }
}
/* Expand an instrumented call EXP to the mempcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call.
   Otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_BOUNDS_TYPE,
                         POINTER_TYPE, POINTER_BOUNDS_TYPE,
                         INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 2);
      tree len = CALL_EXPR_ARG (exp, 4);
      rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
                                             mode, 1, exp);

      /* Return src bounds with the result.  */
      if (res)
        {
          rtx bnd = force_reg (targetm.chkp_bound_mode (),
                               expand_normal (CALL_EXPR_ARG (exp, 1)));
          res = chkp_join_splitted_slot (res, bnd);
        }
      return res;
    }
}
/* Helper function to do the actual work for expand_builtin_mempcpy.  The
   arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_mempcpy.  */

static rtx
expand_builtin_mempcpy_args (tree dest, tree src, tree len,
                             rtx target, machine_mode mode, int endp,
                             tree orig_exp)
{
  tree fndecl = get_callee_fndecl (orig_exp);

  /* If return value is ignored, transform mempcpy into memcpy.  */
  if (target == const0_rtx
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
      && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
      tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
                                           dest, src, len);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else if (target == const0_rtx
           && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
      tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
                                           dest, src, len);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src);
      unsigned int dest_align = get_pointer_alignment (dest);
      rtx dest_mem, src_mem, len_rtx;

      /* If either SRC or DEST is not a pointer type, don't do this
         operation in-line.  */
      if (dest_align == 0 || src_align == 0)
        return NULL_RTX;

      /* If LEN is not constant, call the normal function.  */
      if (! tree_fits_uhwi_p (len))
        return NULL_RTX;

      len_rtx = expand_normal (len);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
         by pieces, we can avoid loading the string from memory
         and only store the computed constants.  */
      if (src_str
          && CONST_INT_P (len_rtx)
          && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
          && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
                                  CONST_CAST (char *, src_str),
                                  dest_align, false))
        {
          dest_mem = get_memory_rtx (dest, len);
          set_mem_align (dest_mem, dest_align);
          dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
                                      builtin_memcpy_read_str,
                                      CONST_CAST (char *, src_str),
                                      dest_align, false, endp);
          dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
          dest_mem = convert_memory_address (ptr_mode, dest_mem);
          return dest_mem;
        }

      if (CONST_INT_P (len_rtx)
          && can_move_by_pieces (INTVAL (len_rtx),
                                 MIN (dest_align, src_align)))
        {
          dest_mem = get_memory_rtx (dest, len);
          set_mem_align (dest_mem, dest_align);
          src_mem = get_memory_rtx (src, len);
          set_mem_align (src_mem, src_align);
          dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
                                     MIN (dest_align, src_align), endp);
          dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
          dest_mem = convert_memory_address (ptr_mode, dest_mem);
          return dest_mem;
        }

      return NULL_RTX;
    }
}
#ifndef HAVE_movstr
# define HAVE_movstr 0
# define CODE_FOR_movstr CODE_FOR_nothing
#endif

/* Expand into a movstr instruction, if one is available.  Return NULL_RTX if
   we failed, the caller should emit a normal call, otherwise try to
   get the result in TARGET, if convenient.  If ENDP is 0 return the
   destination pointer, if ENDP is 1 return the end pointer ala
   mempcpy, and if ENDP is 2 return the end pointer minus one ala
   stpcpy.  */

static rtx
expand_movstr (tree dest, tree src, rtx target, int endp)
{
  struct expand_operand ops[3];
  rtx dest_mem;
  rtx src_mem;

  if (!HAVE_movstr)
    return NULL_RTX;

  dest_mem = get_memory_rtx (dest, NULL);
  src_mem = get_memory_rtx (src, NULL);
  if (!endp)
    {
      target = force_reg (Pmode, XEXP (dest_mem, 0));
      dest_mem = replace_equiv_address (dest_mem, target);
    }

  create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
  create_fixed_operand (&ops[1], dest_mem);
  create_fixed_operand (&ops[2], src_mem);
  if (!maybe_expand_insn (CODE_FOR_movstr, 3, ops))
    return NULL_RTX;

  if (endp && target != const0_rtx)
    {
      target = ops[0].value;
      /* movstr is supposed to set end to the address of the NUL
         terminator.  If the caller requested a mempcpy-like return value,
         adjust it.  */
      if (endp == 1 && target != const0_rtx)
        {
          rtx tem = plus_constant (GET_MODE (target),
                                   gen_lowpart (GET_MODE (target), target), 1);
          emit_move_insn (target, force_operand (tem, NULL_RTX));
        }
    }
  return target;
}
/* Expand expression EXP, which is a call to the strcpy builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
   try to get the result in TARGET, if convenient (and in mode MODE if that's
   convenient).  */

static rtx
expand_builtin_strcpy (tree exp, rtx target)
{
  if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      return expand_builtin_strcpy_args (dest, src, target);
    }
  return NULL_RTX;
}

/* Helper function to do the actual work for expand_builtin_strcpy.  The
   arguments to the builtin_strcpy call DEST and SRC are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_strcpy.  */

static rtx
expand_builtin_strcpy_args (tree dest, tree src, rtx target)
{
  return expand_movstr (dest, src, target, /*endp=*/0);
}
/* Expand a call EXP to the stpcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call.
   Otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
{
  tree dst, src;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* If return value is ignored, transform stpcpy into strcpy.  */
  if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      tree len, lenp1;
      rtx ret;

      /* Ensure we get an actual string whose length can be evaluated at
         compile-time, not an expression containing a string.  This is
         because the latter will potentially produce pessimized code
         when used to produce the return value.  */
      if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
        return expand_movstr (dst, src, target, /*endp=*/2);

      lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
      ret = expand_builtin_mempcpy_args (dst, src, lenp1,
                                         target, mode, /*endp=*/2,
                                         exp);

      if (ret)
        return ret;

      if (TREE_CODE (len) == INTEGER_CST)
        {
          rtx len_rtx = expand_normal (len);

          if (CONST_INT_P (len_rtx))
            {
              ret = expand_builtin_strcpy_args (dst, src, target);

              if (ret)
                {
                  if (! target)
                    {
                      if (mode != VOIDmode)
                        target = gen_reg_rtx (mode);
                      else
                        target = gen_reg_rtx (GET_MODE (ret));
                    }
                  if (GET_MODE (target) != GET_MODE (ret))
                    ret = gen_lowpart (GET_MODE (target), ret);

                  ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
                  ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
                  gcc_assert (ret);

                  return target;
                }
            }
        }

      return expand_movstr (dst, src, target, /*endp=*/2);
    }
}
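/* Illustratively, "char *e = stpcpy (d, "abc");" has a compile-time
   length, so the code above copies the four bytes with a mempcpy-style
   expansion and materializes the return value as d + 3, the address of
   the NUL terminator.  */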
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

static rtx
builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
                          machine_mode mode)
{
  const char *str = (const char *) data;

  if ((unsigned HOST_WIDE_INT) offset > strlen (str))
    return const0_rtx;

  return c_readstr (str + offset, mode);
}
/* Expand expression EXP, which is a call to the strncpy builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  */

static rtx
expand_builtin_strncpy (tree exp, rtx target)
{
  location_t loc = EXPR_LOCATION (exp);

  if (validate_arglist (exp,
                        POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      tree slen = c_strlen (src, 1);

      /* We must be passed a constant len and src parameter.  */
      if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
        return NULL_RTX;

      slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

      /* We're required to pad with trailing zeros if the requested
         len is greater than strlen(s2)+1.  In that case try to
         use store_by_pieces, if it fails, punt.  */
      if (tree_int_cst_lt (slen, len))
        {
          unsigned int dest_align = get_pointer_alignment (dest);
          const char *p = c_getstr (src);
          rtx dest_mem;

          if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
              || !can_store_by_pieces (tree_to_uhwi (len),
                                       builtin_strncpy_read_str,
                                       CONST_CAST (char *, p),
                                       dest_align, false))
            return NULL_RTX;

          dest_mem = get_memory_rtx (dest, len);
          store_by_pieces (dest_mem, tree_to_uhwi (len),
                           builtin_strncpy_read_str,
                           CONST_CAST (char *, p), dest_align, false, 0);
          dest_mem = force_operand (XEXP (dest_mem, 0), target);
          dest_mem = convert_memory_address (ptr_mode, dest_mem);
          return dest_mem;
        }
    }
  return NULL_RTX;
}
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

static rtx
builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                         machine_mode mode)
{
  const char *c = (const char *) data;
  char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));

  memset (p, *c, GET_MODE_SIZE (mode));

  return c_readstr (p, mode);
}
/* Callback routine for store_by_pieces.  Return the RTL of a register
   containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
   char value given in the RTL register data.  For example, if mode is
   4 bytes wide, return the RTL for 0x01010101*data.  */

static rtx
builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                        machine_mode mode)
{
  rtx target, coeff;
  size_t size;
  char *p;

  size = GET_MODE_SIZE (mode);
  if (size == 1)
    return (rtx) data;

  p = XALLOCAVEC (char, size);
  memset (p, 1, size);
  coeff = c_readstr (p, mode);

  target = convert_to_mode (mode, (rtx) data, 1);
  target = expand_mult (mode, target, coeff, NULL_RTX, 1);
  return force_reg (mode, target);
}
/* Expand expression EXP, which is a call to the memset builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
   try to get the result in TARGET, if convenient (and in mode MODE if that's
   convenient).  */

static rtx
expand_builtin_memset (tree exp, rtx target, machine_mode mode)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree val = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      return expand_builtin_memset_args (dest, val, len, target, mode, exp);
    }
}
/* Expand expression EXP, which is an instrumented call to the memset builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call.
   Otherwise try to get the result in TARGET, if convenient (and in mode MODE
   if that's convenient).  */

static rtx
expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_BOUNDS_TYPE,
                         INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree val = CALL_EXPR_ARG (exp, 2);
      tree len = CALL_EXPR_ARG (exp, 3);
      rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);

      /* Return src bounds with the result.  */
      if (res)
        {
          rtx bnd = force_reg (targetm.chkp_bound_mode (),
                               expand_normal (CALL_EXPR_ARG (exp, 1)));
          res = chkp_join_splitted_slot (res, bnd);
        }
      return res;
    }
}
/* Helper function to do the actual work for expand_builtin_memset.  The
   arguments to the builtin_memset call DEST, VAL, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_memset.  */

static rtx
expand_builtin_memset_args (tree dest, tree val, tree len,
                            rtx target, machine_mode mode, tree orig_exp)
{
  tree fndecl, fn;
  enum built_in_function fcode;
  machine_mode val_mode;
  char c;
  unsigned int dest_align;
  rtx dest_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;
  unsigned HOST_WIDE_INT min_size;
  unsigned HOST_WIDE_INT max_size;
  unsigned HOST_WIDE_INT probable_max_size;

  dest_align = get_pointer_alignment (dest);

  /* If DEST is not a pointer type, don't do this operation in-line.  */
  if (dest_align == 0)
    return NULL_RTX;

  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
                            &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Evaluate and ignore VAL in case it has side-effects.  */
      expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (dest, target, mode, EXPAND_NORMAL);
    }

  /* Stabilize the arguments in case we fail.  */
  dest = builtin_save_expr (dest);
  val = builtin_save_expr (val);
  len = builtin_save_expr (len);

  len_rtx = expand_normal (len);
  determine_block_size (len, len_rtx, &min_size, &max_size,
                        &probable_max_size);
  dest_mem = get_memory_rtx (dest, len);
  val_mode = TYPE_MODE (unsigned_char_type_node);

  if (TREE_CODE (val) != INTEGER_CST)
    {
      rtx val_rtx;

      val_rtx = expand_normal (val);
      val_rtx = convert_to_mode (val_mode, val_rtx, 0);

      /* Assume that we can memset by pieces if we can store
       * the coefficients by pieces (in the required modes).
       * We can't pass builtin_memset_gen_str as that emits RTL.  */
      c = 1;
      if (tree_fits_uhwi_p (len)
          && can_store_by_pieces (tree_to_uhwi (len),
                                  builtin_memset_read_str, &c, dest_align,
                                  true))
        {
          val_rtx = force_reg (val_mode, val_rtx);
          store_by_pieces (dest_mem, tree_to_uhwi (len),
                           builtin_memset_gen_str, val_rtx, dest_align,
                           true, 0);
        }
      else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
                                        dest_align, expected_align,
                                        expected_size, min_size, max_size,
                                        probable_max_size))
        goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  if (target_char_cast (val, &c))
    goto do_libcall;

  if (c)
    {
      if (tree_fits_uhwi_p (len)
          && can_store_by_pieces (tree_to_uhwi (len),
                                  builtin_memset_read_str, &c, dest_align,
                                  true))
        store_by_pieces (dest_mem, tree_to_uhwi (len),
                         builtin_memset_read_str, &c, dest_align, true, 0);
      else if (!set_storage_via_setmem (dest_mem, len_rtx,
                                        gen_int_mode (c, val_mode),
                                        dest_align, expected_align,
                                        expected_size, min_size, max_size,
                                        probable_max_size))
        goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  set_mem_align (dest_mem, dest_align);
  dest_addr = clear_storage_hints (dest_mem, len_rtx,
                                   CALL_EXPR_TAILCALL (orig_exp)
                                   ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
                                   expected_align, expected_size,
                                   min_size, max_size,
                                   probable_max_size);

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  return dest_addr;

 do_libcall:
  fndecl = get_callee_fndecl (orig_exp);
  fcode = DECL_FUNCTION_CODE (fndecl);
  if (fcode == BUILT_IN_MEMSET
      || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
                                dest, val, len);
  else if (fcode == BUILT_IN_BZERO)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
                                dest, len);
  else
    gcc_unreachable ();
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
  return expand_call (fn, target, target == const0_rtx);
}
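
/* Summary sketch (added commentary, not part of the original source):
   the expansion above tries, in order,
     1) store_by_pieces, when LEN is a known constant and the target can
        store that many bytes in suitable modes,
     2) the setmem insn pattern via set_storage_via_setmem, and
     3) a library call built with build_call_nofold_loc, preserving the
        tail-call flag of the original expression.
   Only a known-zero VAL takes the clear_storage_hints path instead.  */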
/* Expand expression EXP, which is a call to the bzero builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  */

static rtx
expand_builtin_bzero (tree exp)
{
  tree dest, size;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dest = CALL_EXPR_ARG (exp, 0);
  size = CALL_EXPR_ARG (exp, 1);

  /* New argument list transforming bzero(ptr x, int y) to
     memset(ptr x, int 0, size_t y).  This is done this way
     so that if it isn't expanded inline, we fall back to
     calling bzero instead of memset.  */

  return expand_builtin_memset_args (dest, integer_zero_node,
                                     fold_convert_loc (loc,
                                                       size_type_node, size),
                                     const0_rtx, VOIDmode, exp);
}
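
/* Illustrative example (added commentary, not part of the original
   source): at the source level the rewrite above corresponds to

     bzero (p, n);   =>   memset (p, 0, (size_t) n);

   but because ORIG_EXP is still the bzero CALL_EXPR, a failed inline
   expansion falls back to calling bzero itself, not memset.  */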
/* Expand expression EXP, which is a call to the memcmp built-in function.
   Return NULL_RTX if we failed and the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in mode
   MODE, if that's convenient).  */

static rtx
expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
                       ATTRIBUTE_UNUSED machine_mode mode)
{
  location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
     implementing memcmp because it will stop if it encounters two
     zero bytes.  */
#if defined HAVE_cmpmemsi
  {
    rtx arg1_rtx, arg2_rtx, arg3_rtx;
    rtx result;
    rtx insn;
    tree arg1 = CALL_EXPR_ARG (exp, 0);
    tree arg2 = CALL_EXPR_ARG (exp, 1);
    tree len = CALL_EXPR_ARG (exp, 2);

    unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
    unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
    machine_mode insn_mode;

    if (HAVE_cmpmemsi)
      insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
    else
      return NULL_RTX;

    /* If we don't have POINTER_TYPE, call the function.  */
    if (arg1_align == 0 || arg2_align == 0)
      return NULL_RTX;

    /* Make a place to write the result of the instruction.  */
    result = target;
    if (! (result != 0
           && REG_P (result) && GET_MODE (result) == insn_mode
           && REGNO (result) >= FIRST_PSEUDO_REGISTER))
      result = gen_reg_rtx (insn_mode);

    arg1_rtx = get_memory_rtx (arg1, len);
    arg2_rtx = get_memory_rtx (arg2, len);
    arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));

    /* Set MEM_SIZE as appropriate.  */
    if (CONST_INT_P (arg3_rtx))
      {
        set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
        set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
      }

    if (HAVE_cmpmemsi)
      insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
                           GEN_INT (MIN (arg1_align, arg2_align)));
    else
      gcc_unreachable ();

    if (insn)
      emit_insn (insn);
    else
      emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
                               TYPE_MODE (integer_type_node), 3,
                               XEXP (arg1_rtx, 0), Pmode,
                               XEXP (arg2_rtx, 0), Pmode,
                               convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
                                                TYPE_UNSIGNED (sizetype)),
                               TYPE_MODE (sizetype));

    /* Return the value in the proper mode for this function.  */
    mode = TYPE_MODE (TREE_TYPE (exp));
    if (GET_MODE (result) == mode)
      return result;
    else if (target != 0)
      {
        convert_move (target, result, 0);
        return target;
      }
    else
      return convert_to_mode (mode, result, 0);
  }
#endif /* HAVE_cmpmemsi.  */

  return NULL_RTX;
}
/* Expand expression EXP, which is a call to the strcmp builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

#if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
  if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
      || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
    {
      rtx arg1_rtx, arg2_rtx;
      rtx result, insn = NULL_RTX;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);

      unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
      unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
        return NULL_RTX;

      /* Stabilize the arguments in case gen_cmpstr(n)si fail.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);

      arg1_rtx = get_memory_rtx (arg1, NULL);
      arg2_rtx = get_memory_rtx (arg2, NULL);

#ifdef HAVE_cmpstrsi
      /* Try to call cmpstrsi.  */
      if (HAVE_cmpstrsi)
        {
          machine_mode insn_mode
            = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;

          /* Make a place to write the result of the instruction.  */
          result = target;
          if (! (result != 0
                 && REG_P (result) && GET_MODE (result) == insn_mode
                 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
            result = gen_reg_rtx (insn_mode);

          insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
                               GEN_INT (MIN (arg1_align, arg2_align)));
        }
#endif
#ifdef HAVE_cmpstrnsi
      /* Try to determine at least one length and call cmpstrnsi.  */
      if (!insn && HAVE_cmpstrnsi)
        {
          tree len;
          rtx arg3_rtx;

          machine_mode insn_mode
            = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
          tree len1 = c_strlen (arg1, 1);
          tree len2 = c_strlen (arg2, 1);

          if (len1)
            len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
          if (len2)
            len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

          /* If we don't have a constant length for the first, use the length
             of the second, if we know it.  We don't require a constant for
             this case; some cost analysis could be done if both are available
             but neither is constant.  For now, assume they're equally cheap,
             unless one has side effects.  If both strings have constant
             lengths, use the smaller.  */

          if (!len1)
            len = len2;
          else if (!len2)
            len = len1;
          else if (TREE_SIDE_EFFECTS (len1))
            len = len2;
          else if (TREE_SIDE_EFFECTS (len2))
            len = len1;
          else if (TREE_CODE (len1) != INTEGER_CST)
            len = len2;
          else if (TREE_CODE (len2) != INTEGER_CST)
            len = len1;
          else if (tree_int_cst_lt (len1, len2))
            len = len1;
          else
            len = len2;

          /* If both arguments have side effects, we cannot optimize.  */
          if (!len || TREE_SIDE_EFFECTS (len))
            goto do_libcall;

          arg3_rtx = expand_normal (len);

          /* Make a place to write the result of the instruction.  */
          result = target;
          if (! (result != 0
                 && REG_P (result) && GET_MODE (result) == insn_mode
                 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
            result = gen_reg_rtx (insn_mode);

          insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
                                GEN_INT (MIN (arg1_align, arg2_align)));
        }
#endif

      if (insn)
        {
          machine_mode mode;
          emit_insn (insn);

          /* Return the value in the proper mode for this function.  */
          mode = TYPE_MODE (TREE_TYPE (exp));
          if (GET_MODE (result) == mode)
            return result;
          if (target == 0)
            return convert_to_mode (mode, result, 0);
          convert_move (target, result, 0);
          return target;
        }

      /* Expand the library call ourselves using a stabilized argument
         list to avoid re-evaluating the function's arguments twice.  */
#ifdef HAVE_cmpstrnsi
    do_libcall:
#endif
      fndecl = get_callee_fndecl (exp);
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
#endif
  return NULL_RTX;
}
/* Expand expression EXP, which is a call to the strncmp builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
                        ATTRIBUTE_UNUSED machine_mode mode)
{
  location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* If c_strlen can determine an expression for one of the string
     lengths, and it doesn't have side effects, then emit cmpstrnsi
     using length MIN(strlen(string)+1, arg3).  */
#ifdef HAVE_cmpstrnsi
  if (HAVE_cmpstrnsi)
    {
      tree len, len1, len2;
      rtx arg1_rtx, arg2_rtx, arg3_rtx;
      rtx result, insn;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);
      tree arg3 = CALL_EXPR_ARG (exp, 2);

      unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
      unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
      machine_mode insn_mode
        = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;

      len1 = c_strlen (arg1, 1);
      len2 = c_strlen (arg2, 1);

      if (len1)
        len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
      if (len2)
        len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);

      /* If we don't have a constant length for the first, use the length
         of the second, if we know it.  We don't require a constant for
         this case; some cost analysis could be done if both are available
         but neither is constant.  For now, assume they're equally cheap,
         unless one has side effects.  If both strings have constant
         lengths, use the smaller.  */

      if (!len1)
        len = len2;
      else if (!len2)
        len = len1;
      else if (TREE_SIDE_EFFECTS (len1))
        len = len2;
      else if (TREE_SIDE_EFFECTS (len2))
        len = len1;
      else if (TREE_CODE (len1) != INTEGER_CST)
        len = len2;
      else if (TREE_CODE (len2) != INTEGER_CST)
        len = len1;
      else if (tree_int_cst_lt (len1, len2))
        len = len1;
      else
        len = len2;

      /* If both arguments have side effects, we cannot optimize.  */
      if (!len || TREE_SIDE_EFFECTS (len))
        return NULL_RTX;

      /* The actual new length parameter is MIN(len,arg3).  */
      len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
                             fold_convert_loc (loc, TREE_TYPE (len), arg3));

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
        return NULL_RTX;

      /* Make a place to write the result of the instruction.  */
      result = target;
      if (! (result != 0
             && REG_P (result) && GET_MODE (result) == insn_mode
             && REGNO (result) >= FIRST_PSEUDO_REGISTER))
        result = gen_reg_rtx (insn_mode);

      /* Stabilize the arguments in case gen_cmpstrnsi fails.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);
      len = builtin_save_expr (len);

      arg1_rtx = get_memory_rtx (arg1, len);
      arg2_rtx = get_memory_rtx (arg2, len);
      arg3_rtx = expand_normal (len);
      insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
                            GEN_INT (MIN (arg1_align, arg2_align)));
      if (insn)
        {
          emit_insn (insn);

          /* Return the value in the proper mode for this function.  */
          mode = TYPE_MODE (TREE_TYPE (exp));
          if (GET_MODE (result) == mode)
            return result;
          if (target == 0)
            return convert_to_mode (mode, result, 0);
          convert_move (target, result, 0);
          return target;
        }

      /* Expand the library call ourselves using a stabilized argument
         list to avoid re-evaluating the function's arguments twice.  */
      fndecl = get_callee_fndecl (exp);
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
                                  arg1, arg2, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
#endif
  return NULL_RTX;
}
/* Expand a call to __builtin_saveregs, generating the result in TARGET,
   if that's convenient.  */

rtx
expand_builtin_saveregs (void)
{
  rtx val;
  rtx_insn *seq;

  /* Don't do __builtin_saveregs more than once in a function.
     Save the result of the first call and reuse it.  */
  if (saveregs_value != 0)
    return saveregs_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the call to the
     first insn of this function.  */

  start_sequence ();

  /* Do whatever the machine needs done in this case.  */
  val = targetm.calls.expand_builtin_saveregs ();

  seq = get_insns ();
  end_sequence ();

  saveregs_value = val;

  /* Put the insns after the NOTE that starts the function.  If this
     is inside a start_sequence, make the outer-level insn chain current, so
     the code is placed at the start of the function.  */
  push_topmost_sequence ();
  emit_insn_after (seq, entry_of_function ());
  pop_topmost_sequence ();

  return val;
}
/* Expand a call to __builtin_next_arg.  */

static rtx
expand_builtin_next_arg (void)
{
  /* Checking arguments is already done in fold_builtin_next_arg
     that must be called before this function.  */
  return expand_binop (ptr_mode, add_optab,
                       crtl->args.internal_arg_pointer,
                       crtl->args.arg_offset_rtx,
                       NULL_RTX, 0, OPTAB_LIB_WIDEN);
}
/* Make it easier for the backends by protecting the valist argument
   from multiple evaluations.  */

static tree
stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
{
  tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));

  /* The current way of determining the type of valist is completely
     bogus.  We should have the information on the va builtin instead.  */
  if (!vatype)
    vatype = targetm.fn_abi_va_list (cfun->decl);

  if (TREE_CODE (vatype) == ARRAY_TYPE)
    {
      if (TREE_SIDE_EFFECTS (valist))
        valist = save_expr (valist);

      /* For this case, the backends will be expecting a pointer to
         vatype, but it's possible we've actually been given an array
         (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
         So fix it.  */
      if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
        {
          tree p1 = build_pointer_type (TREE_TYPE (vatype));
          valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
        }
    }
  else
    {
      tree pt = build_pointer_type (vatype);

      if (! needs_lvalue)
        {
          if (! TREE_SIDE_EFFECTS (valist))
            return valist;

          valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
          TREE_SIDE_EFFECTS (valist) = 1;
        }

      if (TREE_SIDE_EFFECTS (valist))
        valist = save_expr (valist);
      valist = fold_build2_loc (loc, MEM_REF,
                                vatype, valist, build_int_cst (pt, 0));
    }

  return valist;
}
/* The "standard" definition of va_list is void*.  */

tree
std_build_builtin_va_list (void)
{
  return ptr_type_node;
}

/* The "standard" abi va_list is va_list_type_node.  */

tree
std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
{
  return va_list_type_node;
}
/* The "standard" type of va_list is va_list_type_node.  */

tree
std_canonical_va_list_type (tree type)
{
  tree wtype, htype;

  if (INDIRECT_REF_P (type))
    type = TREE_TYPE (type);
  else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
    type = TREE_TYPE (type);
  wtype = va_list_type_node;
  htype = type;
  /* Treat structure va_list types.  */
  if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
    htype = TREE_TYPE (htype);
  else if (TREE_CODE (wtype) == ARRAY_TYPE)
    {
      /* If va_list is an array type, the argument may have decayed
         to a pointer type, e.g. by being passed to another function.
         In that case, unwrap both types so that we can compare the
         underlying records.  */
      if (TREE_CODE (htype) == ARRAY_TYPE
          || POINTER_TYPE_P (htype))
        {
          wtype = TREE_TYPE (wtype);
          htype = TREE_TYPE (htype);
        }
    }
  if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
    return va_list_type_node;

  return NULL_TREE;
}
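
/* Illustrative example (added commentary, not part of the original
   source): on targets where va_list is an array type, e.g. the x86-64
   System V ABI's

     typedef struct __va_list_tag va_list[1];

   a va_list argument decays to a pointer when passed to another
   function, which is why the code above unwraps both WTYPE and HTYPE
   before comparing their main variants.  */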
/* The "standard" implementation of va_start: just assign `nextarg' to
   the variable.  */

void
std_expand_builtin_va_start (tree valist, rtx nextarg)
{
  rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
  convert_move (va_r, nextarg, 0);

  /* We do not have any valid bounds for the pointer, so
     just store zero bounds for it.  */
  if (chkp_function_instrumented_p (current_function_decl))
    chkp_expand_bounds_reset_for_mem (valist,
                                      make_tree (TREE_TYPE (valist),
                                                 nextarg));
}
/* Expand EXP, a call to __builtin_va_start.  */

static rtx
expand_builtin_va_start (tree exp)
{
  rtx nextarg;
  tree valist;
  location_t loc = EXPR_LOCATION (exp);

  if (call_expr_nargs (exp) < 2)
    {
      error_at (loc, "too few arguments to function %<va_start%>");
      return const0_rtx;
    }

  if (fold_builtin_next_arg (exp, true))
    return const0_rtx;

  nextarg = expand_builtin_next_arg ();
  valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);

  if (targetm.expand_builtin_va_start)
    targetm.expand_builtin_va_start (valist, nextarg);
  else
    std_expand_builtin_va_start (valist, nextarg);

  return const0_rtx;
}
/* Expand EXP, a call to __builtin_va_end.  */

static rtx
expand_builtin_va_end (tree exp)
{
  tree valist = CALL_EXPR_ARG (exp, 0);

  /* Evaluate for side effects, if needed.  I hate macros that don't
     do that.  */
  if (TREE_SIDE_EFFECTS (valist))
    expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);

  return const0_rtx;
}
/* Expand EXP, a call to __builtin_va_copy.  We do this as a
   builtin rather than just as an assignment in stdarg.h because of the
   nastiness of array-type va_list types.  */

static rtx
expand_builtin_va_copy (tree exp)
{
  tree dst, src, t;
  location_t loc = EXPR_LOCATION (exp);

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  dst = stabilize_va_list_loc (loc, dst, 1);
  src = stabilize_va_list_loc (loc, src, 0);

  gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);

  if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
    {
      t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
  else
    {
      rtx dstb, srcb, size;

      /* Evaluate to pointers.  */
      dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
      srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
      size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
                          NULL_RTX, VOIDmode, EXPAND_NORMAL);

      dstb = convert_memory_address (Pmode, dstb);
      srcb = convert_memory_address (Pmode, srcb);

      /* "Dereference" to BLKmode memories.  */
      dstb = gen_rtx_MEM (BLKmode, dstb);
      set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
      set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
      srcb = gen_rtx_MEM (BLKmode, srcb);
      set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
      set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));

      /* Copy.  */
      emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
    }

  return const0_rtx;
}
/* Expand a call to one of the builtin functions __builtin_frame_address or
   __builtin_return_address.  */

static rtx
expand_builtin_frame_address (tree fndecl, tree exp)
{
  /* The argument must be a nonnegative integer constant.
     It counts the number of frames to scan up the stack.
     The value is the return address saved in that frame.  */
  if (call_expr_nargs (exp) == 0)
    /* Warning about missing arg was already issued.  */
    return const0_rtx;
  else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
    {
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
        error ("invalid argument to %<__builtin_frame_address%>");
      else
        error ("invalid argument to %<__builtin_return_address%>");
      return const0_rtx;
    }
  else
    {
      rtx tem
        = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
                                      tree_to_uhwi (CALL_EXPR_ARG (exp, 0)));

      /* Some ports cannot access arbitrary stack frames.  */
      if (tem == NULL)
        {
          if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
            warning (0, "unsupported argument to %<__builtin_frame_address%>");
          else
            warning (0, "unsupported argument to %<__builtin_return_address%>");
          return const0_rtx;
        }

      /* For __builtin_frame_address, return what we've got.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
        return tem;

      if (!REG_P (tem)
          && ! CONSTANT_P (tem))
        tem = copy_addr_to_reg (tem);
      return tem;
    }
}
/* Expand EXP, a call to the alloca builtin.  Return NULL_RTX if we
   failed and the caller should emit a normal call.  CANNOT_ACCUMULATE
   is the same as for allocate_dynamic_stack_space.  */

static rtx
expand_builtin_alloca (tree exp, bool cannot_accumulate)
{
  rtx op0;
  rtx result;
  bool valid_arglist;
  unsigned int align;
  bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
                            == BUILT_IN_ALLOCA_WITH_ALIGN);

  valid_arglist
    = (alloca_with_align
       ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
       : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));

  if (!valid_arglist)
    return NULL_RTX;

  /* Compute the argument.  */
  op0 = expand_normal (CALL_EXPR_ARG (exp, 0));

  /* Compute the alignment.  */
  align = (alloca_with_align
           ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
           : BIGGEST_ALIGNMENT);

  /* Allocate the desired space.  */
  result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
  result = convert_memory_address (ptr_mode, result);

  return result;
}
/* Expand a call to the bswap builtin in EXP.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
                      rtx subtarget)
{
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  op0 = expand_expr (arg,
                     subtarget && GET_MODE (subtarget) == target_mode
                     ? subtarget : NULL_RTX,
                     target_mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != target_mode)
    op0 = convert_to_mode (target_mode, op0, 1);

  target = expand_unop (target_mode, bswap_optab, op0, target, 1);

  gcc_assert (target);

  return convert_to_mode (target_mode, target, 1);
}
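
/* Worked example (added commentary, not part of the original source):
   for SImode the expansion above reduces to a single byte-swap
   operation, e.g.

     __builtin_bswap32 (0x11223344) == 0x44332211

   with the argument first converted to TARGET_MODE if it had been
   promoted to a wider mode.  */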
/* Expand a call to a unary builtin in EXP.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
                     rtx subtarget, optab op_optab)
{
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Compute the argument.  */
  op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
                     (subtarget
                      && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
                          == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
                     VOIDmode, EXPAND_NORMAL);
  /* Compute op, into TARGET if possible.
     Set TARGET to wherever the result comes back.  */
  target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
                        op_optab, op0, target, op_optab != clrsb_optab);
  gcc_assert (target);

  return convert_to_mode (target_mode, target, 0);
}
/* Expand a call to __builtin_expect.  We just return our argument
   as the builtin_expect semantic should've been already executed by
   tree branch prediction pass.  */

static rtx
expand_builtin_expect (tree exp, rtx target)
{
  tree arg;

  if (call_expr_nargs (exp) < 2)
    return const0_rtx;
  arg = CALL_EXPR_ARG (exp, 0);

  target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
  /* When guessing was done, the hints should be already stripped away.  */
  gcc_assert (!flag_guess_branch_prob
              || optimize == 0 || seen_error ());
  return target;
}
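
/* Usage example (added commentary, not part of the original source):
   by this point a source-level hint such as

     if (__builtin_expect (err != 0, 0))   /* unlikely path  */
       handle_error ();

   has already been consumed by the tree branch prediction pass, so the
   expander above merely forwards the first argument.  */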
/* Expand a call to __builtin_assume_aligned.  We just return our first
   argument as the builtin_assume_aligned semantic should've been already
   executed by CCP.  */

static rtx
expand_builtin_assume_aligned (tree exp, rtx target)
{
  if (call_expr_nargs (exp) < 2)
    return const0_rtx;
  target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
                        EXPAND_NORMAL);
  gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
              && (call_expr_nargs (exp) < 3
                  || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
  return target;
}
void
expand_builtin_trap (void)
{
#ifdef HAVE_trap
  if (HAVE_trap)
    {
      rtx_insn *insn = emit_insn (gen_trap ());
      /* For trap insns when not accumulating outgoing args force
         REG_ARGS_SIZE note to prevent crossjumping of calls with
         different args sizes.  */
      if (!ACCUMULATE_OUTGOING_ARGS)
        add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
    }
  else
#endif
    emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
  emit_barrier ();
}
/* Expand a call to __builtin_unreachable.  We do nothing except emit
   a barrier saying that control flow will not pass here.

   It is the responsibility of the program being compiled to ensure
   that control flow never reaches __builtin_unreachable.  */

static void
expand_builtin_unreachable (void)
{
  emit_barrier ();
}
/* Expand EXP, a call to fabs, fabsf or fabsl.
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function inline.  If convenient, the result should be placed
   in TARGET.  SUBTARGET may be used as the target for computing
   the operand.  */

static rtx
expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
{
  machine_mode mode;
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
  return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
}
/* Expand EXP, a call to copysign, copysignf, or copysignl.
   Return NULL if a normal call should be emitted rather than expanding the
   function inline.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing the operand.  */

static rtx
expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
{
  rtx op0, op1;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

  arg = CALL_EXPR_ARG (exp, 1);
  op1 = expand_normal (arg);

  return expand_copysign (op0, op1, target);
}
/* Expand a call to __builtin___clear_cache.  */

static rtx
expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
{
#ifndef HAVE_clear_cache
#ifdef CLEAR_INSN_CACHE
  /* There is no "clear_cache" insn, and __clear_cache() in libgcc
     does something.  Just do the default expansion to a call to
     __clear_cache().  */
  return NULL_RTX;
#else
  /* There is no "clear_cache" insn, and __clear_cache() in libgcc
     does nothing.  There is no need to call it.  Do nothing.  */
  return const0_rtx;
#endif /* CLEAR_INSN_CACHE */
#else
  /* We have a "clear_cache" insn, and it will handle everything.  */
  tree begin, end;
  rtx begin_rtx, end_rtx;

  /* We must not expand to a library call.  If we did, any
     fallback library function in libgcc that might contain a call to
     __builtin___clear_cache() would recurse infinitely.  */
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    {
      error ("both arguments to %<__builtin___clear_cache%> must be pointers");
      return const0_rtx;
    }

  if (HAVE_clear_cache)
    {
      struct expand_operand ops[2];

      begin = CALL_EXPR_ARG (exp, 0);
      begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);

      end = CALL_EXPR_ARG (exp, 1);
      end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);

      create_address_operand (&ops[0], begin_rtx);
      create_address_operand (&ops[1], end_rtx);
      if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
        return const0_rtx;
    }
  return const0_rtx;
#endif /* HAVE_clear_cache */
}
/* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT.  */

static rtx
round_trampoline_addr (rtx tramp)
{
  rtx temp, addend, mask;

  /* If we don't need too much alignment, we'll have been guaranteed
     proper alignment by get_trampoline_type.  */
  if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
    return tramp;

  /* Round address up to desired boundary.  */
  temp = gen_reg_rtx (Pmode);
  addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
  mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);

  temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
                              temp, 0, OPTAB_LIB_WIDEN);
  tramp = expand_simple_binop (Pmode, AND, temp, mask,
                               temp, 0, OPTAB_LIB_WIDEN);

  return tramp;
}
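
/* Worked example (added commentary, not part of the original source):
   the PLUS/AND pair above is the usual round-up-to-alignment idiom.
   With a TRAMPOLINE_ALIGNMENT of 64 bits (8 bytes), ADDEND is 7 and
   MASK is -8, so for tramp == 0x1003:

     (0x1003 + 7) & -8 == 0x1008

   the next 8-byte-aligned address at or above the original.  */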
static rtx
expand_builtin_init_trampoline (tree exp, bool onstack)
{
  tree t_tramp, t_func, t_chain;
  rtx m_tramp, r_tramp, r_chain, tmp;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
                         POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_tramp = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  r_tramp = expand_normal (t_tramp);
  m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
  MEM_NOTRAP_P (m_tramp) = 1;

  /* If ONSTACK, the TRAMP argument should be the address of a field
     within the local function's FRAME decl.  Either way, let's see if
     we can fill in the MEM_ATTRs for this memory.  */
  if (TREE_CODE (t_tramp) == ADDR_EXPR)
    set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);

  /* Creator of a heap trampoline is responsible for making sure the
     address is aligned to at least STACK_BOUNDARY.  Normally malloc
     will ensure this anyhow.  */
  tmp = round_trampoline_addr (r_tramp);
  if (tmp != r_tramp)
    {
      m_tramp = change_address (m_tramp, BLKmode, tmp);
      set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
      set_mem_size (m_tramp, TRAMPOLINE_SIZE);
    }

  /* The FUNC argument should be the address of the nested function.
     Extract the actual function decl to pass to the hook.  */
  gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
  t_func = TREE_OPERAND (t_func, 0);
  gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);

  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the trampoline.  */
  targetm.calls.trampoline_init (m_tramp, t_func, r_chain);

  if (onstack)
    {
      trampolines_created = 1;

      warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
                  "trampoline generated for nested function %qD", t_func);
    }

  return const0_rtx;
}
static rtx
expand_builtin_adjust_trampoline (tree exp)
{
  rtx tramp;

  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
  tramp = round_trampoline_addr (tramp);
  if (targetm.calls.trampoline_adjust_address)
    tramp = targetm.calls.trampoline_adjust_address (tramp);

  return tramp;
}
/* Expand the call EXP to the built-in signbit, signbitf or signbitl
   function.  The function first checks whether the back end provides
   an insn to implement signbit for the respective mode.  If not, it
   checks whether the floating point format of the value is such that
   the sign bit can be extracted.  If that is not the case, the
   function returns NULL_RTX to indicate that a normal call should be
   emitted rather than expanding the function in-line.  EXP is the
   expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  */

static rtx
expand_builtin_signbit (tree exp, rtx target)
{
  const struct real_format *fmt;
  machine_mode fmode, imode, rmode;
  tree arg;
  int word, bitpos;
  enum insn_code icode;
  rtx temp;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  fmode = TYPE_MODE (TREE_TYPE (arg));
  rmode = TYPE_MODE (TREE_TYPE (exp));
  fmt = REAL_MODE_FORMAT (fmode);

  arg = builtin_save_expr (arg);

  /* Expand the argument yielding a RTX expression.  */
  temp = expand_normal (arg);

  /* Check if the back end provides an insn that handles signbit for the
     argument's mode.  */
  icode = optab_handler (signbit_optab, fmode);
  if (icode != CODE_FOR_nothing)
    {
      rtx_insn *last = get_last_insn ();
      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
        return target;
      delete_insns_since (last);
    }

  /* For floating point formats without a sign bit, implement signbit
     as "ARG < 0.0".  */
  bitpos = fmt->signbit_ro;
  if (bitpos < 0)
    {
      /* But we can't do this if the format supports signed zero.  */
      if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
        return NULL_RTX;

      arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
                             build_real (TREE_TYPE (arg), dconst0));
      return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
    }

  if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
    {
      imode = int_mode_for_mode (fmode);
      if (imode == BLKmode)
        return NULL_RTX;
      temp = gen_lowpart (imode, temp);
    }
  else
    {
      imode = word_mode;
      /* Handle targets with different FP word orders.  */
      if (FLOAT_WORDS_BIG_ENDIAN)
        word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
      else
        word = bitpos / BITS_PER_WORD;
      temp = operand_subword_force (temp, word, fmode);
      bitpos = bitpos % BITS_PER_WORD;
    }

  /* Force the intermediate word_mode (or narrower) result into a
     register.  This avoids attempting to create paradoxical SUBREGs
     of floating point modes below.  */
  temp = force_reg (imode, temp);

  /* If the bitpos is within the "result mode" lowpart, the operation
     can be implemented with a single bitwise AND.  Otherwise, we need
     a right shift and an AND.  */

  if (bitpos < GET_MODE_BITSIZE (rmode))
    {
      wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));

      if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
        temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp,
                           immed_wide_int_const (mask, rmode),
                           NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }
  else
    {
      /* Perform a logical right shift to place the signbit in the least
         significant bit, then truncate the result to the desired mode
         and mask just this bit.  */
      temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
      temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp, const1_rtx,
                           NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }

  return temp;
}
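
/* Worked example (added commentary, not part of the original source):
   for IEEE single precision the sign is bit 31, so on a target without
   a signbit insn the expansion reduces to masking the value's integer
   image, conceptually

     signbit (x)  ==  (bits_of (x) >> 31) & 1      /* shift form  */

   or a single AND with 0x80000000 when bit 31 already fits inside the
   result mode, exactly the two cases distinguished above.  */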
/* Expand fork or exec calls.  TARGET is the desired target of the
   call.  EXP is the call.  FN is the
   identifier of the actual function.  IGNORE is nonzero if the
   value is to be ignored.  */

static rtx
expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
{
  tree id, decl;
  tree call;

  /* If we are not profiling, just call the function.  */
  if (!profile_arc_flag)
    return NULL_RTX;

  /* Otherwise call the wrapper.  This should be equivalent for the rest of
     compiler, so the code does not diverge, and the wrapper may run the
     code necessary for keeping the profiling sane.  */

  switch (DECL_FUNCTION_CODE (fn))
    {
    case BUILT_IN_FORK:
      id = get_identifier ("__gcov_fork");
      break;

    case BUILT_IN_EXECL:
      id = get_identifier ("__gcov_execl");
      break;

    case BUILT_IN_EXECV:
      id = get_identifier ("__gcov_execv");
      break;

    case BUILT_IN_EXECLP:
      id = get_identifier ("__gcov_execlp");
      break;

    case BUILT_IN_EXECLE:
      id = get_identifier ("__gcov_execle");
      break;

    case BUILT_IN_EXECVP:
      id = get_identifier ("__gcov_execvp");
      break;

    case BUILT_IN_EXECVE:
      id = get_identifier ("__gcov_execve");
      break;

    default:
      gcc_unreachable ();
    }

  decl = build_decl (DECL_SOURCE_LOCATION (fn),
                     FUNCTION_DECL, id, TREE_TYPE (fn));
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  TREE_NOTHROW (decl) = 1;
  DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
  DECL_VISIBILITY_SPECIFIED (decl) = 1;
  call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
  return expand_call (call, target, ignore);
}
/* Reconstitute a mode for a __sync intrinsic operation.  Since the type of
   the pointer in these functions is void*, the tree optimizers may remove
   casts.  The mode computed in expand_builtin isn't reliable either, due
   to __sync_bool_compare_and_swap.

   FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
   group of builtins.  This gives us log2 of the mode size.  */

static inline machine_mode
get_builtin_sync_mode (int fcode_diff)
{
  /* The size is not negotiable, so ask not to get BLKmode in return
     if the target indicates that a smaller size would be better.  */
  return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
}
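
/* Worked example (added commentary, not part of the original source):
   FCODE_DIFF is log2 of the access size in bytes, so for e.g.
   __sync_fetch_and_add_4,

     fcode_diff == 2  =>  BITS_PER_UNIT << 2 == 32 bits  =>  SImode

   on a target with 8-bit units.  */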
/* Expand the memory expression LOC and return the appropriate memory operand
   for the builtin_sync operations.  */

static rtx
get_builtin_sync_mem (tree loc, machine_mode mode)
{
  rtx addr, mem;

  addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
  addr = convert_memory_address (Pmode, addr);

  /* Note that we explicitly do not want any alias information for this
     memory, so that we kill all other live memories.  Otherwise we don't
     satisfy the full barrier semantics of the intrinsic.  */
  mem = validize_mem (gen_rtx_MEM (mode, addr));

  /* The alignment needs to be at least according to that of the mode.  */
  set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
                           get_pointer_alignment (loc)));
  set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
  MEM_VOLATILE_P (mem) = 1;

  return mem;
}
/* Make sure an argument is in the right mode.
   EXP is the tree argument.
   MODE is the mode it should be in.  */

static rtx
expand_expr_force_mode (tree exp, machine_mode mode)
{
  rtx val;
  machine_mode old_mode;

  val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
  /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
     of CONST_INTs, where we know the old_mode only from the call argument.  */

  old_mode = GET_MODE (val);
  if (old_mode == VOIDmode)
    old_mode = TYPE_MODE (TREE_TYPE (exp));
  val = convert_modes (mode, old_mode, val, 1);
  return val;
}
/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
   EXP is the CALL_EXPR.  CODE is the rtx code
   that corresponds to the arithmetic or logical operation from the name;
   an exception here is that NOT actually means NAND.  TARGET is an optional
   place for us to store the results; AFTER is true if this is the
   fetch_and_xxx form.  */

static rtx
expand_builtin_sync_operation (machine_mode mode, tree exp,
                               enum rtx_code code, bool after,
                               rtx target)
{
  rtx val, mem;
  location_t loc = EXPR_LOCATION (exp);

  if (code == NOT && warn_sync_nand)
    {
      tree fndecl = get_callee_fndecl (exp);
      enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

      static bool warned_f_a_n, warned_n_a_f;

      switch (fcode)
        {
        case BUILT_IN_SYNC_FETCH_AND_NAND_1:
        case BUILT_IN_SYNC_FETCH_AND_NAND_2:
        case BUILT_IN_SYNC_FETCH_AND_NAND_4:
        case BUILT_IN_SYNC_FETCH_AND_NAND_8:
        case BUILT_IN_SYNC_FETCH_AND_NAND_16:
          if (warned_f_a_n)
            break;

          fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
          inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
          warned_f_a_n = true;
          break;

        case BUILT_IN_SYNC_NAND_AND_FETCH_1:
        case BUILT_IN_SYNC_NAND_AND_FETCH_2:
        case BUILT_IN_SYNC_NAND_AND_FETCH_4:
        case BUILT_IN_SYNC_NAND_AND_FETCH_8:
        case BUILT_IN_SYNC_NAND_AND_FETCH_16:
          if (warned_n_a_f)
            break;

          fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
          inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
          warned_n_a_f = true;
          break;

        default:
          gcc_unreachable ();
        }
    }

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
                                 after);
}
/* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
   intrinsics.  EXP is the CALL_EXPR.  IS_BOOL is
   true if this is the boolean form.  TARGET is a place for us to store the
   results; this is NOT optional if IS_BOOL is true.  */

static rtx
expand_builtin_compare_and_swap (machine_mode mode, tree exp,
                                 bool is_bool, rtx target)
{
  rtx old_val, new_val, mem;
  rtx *pbool, *poval;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
  new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);

  pbool = poval = NULL;
  if (target != const0_rtx)
    {
      if (is_bool)
        pbool = &target;
      else
        poval = &target;
    }
  if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
                                       false, MEMMODEL_SYNC_SEQ_CST,
                                       MEMMODEL_SYNC_SEQ_CST))
    return NULL_RTX;

  return target;
}
/* Expand the __sync_lock_test_and_set intrinsic.  Note that the most
   general form is actually an atomic exchange, and some targets only
   support a reduced form with the second argument being a constant 1.
   EXP is the CALL_EXPR; TARGET is an optional place for us to store
   the results.  */

static rtx
expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
                                       rtx target)
{
  rtx val, mem;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_sync_lock_test_and_set (target, mem, val);
}
/* Expand the __sync_lock_release intrinsic.  EXP is the CALL_EXPR.  */

static void
expand_builtin_sync_lock_release (machine_mode mode, tree exp)
{
  rtx mem;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
}
/* Given an integer representing an ``enum memmodel'', verify its
   correctness and return the memory model enum.  */

static enum memmodel
get_memmodel (tree exp)
{
  rtx op;
  unsigned HOST_WIDE_INT val;

  /* If the parameter is not a constant, it's a run time value so we'll just
     convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking.  */
  if (TREE_CODE (exp) != INTEGER_CST)
    return MEMMODEL_SEQ_CST;

  op = expand_normal (exp);

  val = INTVAL (op);
  if (targetm.memmodel_check)
    val = targetm.memmodel_check (val);
  else if (val & ~MEMMODEL_MASK)
    {
      warning (OPT_Winvalid_memory_model,
               "Unknown architecture specifier in memory model to builtin.");
      return MEMMODEL_SEQ_CST;
    }

  /* Should never see a user explicit SYNC memmodel, so >= LAST works.  */
  if (memmodel_base (val) >= MEMMODEL_LAST)
    {
      warning (OPT_Winvalid_memory_model,
               "invalid memory model argument to builtin");
      return MEMMODEL_SEQ_CST;
    }

  /* Workaround for Bugzilla 59448.  GCC doesn't track consume properly, so
     be conservative and promote consume to acquire.  */
  if (val == MEMMODEL_CONSUME)
    val = MEMMODEL_ACQUIRE;

  return (enum memmodel) val;
}
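
/* Usage example (added commentary, not part of the original source):
   a call such as

     __atomic_load_n (&x, __ATOMIC_CONSUME);

   reaches this function with a constant model argument; per the
   Bugzilla 59448 workaround above it is promoted to MEMMODEL_ACQUIRE,
   while an out-of-range constant degrades to MEMMODEL_SEQ_CST with a
   warning rather than an error.  */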
/* Expand the __atomic_exchange intrinsic:
        TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
{
  rtx val, mem;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_exchange (target, mem, val, model);
}
/* Expand the __atomic_compare_exchange intrinsic:
        bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
                                        TYPE desired, BOOL weak,
                                        enum memmodel success,
                                        enum memmodel failure)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
                                        rtx target)
{
  rtx expect, desired, mem, oldval;
  rtx_code_label *label;
  enum memmodel success, failure;
  tree weak;
  bool is_weak;

  success = get_memmodel (CALL_EXPR_ARG (exp, 4));
  failure = get_memmodel (CALL_EXPR_ARG (exp, 5));

  if (failure > success)
    {
      warning (OPT_Winvalid_memory_model,
               "failure memory model cannot be stronger than success memory "
               "model for %<__atomic_compare_exchange%>");
      success = MEMMODEL_SEQ_CST;
    }

  if (is_mm_release (failure) || is_mm_acq_rel (failure))
    {
      warning (OPT_Winvalid_memory_model,
               "invalid failure memory model for "
               "%<__atomic_compare_exchange%>");
      failure = MEMMODEL_SEQ_CST;
      success = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  expect = expand_normal (CALL_EXPR_ARG (exp, 1));
  expect = convert_memory_address (Pmode, expect);
  expect = gen_rtx_MEM (mode, expect);
  desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);

  weak = CALL_EXPR_ARG (exp, 3);
  is_weak = false;
  if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
    is_weak = true;

  if (target == const0_rtx)
    target = NULL;

  /* Lest the rtl backend create a race condition with an improper store
     to memory, always create a new pseudo for OLDVAL.  */
  oldval = NULL;

  if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
                                       is_weak, success, failure))
    return NULL_RTX;

  /* Conditionally store back to EXPECT, lest we create a race condition
     with an improper store to memory.  */
  /* ??? With a rearrangement of atomics at the gimple level, we can handle
     the normal case where EXPECT is totally private, i.e. a register.  At
     which point the store can be unconditional.  */
  label = gen_label_rtx ();
  emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
                           GET_MODE (target), 1, label);
  emit_move_insn (expect, oldval);
  emit_label (label);

  return target;
}
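
/* Semantics sketch (added commentary, not part of the original source):
   the conditional store emitted above implements the C11 contract,
   roughly

     if (*object == *expect)       /* success: TARGET != 0  */
       *object = desired;
     else
       *expect = *object;          /* failure: write back  */

   where only the failure path needs EXPECT's memory updated, hence the
   compare-and-jump around the emit_move_insn.  */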
/* Expand the __atomic_load intrinsic:
        TYPE __atomic_load (TYPE *object, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
{
  rtx mem;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 1));
  if (is_mm_release (model) || is_mm_acq_rel (model))
    {
      warning (OPT_Winvalid_memory_model,
               "invalid memory model for %<__atomic_load%>");
      model = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operand.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  return expand_atomic_load (target, mem, model);
}
/* Expand the __atomic_store intrinsic:
        void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_store (machine_mode mode, tree exp)
{
  rtx mem, val;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));
  if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
        || is_mm_release (model)))
    {
      warning (OPT_Winvalid_memory_model,
               "invalid memory model for %<__atomic_store%>");
      model = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_store (mem, val, model, false);
}
/* Expand the __atomic_fetch_XXX intrinsic:
        TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.
   CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
   FETCH_AFTER is true if returning the result of the operation.
   FETCH_AFTER is false if returning the value before the operation.
   IGNORE is true if the result is not used.
   EXT_CALL is the correct builtin for an external call if this cannot be
   resolved to an instruction sequence.  */

static rtx
expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
                                enum rtx_code code, bool fetch_after,
                                bool ignore, enum built_in_function ext_call)
{
  rtx val, mem, ret;
  enum memmodel model;
  tree fndecl;
  tree addr;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  /* Only try generating instructions if inlining is turned on.  */
  if (flag_inline_atomics)
    {
      ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
      if (ret)
        return ret;
    }

  /* Return if a different routine isn't needed for the library call.  */
  if (ext_call == BUILT_IN_NONE)
    return NULL_RTX;

  /* Change the call to the specified function.  */
  fndecl = get_callee_fndecl (exp);
  addr = CALL_EXPR_FN (exp);
  STRIP_NOPS (addr);

  gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
  TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);

  /* Expand the call here so we can emit trailing code.  */
  ret = expand_call (exp, target, ignore);

  /* Replace the original function just in case it matters.  */
  TREE_OPERAND (addr, 0) = fndecl;

  /* Then issue the arithmetic correction to return the right result.  */
  if (!ignore)
    {
      if (code == NOT)
        {
          ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
                                     OPTAB_LIB_WIDEN);
          ret = expand_simple_unop (mode, NOT, ret, target, true);
        }
      else
        ret = expand_simple_binop (mode, code, ret, val, target, true,
                                   OPTAB_LIB_WIDEN);
    }
  return ret;
}
#ifndef HAVE_atomic_clear
# define HAVE_atomic_clear 0
# define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
#endif

/* Expand an atomic clear operation.
        void _atomic_clear (BOOL *obj, enum memmodel)
   EXP is the call expression.  */

static rtx
expand_builtin_atomic_clear (tree exp)
{
  machine_mode mode;
  rtx mem, ret;
  enum memmodel model;

  mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  model = get_memmodel (CALL_EXPR_ARG (exp, 1));

  if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
    {
      warning (OPT_Winvalid_memory_model,
               "invalid memory model for %<__atomic_store%>");
      model = MEMMODEL_SEQ_CST;
    }

  if (HAVE_atomic_clear)
    {
      emit_insn (gen_atomic_clear (mem, model));
      return const0_rtx;
    }

  /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
     Failing that, a store is issued by __atomic_store.  The only way this can
     fail is if the bool type is larger than a word size.  Unlikely, but
     handle it anyway for completeness.  Assume a single threaded model since
     there is no atomic support in this case, and no barriers are required.  */
  ret = expand_atomic_store (mem, const0_rtx, model, true);
  if (!ret)
    emit_move_insn (mem, const0_rtx);
  return const0_rtx;
}
/* Expand an atomic test_and_set operation.
        bool _atomic_test_and_set (BOOL *obj, enum memmodel)
   EXP is the call expression.  */

static rtx
expand_builtin_atomic_test_and_set (tree exp, rtx target)
{
  rtx mem;
  enum memmodel model;
  machine_mode mode;

  mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  model = get_memmodel (CALL_EXPR_ARG (exp, 1));

  return expand_atomic_test_and_set (target, mem, model);
}
/* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
   this architecture.  If ARG1 is NULL, use typical alignment for size ARG0.  */

static tree
fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
{
  int size;
  machine_mode mode;
  unsigned int mode_align, type_align;

  if (TREE_CODE (arg0) != INTEGER_CST)
    return NULL_TREE;

  size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
  mode = mode_for_size (size, MODE_INT, 0);
  mode_align = GET_MODE_ALIGNMENT (mode);

  if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
    type_align = mode_align;
  else
    {
      tree ttype = TREE_TYPE (arg1);

      /* This function is usually invoked and folded immediately by the front
         end before anything else has a chance to look at it.  The pointer
         parameter at this point is usually cast to a void *, so check for that
         and look past the cast.  */
      if (CONVERT_EXPR_P (arg1) && POINTER_TYPE_P (ttype)
          && VOID_TYPE_P (TREE_TYPE (ttype)))
        arg1 = TREE_OPERAND (arg1, 0);

      ttype = TREE_TYPE (arg1);
      gcc_assert (POINTER_TYPE_P (ttype));

      /* Get the underlying type of the object.  */
      ttype = TREE_TYPE (ttype);
      type_align = TYPE_ALIGN (ttype);
    }

  /* If the object has smaller alignment, the lock free routines cannot
     be used.  */
  if (type_align < mode_align)
    return boolean_false_node;

  /* Check if a compare_and_swap pattern exists for the mode which represents
     the required size.  The pattern is not allowed to fail, so the existence
     of the pattern indicates support is present.  */
  if (can_compare_and_swap_p (mode, true))
    return boolean_true_node;
  else
    return boolean_false_node;
}
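
/* Worked example (added commentary, not part of the original source):
   for a hypothetical call

     __atomic_always_lock_free (4, 0);

   ARG1 is a null pointer constant, so TYPE_ALIGN defaults to the
   natural alignment of the 4-byte integer mode; the answer is then
   simply whether a compare-and-swap pattern exists for that mode.  A
   pointer to a packed, underaligned object would instead fail the
   type_align < mode_align check above.  */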
/* Return true if the parameters to call EXP represent an object which will
   always generate lock free instructions.  The first argument represents the
   size of the object, and the second parameter is a pointer to the object
   itself.  If NULL is passed for the object, then the result is based on
   typical alignment for an object of the specified size.  Otherwise return
   false.  */

static rtx
expand_builtin_atomic_always_lock_free (tree exp)
{
  tree size;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);

  if (TREE_CODE (arg0) != INTEGER_CST)
    {
      error ("non-constant argument 1 to __atomic_always_lock_free");
      return const0_rtx;
    }

  size = fold_builtin_atomic_always_lock_free (arg0, arg1);
  if (size == boolean_true_node)
    return const1_rtx;
  return const0_rtx;
}
/* Return a one or zero if it can be determined that object ARG1 of size ARG0
   is lock free on this architecture.  */

static tree
fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
{
  if (!flag_inline_atomics)
    return NULL_TREE;

  /* If it isn't always lock free, don't generate a result.  */
  if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
    return boolean_true_node;

  return NULL_TREE;
}
/* Return true if the parameters to call EXP represent an object which will
   always generate lock free instructions.  The first argument represents the
   size of the object, and the second parameter is a pointer to the object
   itself.  If NULL is passed for the object, then the result is based on
   typical alignment for an object of the specified size.  Otherwise return
   NULL.  */

static rtx
expand_builtin_atomic_is_lock_free (tree exp)
{
  tree size;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);

  if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
    {
      error ("non-integer argument 1 to __atomic_is_lock_free");
      return NULL_RTX;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* If the value is known at compile time, return the RTX for it.  */
  size = fold_builtin_atomic_is_lock_free (arg0, arg1);
  if (size == boolean_true_node)
    return const1_rtx;

  return NULL_RTX;
}

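/* Illustrative usage (not part of GCC itself): the difference between the
   two queries handled above.  __atomic_always_lock_free must fold to a
   compile-time constant, so it is usable in a static assertion;
   __atomic_is_lock_free may instead become a libatomic call at run time.

     #include <stdint.h>
     _Static_assert (__atomic_always_lock_free (sizeof (uint32_t), 0),
		     "32-bit atomics expected to be lock-free");

     int runtime_check (uint64_t *p)
     {
       return __atomic_is_lock_free (sizeof *p, p);  // may call libatomic
     }
   */
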
/* Expand the __atomic_thread_fence intrinsic:
	void __atomic_thread_fence (enum memmodel)
   EXP is the CALL_EXPR.  */

static void
expand_builtin_atomic_thread_fence (tree exp)
{
  enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
  expand_mem_thread_fence (model);
}

/* Expand the __atomic_signal_fence intrinsic:
	void __atomic_signal_fence (enum memmodel)
   EXP is the CALL_EXPR.  */

static void
expand_builtin_atomic_signal_fence (tree exp)
{
  enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
  expand_mem_signal_fence (model);
}

/* Expand the __sync_synchronize intrinsic.  */

static void
expand_builtin_sync_synchronize (void)
{
  expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
}

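/* Illustrative usage (not part of GCC itself): the three fence builtins
   handled above differ only in scope.

     __atomic_thread_fence (__ATOMIC_RELEASE);  // orders against other threads
     __atomic_signal_fence (__ATOMIC_SEQ_CST);  // compiler barrier only;
						// orders against a signal
						// handler in the same thread
     __sync_synchronize ();			// legacy full barrier
						// (MEMMODEL_SYNC_SEQ_CST)

   A signal fence emits no machine barrier instruction, which is why it goes
   through expand_mem_signal_fence rather than expand_mem_thread_fence.  */
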
static rtx
expand_builtin_thread_pointer (tree exp, rtx target)
{
  enum insn_code icode;
  if (!validate_arglist (exp, VOID_TYPE))
    return const0_rtx;
  icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand op;
      /* If the target is not suitable then create a new target.  */
      if (target == NULL_RTX
	  || !REG_P (target)
	  || GET_MODE (target) != Pmode)
	target = gen_reg_rtx (Pmode);
      create_output_operand (&op, target, Pmode);
      expand_insn (icode, 1, &op);
      return target;
    }
  error ("__builtin_thread_pointer is not supported on this target");
  return const0_rtx;
}

static void
expand_builtin_set_thread_pointer (tree exp)
{
  enum insn_code icode;
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return;
  icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand op;
      rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
			     Pmode, EXPAND_NORMAL);
      create_input_operand (&op, val, Pmode);
      expand_insn (icode, 1, &op);
      return;
    }
  error ("__builtin_set_thread_pointer is not supported on this target");
}

/* Emit code to restore the current value of stack.  */

static void
expand_stack_restore (tree var)
{
  rtx_insn *prev;
  rtx sa = expand_normal (var);

  sa = convert_memory_address (Pmode, sa);

  prev = get_last_insn ();
  emit_stack_restore (SAVE_BLOCK, sa);

  record_new_stack_level ();

  fixup_args_size_notes (prev, get_last_insn (), 0);
}

/* Emit code to save the current value of stack.  */

static rtx
expand_stack_save (void)
{
  rtx ret = NULL_RTX;

  emit_stack_save (SAVE_BLOCK, &ret);
  return ret;
}

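/* These two expanders implement __builtin_stack_save and
   __builtin_stack_restore, which the gimplifier emits in pairs around
   variable-length-array scopes.  Roughly (illustrative only, assuming a
   use() function declared elsewhere):

     void f (int n)
     {
       void *sp = __builtin_stack_save ();
       {
	 char vla[n];			// alloca-like allocation
	 use (vla);
       }
       __builtin_stack_restore (sp);	// reclaim the VLA's stack space
     }
   */
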
/* Expand OpenACC acc_on_device.

   This has to happen late (that is, not in early folding; expand_builtin_*,
   rather than fold_builtin_*), as we have to act differently for host and
   acceleration device (ACCEL_COMPILER conditional).  */

static rtx
expand_builtin_acc_on_device (tree exp ATTRIBUTE_UNUSED,
			      rtx target ATTRIBUTE_UNUSED)
{
#ifdef ACCEL_COMPILER
  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree arg = CALL_EXPR_ARG (exp, 0);

  /* Return (arg == v1 || arg == v2) ? 1 : 0.  */
  machine_mode v_mode = TYPE_MODE (TREE_TYPE (arg));
  rtx v = expand_normal (arg), v1, v2;
  v1 = GEN_INT (GOMP_DEVICE_NOT_HOST);
  v2 = GEN_INT (ACCEL_COMPILER_acc_device);
  machine_mode target_mode = TYPE_MODE (integer_type_node);
  if (!target || !register_operand (target, target_mode))
    target = gen_reg_rtx (target_mode);
  emit_move_insn (target, const1_rtx);
  rtx_code_label *done_label = gen_label_rtx ();
  do_compare_rtx_and_jump (v, v1, EQ, false, v_mode, NULL_RTX,
			   NULL, done_label, PROB_EVEN);
  do_compare_rtx_and_jump (v, v2, EQ, false, v_mode, NULL_RTX,
			   NULL, done_label, PROB_EVEN);
  emit_move_insn (target, const0_rtx);
  emit_label (done_label);

  return target;
#else
  return NULL;
#endif
}

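/* Illustrative usage (not part of GCC itself): OpenACC code can test which
   device it is running on, and the expander above resolves the answer when
   compiling for an accelerator:

     #include <openacc.h>
     if (acc_on_device (acc_device_nvidia))
       ;  // device-specialized path

   On the host the call is left as a normal library call; under
   ACCEL_COMPILER it becomes the compare-and-jump sequence emitted above.  */
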
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

rtx
expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
		int ignore)
{
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
  int flags;

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return targetm.expand_builtin (exp, target, subtarget, mode, ignore);

  /* When ASan is enabled, we don't want to expand some memory/string
     builtins and rely on libsanitizer's hooks.  This allows us to avoid
     redundant checks and be sure, that possible overflow will be detected
     by ASan.  */

  if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
    return expand_call (exp, target, ignore);
  /* When not optimizing, generate calls to library functions for a certain
     set of builtins.  */
  if (!optimize
      && !called_as_built_in (fndecl)
      && fcode != BUILT_IN_FORK
      && fcode != BUILT_IN_EXECL
      && fcode != BUILT_IN_EXECV
      && fcode != BUILT_IN_EXECLP
      && fcode != BUILT_IN_EXECLE
      && fcode != BUILT_IN_EXECVP
      && fcode != BUILT_IN_EXECVE
      && fcode != BUILT_IN_ALLOCA
      && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
      && fcode != BUILT_IN_FREE
      && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
      && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
      && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
      && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
      && fcode != BUILT_IN_CHKP_BNDRET)
    return expand_call (exp, target, ignore);
  /* The built-in function expanders test for target == const0_rtx
     to determine whether the function's result will be ignored.  */
  if (ignore)
    target = const0_rtx;

  /* If the result of a pure or const built-in function is ignored, and
     none of its arguments are volatile, we can avoid expanding the
     built-in call and just evaluate the arguments for side-effects.  */
  if (target == const0_rtx
      && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
      && !(flags & ECF_LOOPING_CONST_OR_PURE))
    {
      bool volatilep = false;
      tree arg;
      call_expr_arg_iterator iter;

      FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
	if (TREE_THIS_VOLATILE (arg))
	  {
	    volatilep = true;
	    break;
	  }

      if (! volatilep)
	{
	  FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
	    expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return const0_rtx;
	}
    }

  /* expand_builtin_with_bounds is supposed to be used for
     instrumented builtin calls.  */
  gcc_assert (!CALL_WITH_BOUNDS_P (exp));

  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_FABS):
    case BUILT_IN_FABSD32:
    case BUILT_IN_FABSD64:
    case BUILT_IN_FABSD128:
      target = expand_builtin_fabs (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_COPYSIGN):
      target = expand_builtin_copysign (exp, target, subtarget);
      if (target)
	return target;
      break;

      /* Just do a normal library call if we were unable to fold
	 the values.  */
    CASE_FLT_FN (BUILT_IN_CABS):
      break;

    CASE_FLT_FN (BUILT_IN_EXP):
    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
    CASE_FLT_FN (BUILT_IN_EXP2):
    CASE_FLT_FN (BUILT_IN_EXPM1):
    CASE_FLT_FN (BUILT_IN_LOGB):
    CASE_FLT_FN (BUILT_IN_LOG):
    CASE_FLT_FN (BUILT_IN_LOG10):
    CASE_FLT_FN (BUILT_IN_LOG2):
    CASE_FLT_FN (BUILT_IN_LOG1P):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ACOS):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
      /* Treat these like sqrt only if unsafe math optimizations are allowed,
	 because of possible accuracy problems.  */
      if (! flag_unsafe_math_optimizations)
	break;

    CASE_FLT_FN (BUILT_IN_SQRT):
    CASE_FLT_FN (BUILT_IN_FLOOR):
    CASE_FLT_FN (BUILT_IN_CEIL):
    CASE_FLT_FN (BUILT_IN_TRUNC):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      target = expand_builtin_mathfn (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_FMA):
      target = expand_builtin_mathfn_ternary (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_ILOGB):
      if (! flag_unsafe_math_optimizations)
	break;
    CASE_FLT_FN (BUILT_IN_ISINF):
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_ISFINITE:
    case BUILT_IN_ISNORMAL:
      target = expand_builtin_interclass_mathfn (exp, target);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      target = expand_builtin_int_roundingfn (exp, target);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_IRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_IROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      target = expand_builtin_int_roundingfn_2 (exp, target);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_POWI):
      target = expand_builtin_powi (exp, target);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_ATAN2):
    CASE_FLT_FN (BUILT_IN_LDEXP):
    CASE_FLT_FN (BUILT_IN_SCALB):
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      if (! flag_unsafe_math_optimizations)
	break;

    CASE_FLT_FN (BUILT_IN_FMOD):
    CASE_FLT_FN (BUILT_IN_REMAINDER):
    CASE_FLT_FN (BUILT_IN_DREM):
    CASE_FLT_FN (BUILT_IN_POW):
      target = expand_builtin_mathfn_2 (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_CEXPI):
      target = expand_builtin_cexpi (exp, target);
      gcc_assert (target);
      return target;

    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      if (! flag_unsafe_math_optimizations)
	break;
      target = expand_builtin_mathfn_3 (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_SINCOS):
      if (! flag_unsafe_math_optimizations)
	break;
      target = expand_builtin_sincos (exp);
      if (target)
	return target;
      break;
    case BUILT_IN_APPLY_ARGS:
      return expand_builtin_apply_args ();

      /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
	 FUNCTION with a copy of the parameters described by
	 ARGUMENTS, and ARGSIZE.  It returns a block of memory
	 allocated on the stack into which is stored all the registers
	 that might possibly be used for returning the result of a
	 function.  ARGUMENTS is the value returned by
	 __builtin_apply_args.  ARGSIZE is the number of bytes of
	 arguments that must be copied.  ??? How should this value be
	 computed?  We'll also need a safe worst case value for varargs
	 functions.  */
    case BUILT_IN_APPLY:
      if (!validate_arglist (exp, POINTER_TYPE,
			     POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
	  && !validate_arglist (exp, REFERENCE_TYPE,
				POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
	return const0_rtx;
      else
	{
	  rtx ops[3];

	  ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
	  ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
	  ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));

	  return expand_builtin_apply (ops[0], ops[1], ops[2]);
	}

      /* __builtin_return (RESULT) causes the function to return the
	 value described by RESULT.  RESULT is address of the block of
	 memory returned by __builtin_apply.  */
    case BUILT_IN_RETURN:
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
	expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
      return const0_rtx;

    case BUILT_IN_SAVEREGS:
      return expand_builtin_saveregs ();

    case BUILT_IN_VA_ARG_PACK:
      /* All valid uses of __builtin_va_arg_pack () are removed during
	 inlining.  */
      error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
      return const0_rtx;

    case BUILT_IN_VA_ARG_PACK_LEN:
      /* All valid uses of __builtin_va_arg_pack_len () are removed during
	 inlining.  */
      error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
      return const0_rtx;

      /* Return the address of the first anonymous stack arg.  */
    case BUILT_IN_NEXT_ARG:
      if (fold_builtin_next_arg (exp, false))
	return const0_rtx;
      return expand_builtin_next_arg ();

    case BUILT_IN_CLEAR_CACHE:
      target = expand_builtin___clear_cache (exp);
      if (target)
	return target;
      break;

    case BUILT_IN_CLASSIFY_TYPE:
      return expand_builtin_classify_type (exp);

    case BUILT_IN_CONSTANT_P:
      return const0_rtx;

    case BUILT_IN_FRAME_ADDRESS:
    case BUILT_IN_RETURN_ADDRESS:
      return expand_builtin_frame_address (fndecl, exp);

      /* Returns the address of the area where the structure is returned.
	 0 otherwise.  */
    case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      if (call_expr_nargs (exp) != 0
	  || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
	  || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
	return const0_rtx;
      else
	return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);

    case BUILT_IN_ALLOCA:
    case BUILT_IN_ALLOCA_WITH_ALIGN:
      /* If the allocation stems from the declaration of a variable-sized
	 object, it cannot accumulate.  */
      target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
      if (target)
	return target;
      break;

    case BUILT_IN_STACK_SAVE:
      return expand_stack_save ();

    case BUILT_IN_STACK_RESTORE:
      expand_stack_restore (CALL_EXPR_ARG (exp, 0));
      return const0_rtx;
    case BUILT_IN_BSWAP16:
    case BUILT_IN_BSWAP32:
    case BUILT_IN_BSWAP64:
      target = expand_builtin_bswap (target_mode, exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_FFS):
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, ffs_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_CLZ):
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, clz_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_CTZ):
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, ctz_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_CLRSB):
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, clrsb_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_POPCOUNT):
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, popcount_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_PARITY):
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, parity_optab);
      if (target)
	return target;
      break;

    case BUILT_IN_STRLEN:
      target = expand_builtin_strlen (exp, target, target_mode);
      if (target)
	return target;
      break;

    case BUILT_IN_STRCPY:
      target = expand_builtin_strcpy (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_STRNCPY:
      target = expand_builtin_strncpy (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_STPCPY:
      target = expand_builtin_stpcpy (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMCPY:
      target = expand_builtin_memcpy (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMPCPY:
      target = expand_builtin_mempcpy (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMSET:
      target = expand_builtin_memset (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_BZERO:
      target = expand_builtin_bzero (exp);
      if (target)
	return target;
      break;

    case BUILT_IN_STRCMP:
      target = expand_builtin_strcmp (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_STRNCMP:
      target = expand_builtin_strncmp (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMCMP:
      target = expand_builtin_memcmp (exp, target, mode);
      if (target)
	return target;
      break;
    case BUILT_IN_SETJMP:
      /* This should have been lowered to the builtins below.  */
      gcc_unreachable ();

    case BUILT_IN_SETJMP_SETUP:
      /* __builtin_setjmp_setup is passed a pointer to an array of five words
	 and the receiver label.  */
      if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
	{
	  rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
				      VOIDmode, EXPAND_NORMAL);
	  tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
	  rtx_insn *label_r = label_rtx (label);

	  /* This is copied from the handling of non-local gotos.  */
	  expand_builtin_setjmp_setup (buf_addr, label_r);
	  nonlocal_goto_handler_labels
	    = gen_rtx_INSN_LIST (VOIDmode, label_r,
				 nonlocal_goto_handler_labels);
	  /* ??? Do not let expand_label treat us as such since we would
	     not want to be both on the list of non-local labels and on
	     the list of forced labels.  */
	  FORCED_LABEL (label) = 0;
	  return const0_rtx;
	}
      break;

    case BUILT_IN_SETJMP_RECEIVER:
      /* __builtin_setjmp_receiver is passed the receiver label.  */
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
	{
	  tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
	  rtx_insn *label_r = label_rtx (label);

	  expand_builtin_setjmp_receiver (label_r);
	  return const0_rtx;
	}
      break;

      /* __builtin_longjmp is passed a pointer to an array of five words.
	 It's similar to the C library longjmp function but works with
	 __builtin_setjmp above.  */
    case BUILT_IN_LONGJMP:
      if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
	{
	  rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
				      VOIDmode, EXPAND_NORMAL);
	  rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));

	  if (value != const1_rtx)
	    {
	      error ("%<__builtin_longjmp%> second argument must be 1");
	      return const0_rtx;
	    }

	  expand_builtin_longjmp (buf_addr, value);
	  return const0_rtx;
	}
      break;

    case BUILT_IN_NONLOCAL_GOTO:
      target = expand_builtin_nonlocal_goto (exp);
      if (target)
	return target;
      break;

      /* This updates the setjmp buffer that is its argument with the value
	 of the current stack pointer.  */
    case BUILT_IN_UPDATE_SETJMP_BUF:
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
	{
	  rtx buf_addr
	    = expand_normal (CALL_EXPR_ARG (exp, 0));

	  expand_builtin_update_setjmp_buf (buf_addr);
	}
      return const0_rtx;

    case BUILT_IN_TRAP:
      expand_builtin_trap ();
      return const0_rtx;

    case BUILT_IN_UNREACHABLE:
      expand_builtin_unreachable ();
      return const0_rtx;
    CASE_FLT_FN (BUILT_IN_SIGNBIT):
    case BUILT_IN_SIGNBITD32:
    case BUILT_IN_SIGNBITD64:
    case BUILT_IN_SIGNBITD128:
      target = expand_builtin_signbit (exp, target);
      if (target)
	return target;
      break;

      /* Various hooks for the DWARF 2 __throw routine.  */
    case BUILT_IN_UNWIND_INIT:
      expand_builtin_unwind_init ();
      return const0_rtx;
    case BUILT_IN_DWARF_CFA:
      return virtual_cfa_rtx;
#ifdef DWARF2_UNWIND_INFO
    case BUILT_IN_DWARF_SP_COLUMN:
      return expand_builtin_dwarf_sp_column ();
    case BUILT_IN_INIT_DWARF_REG_SIZES:
      expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
      return const0_rtx;
#endif
    case BUILT_IN_FROB_RETURN_ADDR:
      return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
    case BUILT_IN_EXTRACT_RETURN_ADDR:
      return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
    case BUILT_IN_EH_RETURN:
      expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
				CALL_EXPR_ARG (exp, 1));
      return const0_rtx;
    case BUILT_IN_EH_RETURN_DATA_REGNO:
      return expand_builtin_eh_return_data_regno (exp);
    case BUILT_IN_EXTEND_POINTER:
      return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
    case BUILT_IN_EH_POINTER:
      return expand_builtin_eh_pointer (exp);
    case BUILT_IN_EH_FILTER:
      return expand_builtin_eh_filter (exp);
    case BUILT_IN_EH_COPY_VALUES:
      return expand_builtin_eh_copy_values (exp);

    case BUILT_IN_VA_START:
      return expand_builtin_va_start (exp);
    case BUILT_IN_VA_END:
      return expand_builtin_va_end (exp);
    case BUILT_IN_VA_COPY:
      return expand_builtin_va_copy (exp);
    case BUILT_IN_EXPECT:
      return expand_builtin_expect (exp, target);
    case BUILT_IN_ASSUME_ALIGNED:
      return expand_builtin_assume_aligned (exp, target);
    case BUILT_IN_PREFETCH:
      expand_builtin_prefetch (exp);
      return const0_rtx;

    case BUILT_IN_INIT_TRAMPOLINE:
      return expand_builtin_init_trampoline (exp, true);
    case BUILT_IN_INIT_HEAP_TRAMPOLINE:
      return expand_builtin_init_trampoline (exp, false);
    case BUILT_IN_ADJUST_TRAMPOLINE:
      return expand_builtin_adjust_trampoline (exp);

    case BUILT_IN_FORK:
    case BUILT_IN_EXECL:
    case BUILT_IN_EXECV:
    case BUILT_IN_EXECLP:
    case BUILT_IN_EXECLE:
    case BUILT_IN_EXECVP:
    case BUILT_IN_EXECVE:
      target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
      if (target)
	return target;
      break;
    case BUILT_IN_SYNC_FETCH_AND_ADD_1:
    case BUILT_IN_SYNC_FETCH_AND_ADD_2:
    case BUILT_IN_SYNC_FETCH_AND_ADD_4:
    case BUILT_IN_SYNC_FETCH_AND_ADD_8:
    case BUILT_IN_SYNC_FETCH_AND_ADD_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
      target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_SUB_1:
    case BUILT_IN_SYNC_FETCH_AND_SUB_2:
    case BUILT_IN_SYNC_FETCH_AND_SUB_4:
    case BUILT_IN_SYNC_FETCH_AND_SUB_8:
    case BUILT_IN_SYNC_FETCH_AND_SUB_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
      target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_OR_1:
    case BUILT_IN_SYNC_FETCH_AND_OR_2:
    case BUILT_IN_SYNC_FETCH_AND_OR_4:
    case BUILT_IN_SYNC_FETCH_AND_OR_8:
    case BUILT_IN_SYNC_FETCH_AND_OR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
      target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_AND_1:
    case BUILT_IN_SYNC_FETCH_AND_AND_2:
    case BUILT_IN_SYNC_FETCH_AND_AND_4:
    case BUILT_IN_SYNC_FETCH_AND_AND_8:
    case BUILT_IN_SYNC_FETCH_AND_AND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
      target = expand_builtin_sync_operation (mode, exp, AND, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_XOR_1:
    case BUILT_IN_SYNC_FETCH_AND_XOR_2:
    case BUILT_IN_SYNC_FETCH_AND_XOR_4:
    case BUILT_IN_SYNC_FETCH_AND_XOR_8:
    case BUILT_IN_SYNC_FETCH_AND_XOR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
      target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_NAND_1:
    case BUILT_IN_SYNC_FETCH_AND_NAND_2:
    case BUILT_IN_SYNC_FETCH_AND_NAND_4:
    case BUILT_IN_SYNC_FETCH_AND_NAND_8:
    case BUILT_IN_SYNC_FETCH_AND_NAND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
      target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_ADD_AND_FETCH_1:
    case BUILT_IN_SYNC_ADD_AND_FETCH_2:
    case BUILT_IN_SYNC_ADD_AND_FETCH_4:
    case BUILT_IN_SYNC_ADD_AND_FETCH_8:
    case BUILT_IN_SYNC_ADD_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_SUB_AND_FETCH_1:
    case BUILT_IN_SYNC_SUB_AND_FETCH_2:
    case BUILT_IN_SYNC_SUB_AND_FETCH_4:
    case BUILT_IN_SYNC_SUB_AND_FETCH_8:
    case BUILT_IN_SYNC_SUB_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_OR_AND_FETCH_1:
    case BUILT_IN_SYNC_OR_AND_FETCH_2:
    case BUILT_IN_SYNC_OR_AND_FETCH_4:
    case BUILT_IN_SYNC_OR_AND_FETCH_8:
    case BUILT_IN_SYNC_OR_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_AND_AND_FETCH_1:
    case BUILT_IN_SYNC_AND_AND_FETCH_2:
    case BUILT_IN_SYNC_AND_AND_FETCH_4:
    case BUILT_IN_SYNC_AND_AND_FETCH_8:
    case BUILT_IN_SYNC_AND_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, AND, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_XOR_AND_FETCH_1:
    case BUILT_IN_SYNC_XOR_AND_FETCH_2:
    case BUILT_IN_SYNC_XOR_AND_FETCH_4:
    case BUILT_IN_SYNC_XOR_AND_FETCH_8:
    case BUILT_IN_SYNC_XOR_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_NAND_AND_FETCH_1:
    case BUILT_IN_SYNC_NAND_AND_FETCH_2:
    case BUILT_IN_SYNC_NAND_AND_FETCH_4:
    case BUILT_IN_SYNC_NAND_AND_FETCH_8:
    case BUILT_IN_SYNC_NAND_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
      if (mode == VOIDmode)
	mode = TYPE_MODE (boolean_type_node);
      if (!target || !register_operand (target, mode))
	target = gen_reg_rtx (mode);

      mode = get_builtin_sync_mode
	(fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
      target = expand_builtin_compare_and_swap (mode, exp, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
      mode = get_builtin_sync_mode
	(fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
      target = expand_builtin_compare_and_swap (mode, exp, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
      target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_LOCK_RELEASE_1:
    case BUILT_IN_SYNC_LOCK_RELEASE_2:
    case BUILT_IN_SYNC_LOCK_RELEASE_4:
    case BUILT_IN_SYNC_LOCK_RELEASE_8:
    case BUILT_IN_SYNC_LOCK_RELEASE_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
      expand_builtin_sync_lock_release (mode, exp);
      return const0_rtx;

    case BUILT_IN_SYNC_SYNCHRONIZE:
      expand_builtin_sync_synchronize ();
      return const0_rtx;
    case BUILT_IN_ATOMIC_EXCHANGE_1:
    case BUILT_IN_ATOMIC_EXCHANGE_2:
    case BUILT_IN_ATOMIC_EXCHANGE_4:
    case BUILT_IN_ATOMIC_EXCHANGE_8:
    case BUILT_IN_ATOMIC_EXCHANGE_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
      target = expand_builtin_atomic_exchange (mode, exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
      {
	unsigned int nargs, z;
	vec<tree, va_gc> *vec;

	mode =
	  get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
	target = expand_builtin_atomic_compare_exchange (mode, exp, target);
	if (target)
	  return target;

	/* If this is turned into an external library call, the weak parameter
	   must be dropped to match the expected parameter list.  */
	nargs = call_expr_nargs (exp);
	vec_alloc (vec, nargs - 1);
	for (z = 0; z < 3; z++)
	  vec->quick_push (CALL_EXPR_ARG (exp, z));
	/* Skip the boolean weak parameter.  */
	for (z = 4; z < 6; z++)
	  vec->quick_push (CALL_EXPR_ARG (exp, z));
	exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
	break;
      }

    case BUILT_IN_ATOMIC_LOAD_1:
    case BUILT_IN_ATOMIC_LOAD_2:
    case BUILT_IN_ATOMIC_LOAD_4:
    case BUILT_IN_ATOMIC_LOAD_8:
    case BUILT_IN_ATOMIC_LOAD_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
      target = expand_builtin_atomic_load (mode, exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_STORE_1:
    case BUILT_IN_ATOMIC_STORE_2:
    case BUILT_IN_ATOMIC_STORE_4:
    case BUILT_IN_ATOMIC_STORE_8:
    case BUILT_IN_ATOMIC_STORE_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
      target = expand_builtin_atomic_store (mode, exp);
      if (target)
	return const0_rtx;
      break;

    case BUILT_IN_ATOMIC_ADD_FETCH_1:
    case BUILT_IN_ATOMIC_ADD_FETCH_2:
    case BUILT_IN_ATOMIC_ADD_FETCH_4:
    case BUILT_IN_ATOMIC_ADD_FETCH_8:
    case BUILT_IN_ATOMIC_ADD_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
				       (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }

    case BUILT_IN_ATOMIC_SUB_FETCH_1:
    case BUILT_IN_ATOMIC_SUB_FETCH_2:
    case BUILT_IN_ATOMIC_SUB_FETCH_4:
    case BUILT_IN_ATOMIC_SUB_FETCH_8:
    case BUILT_IN_ATOMIC_SUB_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
				       (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }

    case BUILT_IN_ATOMIC_AND_FETCH_1:
    case BUILT_IN_ATOMIC_AND_FETCH_2:
    case BUILT_IN_ATOMIC_AND_FETCH_4:
    case BUILT_IN_ATOMIC_AND_FETCH_8:
    case BUILT_IN_ATOMIC_AND_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
				       (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }

    case BUILT_IN_ATOMIC_NAND_FETCH_1:
    case BUILT_IN_ATOMIC_NAND_FETCH_2:
    case BUILT_IN_ATOMIC_NAND_FETCH_4:
    case BUILT_IN_ATOMIC_NAND_FETCH_8:
    case BUILT_IN_ATOMIC_NAND_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
				       (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }

    case BUILT_IN_ATOMIC_XOR_FETCH_1:
    case BUILT_IN_ATOMIC_XOR_FETCH_2:
    case BUILT_IN_ATOMIC_XOR_FETCH_4:
    case BUILT_IN_ATOMIC_XOR_FETCH_8:
    case BUILT_IN_ATOMIC_XOR_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
				       (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }

    case BUILT_IN_ATOMIC_OR_FETCH_1:
    case BUILT_IN_ATOMIC_OR_FETCH_2:
    case BUILT_IN_ATOMIC_OR_FETCH_4:
    case BUILT_IN_ATOMIC_OR_FETCH_8:
    case BUILT_IN_ATOMIC_OR_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
				       (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }

    case BUILT_IN_ATOMIC_FETCH_ADD_1:
    case BUILT_IN_ATOMIC_FETCH_ADD_2:
    case BUILT_IN_ATOMIC_FETCH_ADD_4:
    case BUILT_IN_ATOMIC_FETCH_ADD_8:
    case BUILT_IN_ATOMIC_FETCH_ADD_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_SUB_1:
    case BUILT_IN_ATOMIC_FETCH_SUB_2:
    case BUILT_IN_ATOMIC_FETCH_SUB_4:
    case BUILT_IN_ATOMIC_FETCH_SUB_8:
    case BUILT_IN_ATOMIC_FETCH_SUB_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_AND_1:
    case BUILT_IN_ATOMIC_FETCH_AND_2:
    case BUILT_IN_ATOMIC_FETCH_AND_4:
    case BUILT_IN_ATOMIC_FETCH_AND_8:
    case BUILT_IN_ATOMIC_FETCH_AND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_NAND_1:
    case BUILT_IN_ATOMIC_FETCH_NAND_2:
    case BUILT_IN_ATOMIC_FETCH_NAND_4:
    case BUILT_IN_ATOMIC_FETCH_NAND_8:
    case BUILT_IN_ATOMIC_FETCH_NAND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_XOR_1:
    case BUILT_IN_ATOMIC_FETCH_XOR_2:
    case BUILT_IN_ATOMIC_FETCH_XOR_4:
    case BUILT_IN_ATOMIC_FETCH_XOR_8:
    case BUILT_IN_ATOMIC_FETCH_XOR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_OR_1:
    case BUILT_IN_ATOMIC_FETCH_OR_2:
    case BUILT_IN_ATOMIC_FETCH_OR_4:
    case BUILT_IN_ATOMIC_FETCH_OR_8:
    case BUILT_IN_ATOMIC_FETCH_OR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_TEST_AND_SET:
      return expand_builtin_atomic_test_and_set (exp, target);

    case BUILT_IN_ATOMIC_CLEAR:
      return expand_builtin_atomic_clear (exp);

    case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
      return expand_builtin_atomic_always_lock_free (exp);

    case BUILT_IN_ATOMIC_IS_LOCK_FREE:
      target = expand_builtin_atomic_is_lock_free (exp);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_THREAD_FENCE:
      expand_builtin_atomic_thread_fence (exp);
      return const0_rtx;

    case BUILT_IN_ATOMIC_SIGNAL_FENCE:
      expand_builtin_atomic_signal_fence (exp);
      return const0_rtx;
    case BUILT_IN_OBJECT_SIZE:
      return expand_builtin_object_size (exp);

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      target = expand_builtin_memory_chk (exp, target, mode, fcode);
      if (target)
	return target;
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
    case BUILT_IN_STRCAT_CHK:
    case BUILT_IN_STRNCAT_CHK:
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      maybe_emit_chk_warning (exp, fcode);
      break;

    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      maybe_emit_sprintf_chk_warning (exp, fcode);
      break;

    case BUILT_IN_FREE:
      if (warn_free_nonheap_object)
	maybe_emit_free_warning (exp);
      break;

    case BUILT_IN_THREAD_POINTER:
      return expand_builtin_thread_pointer (exp, target);

    case BUILT_IN_SET_THREAD_POINTER:
      expand_builtin_set_thread_pointer (exp);
      return const0_rtx;

    case BUILT_IN_CILK_DETACH:
      expand_builtin_cilk_detach (exp);
      return const0_rtx;

    case BUILT_IN_CILK_POP_FRAME:
      expand_builtin_cilk_pop_frame (exp);
      return const0_rtx;

    case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
    case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
    case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
    case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
    case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
    case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
    case BUILT_IN_CHKP_SET_PTR_BOUNDS:
    case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
    case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
    case BUILT_IN_CHKP_GET_PTR_LBOUND:
    case BUILT_IN_CHKP_GET_PTR_UBOUND:
      /* We allow user CHKP builtins if Pointer Bounds
	 Checker is off.  */
      if (!chkp_function_instrumented_p (current_function_decl))
	{
	  if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
	      || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
	      || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
	      || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
	      || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
	    return expand_normal (CALL_EXPR_ARG (exp, 0));
	  else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
	    return expand_normal (size_zero_node);
	  else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
	    return expand_normal (size_int (-1));
	  else
	    return const0_rtx;
	}
      /* FALLTHRU */

    case BUILT_IN_CHKP_BNDMK:
    case BUILT_IN_CHKP_BNDSTX:
    case BUILT_IN_CHKP_BNDCL:
    case BUILT_IN_CHKP_BNDCU:
    case BUILT_IN_CHKP_BNDLDX:
    case BUILT_IN_CHKP_BNDRET:
    case BUILT_IN_CHKP_INTERSECT:
    case BUILT_IN_CHKP_NARROW:
    case BUILT_IN_CHKP_EXTRACT_LOWER:
    case BUILT_IN_CHKP_EXTRACT_UPPER:
      /* Software implementation of Pointer Bounds Checker is NYI.
	 Target support is required.  */
      error ("Your target platform does not support -fcheck-pointer-bounds");
      break;

    case BUILT_IN_ACC_ON_DEVICE:
      target = expand_builtin_acc_on_device (exp, target);
      if (target)
	return target;
      break;

    default:	/* just do library call, if unknown builtin */
      break;
    }

  /* The switch statement above can drop through to cause the function
     to be called normally.  */
  return expand_call (exp, target, ignore);
}

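/* Note on the fallthrough above (illustrative, not part of GCC): any case
   that breaks out of the switch without returning, e.g. a 16-byte __sync
   operation on a target without 16-byte compare-and-swap, reaches
   expand_call and becomes an out-of-line call:

     unsigned __int128 x;
     __sync_fetch_and_add (&x, 1);   // may expand inline, or call the
				     // __sync_fetch_and_add_16 library
				     // function

   so builtins handled here are generally assumed to have a library
   counterpart of the same name for the fallback path.  */
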
/* Similar to expand_builtin but is used for instrumented calls.  */

rtx
expand_builtin_with_bounds (tree exp, rtx target,
			    rtx subtarget ATTRIBUTE_UNUSED,
			    machine_mode mode, int ignore)
{
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  gcc_assert (CALL_WITH_BOUNDS_P (exp));

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return targetm.expand_builtin (exp, target, subtarget, mode, ignore);

  gcc_assert (fcode > BEGIN_CHKP_BUILTINS
	      && fcode < END_CHKP_BUILTINS);

  switch (fcode)
    {
    case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
      target = expand_builtin_memcpy_with_bounds (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
      target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
      target = expand_builtin_memset_with_bounds (exp, target, mode);
      if (target)
	return target;
      break;

    default:
      break;
    }

  /* The switch statement above can drop through to cause the function
     to be called normally.  */
  return expand_call (exp, target, ignore);
}

/* Determine whether a tree node represents a call to a built-in
   function.  If the tree T is a call to a built-in function with
   the right number of arguments of the appropriate types, return
   the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
   Otherwise the return value is END_BUILTINS.  */

enum built_in_function
builtin_mathfn_code (const_tree t)
{
  const_tree fndecl, arg, parmlist;
  const_tree argtype, parmtype;
  const_call_expr_arg_iterator iter;

  if (TREE_CODE (t) != CALL_EXPR
      || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
    return END_BUILTINS;

  fndecl = get_callee_fndecl (t);
  if (fndecl == NULL_TREE
      || TREE_CODE (fndecl) != FUNCTION_DECL
      || ! DECL_BUILT_IN (fndecl)
      || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return END_BUILTINS;

  parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  init_const_call_expr_arg_iterator (t, &iter);
  for (; parmlist; parmlist = TREE_CHAIN (parmlist))
    {
      /* If a function doesn't take a variable number of arguments,
	 the last element in the list will have type `void'.  */
      parmtype = TREE_VALUE (parmlist);
      if (VOID_TYPE_P (parmtype))
	{
	  if (more_const_call_expr_args_p (&iter))
	    return END_BUILTINS;
	  return DECL_FUNCTION_CODE (fndecl);
	}

      if (! more_const_call_expr_args_p (&iter))
	return END_BUILTINS;

      arg = next_const_call_expr_arg (&iter);
      argtype = TREE_TYPE (arg);

      if (SCALAR_FLOAT_TYPE_P (parmtype))
	{
	  if (! SCALAR_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (COMPLEX_FLOAT_TYPE_P (parmtype))
	{
	  if (! COMPLEX_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (POINTER_TYPE_P (parmtype))
	{
	  if (! POINTER_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (INTEGRAL_TYPE_P (parmtype))
	{
	  if (! INTEGRAL_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else
	return END_BUILTINS;
    }

  /* Variable-length argument list.  */
  return DECL_FUNCTION_CODE (fndecl);
}

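/* Illustrative (not part of GCC): for a call tree T representing
   `sqrt (x)` with a double argument this returns BUILT_IN_SQRT; for
   `sqrtf (x)` it returns BUILT_IN_SQRTF.  A typical caller pattern used
   elsewhere in this file:

     switch (builtin_mathfn_code (arg))
       {
       CASE_FLT_FN (BUILT_IN_SQRT):   // matches sqrt/sqrtf/sqrtl
	 ...
       default:
	 break;
       }

   The argument-type walk above is what guarantees the match is not a
   user-declared function with the wrong prototype.  */
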
/* Fold a call to __builtin_constant_p, if we know its argument ARG will
   evaluate to a constant.  */

static tree
fold_builtin_constant_p (tree arg)
{
  /* We return 1 for a numeric type that's known to be a constant
     value at compile-time or for an aggregate type that's a
     literal constant.  */
  STRIP_NOPS (arg);

  /* If we know this is a constant, emit the constant of one.  */
  if (CONSTANT_CLASS_P (arg)
      || (TREE_CODE (arg) == CONSTRUCTOR
	  && TREE_CONSTANT (arg)))
    return integer_one_node;
  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (arg, 0);
      if (TREE_CODE (op) == STRING_CST
	  || (TREE_CODE (op) == ARRAY_REF
	      && integer_zerop (TREE_OPERAND (op, 1))
	      && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
	return integer_one_node;
    }

  /* If this expression has side effects, show we don't know it to be a
     constant.  Likewise if it's a pointer or aggregate type since in
     those case we only want literals, since those are only optimized
     when generating RTL, not later.
     And finally, if we are compiling an initializer, not code, we
     need to return a definite result now; there's not going to be any
     more optimization done.  */
  if (TREE_SIDE_EFFECTS (arg)
      || AGGREGATE_TYPE_P (TREE_TYPE (arg))
      || POINTER_TYPE_P (TREE_TYPE (arg))
      || cfun == 0
      || folding_initializer
      || force_folding_builtin_constant_p)
    return integer_zero_node;

  return NULL_TREE;
}

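/* Illustrative usage (not part of GCC itself): the folding above is what
   makes compile-time dispatch macros of this style work:

     #define LITLEN(s) \
       (__builtin_constant_p (s) ? sizeof (s) - 1 : strlen (s))

   For a string literal the ADDR_EXPR-of-STRING_CST test yields
   integer_one_node; for a runtime pointer the fold is deferred
   (NULL_TREE), and anything still unresolved at RTL expansion becomes 0
   (see the BUILT_IN_CONSTANT_P case in expand_builtin above).  */
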
/* Create builtin_expect with PRED and EXPECTED as its arguments and
   return it as a truthvalue.  */

static tree
build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
				tree predictor)
{
  tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;

  fn = builtin_decl_explicit (BUILT_IN_EXPECT);
  arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
  ret_type = TREE_TYPE (TREE_TYPE (fn));
  pred_type = TREE_VALUE (arg_types);
  expected_type = TREE_VALUE (TREE_CHAIN (arg_types));

  pred = fold_convert_loc (loc, pred_type, pred);
  expected = fold_convert_loc (loc, expected_type, expected);
  call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
				   predictor);

  return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
		 build_int_cst (ret_type, 0));
}

/* Fold a call to builtin_expect with arguments ARG0 and ARG1.  Return
   NULL_TREE if no simplification is possible.  */

tree
fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
{
  tree inner, fndecl, inner_arg0;
  enum tree_code code;

  /* Distribute the expected value over short-circuiting operators.
     See through the cast from truthvalue_type_node to long.  */
  inner_arg0 = arg0;
  while (CONVERT_EXPR_P (inner_arg0)
	 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
    inner_arg0 = TREE_OPERAND (inner_arg0, 0);

  /* If this is a builtin_expect within a builtin_expect keep the
     inner one.  See through a comparison against a constant.  It
     might have been added to create a truthvalue.  */
  inner = inner_arg0;

  if (COMPARISON_CLASS_P (inner)
      && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
    inner = TREE_OPERAND (inner, 0);

  if (TREE_CODE (inner) == CALL_EXPR
      && (fndecl = get_callee_fndecl (inner))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
    return arg0;

  inner = inner_arg0;
  code = TREE_CODE (inner);
  if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
    {
      tree op0 = TREE_OPERAND (inner, 0);
      tree op1 = TREE_OPERAND (inner, 1);

      op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
      op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
      inner = build2 (code, TREE_TYPE (inner), op0, op1);

      return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
    }

  /* If the argument isn't invariant then there's nothing else we can do.  */
  if (!TREE_CONSTANT (inner_arg0))
    return NULL_TREE;

  /* If we expect that a comparison against the argument will fold to
     a constant return the constant.  In practice, this means a true
     constant or the address of a non-weak symbol.  */
  inner = inner_arg0;
  STRIP_NOPS (inner);
  if (TREE_CODE (inner) == ADDR_EXPR)
    {
      do
	{
	  inner = TREE_OPERAND (inner, 0);
	}
      while (TREE_CODE (inner) == COMPONENT_REF
	     || TREE_CODE (inner) == ARRAY_REF);
      if ((TREE_CODE (inner) == VAR_DECL
	   || TREE_CODE (inner) == FUNCTION_DECL)
	  && DECL_WEAK (inner))
	return NULL_TREE;
    }

  /* Otherwise, ARG0 already has the proper type for the return value.  */
  return arg0;
}

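/* Illustrative (not part of GCC): the distribution step above rewrites

     __builtin_expect (a && b, 1)

   into roughly

     (__builtin_expect (a, 1) && __builtin_expect (b, 1))

   so the branch-prediction hint survives the short-circuit lowering of
   the `&&`.  The invariant check then handles trivial cases, e.g.
   __builtin_expect (42, 1) folds to 42 outright.  */
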
/* Fold a call to __builtin_classify_type with argument ARG.  */

static tree
fold_builtin_classify_type (tree arg)
{
  if (arg == 0)
    return build_int_cst (integer_type_node, no_type_class);

  return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
}

/* Fold a call to __builtin_strlen with argument ARG.  */

static tree
fold_builtin_strlen (location_t loc, tree type, tree arg)
{
  if (!validate_arg (arg, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree len = c_strlen (arg, 0);

      if (len)
	return fold_convert_loc (loc, type, len);

      return NULL_TREE;
    }
}

/* Fold a call to __builtin_inf or __builtin_huge_val.  */

static tree
fold_builtin_inf (location_t loc, tree type, int warn)
{
  REAL_VALUE_TYPE real;

  /* __builtin_inff is intended to be usable to define INFINITY on all
     targets.  If an infinity is not available, INFINITY expands "to a
     positive constant of type float that overflows at translation
     time", footnote "In this case, using INFINITY will violate the
     constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
     Thus we pedwarn to ensure this constraint violation is
     diagnosed.  */
  if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
    pedwarn (loc, 0, "target format does not support infinity");

  real_inf (&real);
  return build_real (type, real);
}

/* Fold a call to __builtin_nan or __builtin_nans with argument ARG.  */

static tree
fold_builtin_nan (tree arg, tree type, int quiet)
{
  REAL_VALUE_TYPE real;
  const char *str;

  if (!validate_arg (arg, POINTER_TYPE))
    return NULL_TREE;
  str = c_getstr (arg);
  if (!str)
    return NULL_TREE;

  if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
    return NULL_TREE;

  return build_real (type, real);
}

/* Return true if the floating point expression T has an integer value.
   We also allow +Inf, -Inf and NaN to be considered integer values.  */

static bool
integer_valued_real_p (tree t)
{
  switch (TREE_CODE (t))
    {
    case FLOAT_EXPR:
      return true;

    case ABS_EXPR:
    case SAVE_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 0));

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 1));

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 0))
	     && integer_valued_real_p (TREE_OPERAND (t, 1));

    case COND_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 1))
	     && integer_valued_real_p (TREE_OPERAND (t, 2));

    case REAL_CST:
      return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));

    CASE_CONVERT:
      {
	tree type = TREE_TYPE (TREE_OPERAND (t, 0));
	if (TREE_CODE (type) == INTEGER_TYPE)
	  return true;
	if (TREE_CODE (type) == REAL_TYPE)
	  return integer_valued_real_p (TREE_OPERAND (t, 0));
	break;
      }

    case CALL_EXPR:
      switch (builtin_mathfn_code (t))
	{
	CASE_FLT_FN (BUILT_IN_CEIL):
	CASE_FLT_FN (BUILT_IN_FLOOR):
	CASE_FLT_FN (BUILT_IN_NEARBYINT):
	CASE_FLT_FN (BUILT_IN_RINT):
	CASE_FLT_FN (BUILT_IN_ROUND):
	CASE_FLT_FN (BUILT_IN_TRUNC):
	  return true;

	CASE_FLT_FN (BUILT_IN_FMIN):
	CASE_FLT_FN (BUILT_IN_FMAX):
	  return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
		 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));

	default:
	  break;
	}
      break;

    default:
      break;
    }
  return false;
}

/* FNDECL is assumed to be a builtin where truncation can be propagated
   across (for instance floor((double)f) == (double)floorf (f)).
   Do the transformation for a call with argument ARG.  */

static tree
fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Integer rounding functions are idempotent.  */
  if (fcode == builtin_mathfn_code (arg))
    return arg;

  /* If argument is already integer valued, and we don't need to worry
     about setting errno, there's no need to perform rounding.  */
  if (! flag_errno_math && integer_valued_real_p (arg))
    return arg;

  if (optimize)
    {
      tree arg0 = strip_float_extensions (arg);
      tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
      tree newtype = TREE_TYPE (arg0);
      tree decl;

      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
	  && (decl = mathfn_built_in (newtype, fcode)))
	return fold_convert_loc (loc, ftype,
				 build_call_expr_loc (loc, decl, 1,
						      fold_convert_loc (loc,
									newtype,
									arg0)));
    }
  return NULL_TREE;
}

/* FNDECL is assumed to be builtin which can narrow the FP type of
   the argument, for instance lround((double)f) -> lroundf (f).
   Do the transformation for a call with argument ARG.  */

static tree
fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* If argument is already integer valued, and we don't need to worry
     about setting errno, there's no need to perform rounding.  */
  if (! flag_errno_math && integer_valued_real_p (arg))
    return fold_build1_loc (loc, FIX_TRUNC_EXPR,
			    TREE_TYPE (TREE_TYPE (fndecl)), arg);

  if (optimize)
    {
      tree ftype = TREE_TYPE (arg);
      tree arg0 = strip_float_extensions (arg);
      tree newtype = TREE_TYPE (arg0);
      tree decl;

      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
	  && (decl = mathfn_built_in (newtype, fcode)))
	return build_call_expr_loc (loc, decl, 1,
				    fold_convert_loc (loc, newtype, arg0));
    }

  /* Canonicalize iround (x) to lround (x) on ILP32 targets where
     sizeof (int) == sizeof (long).  */
  if (TYPE_PRECISION (integer_type_node)
      == TYPE_PRECISION (long_integer_type_node))
    {
      tree newfn = NULL_TREE;
      switch (fcode)
	{
	CASE_FLT_FN (BUILT_IN_ICEIL):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
	  break;

	CASE_FLT_FN (BUILT_IN_IFLOOR):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
	  break;

	CASE_FLT_FN (BUILT_IN_IROUND):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
	  break;

	CASE_FLT_FN (BUILT_IN_IRINT):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
	  break;

	default:
	  break;
	}

      if (newfn)
	{
	  tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
	  return fold_convert_loc (loc,
				   TREE_TYPE (TREE_TYPE (fndecl)), newcall);
	}
    }

  /* Canonicalize llround (x) to lround (x) on LP64 targets where
     sizeof (long long) == sizeof (long).  */
  if (TYPE_PRECISION (long_long_integer_type_node)
      == TYPE_PRECISION (long_integer_type_node))
    {
      tree newfn = NULL_TREE;
      switch (fcode)
	{
	CASE_FLT_FN (BUILT_IN_LLCEIL):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
	  break;

	CASE_FLT_FN (BUILT_IN_LLFLOOR):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
	  break;

	CASE_FLT_FN (BUILT_IN_LLROUND):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
	  break;

	CASE_FLT_FN (BUILT_IN_LLRINT):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
	  break;

	default:
	  break;
	}

      if (newfn)
	{
	  tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
	  return fold_convert_loc (loc,
				   TREE_TYPE (TREE_TYPE (fndecl)), newcall);
	}
    }

  return NULL_TREE;
}

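/* Worked example (illustrative, not part of GCC): on an LP64 target where
   long and long long are both 64 bits, the second canonicalization above
   turns

     long long r = llround (x);   // x is double

   into the equivalent lround (x), so later passes only ever see one
   spelling of the 64-bit rounding function.  The ILP32 block does the
   same for iround -> lround when int and long are both 32 bits.  */
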
/* Fold call to builtin cabs, cabsf or cabsl with argument ARG.  TYPE is the
   return type.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
{
  tree res;

  if (!validate_arg (arg, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if (TREE_CODE (arg) == COMPLEX_CST
      && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
			      type, mpfr_hypot)))
    return res;

  if (TREE_CODE (arg) == COMPLEX_EXPR)
    {
      tree real = TREE_OPERAND (arg, 0);
      tree imag = TREE_OPERAND (arg, 1);

      /* If either part is zero, cabs is fabs of the other.  */
      if (real_zerop (real))
	return fold_build1_loc (loc, ABS_EXPR, type, imag);
      if (real_zerop (imag))
	return fold_build1_loc (loc, ABS_EXPR, type, real);

      /* cabs(x+xi) -> fabs(x)*sqrt(2).  */
      if (flag_unsafe_math_optimizations
	  && operand_equal_p (real, imag, OEP_PURE_SAME))
	{
	  const REAL_VALUE_TYPE sqrt2_trunc
	    = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());

	  return fold_build2_loc (loc, MULT_EXPR, type,
				  fold_build1_loc (loc, ABS_EXPR, type, real),
				  build_real (type, sqrt2_trunc));
	}
    }

  /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z).  */
  if (TREE_CODE (arg) == NEGATE_EXPR
      || TREE_CODE (arg) == CONJ_EXPR)
    return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));

  /* Don't do this when optimizing for size.  */
  if (flag_unsafe_math_optimizations
      && optimize && optimize_function_for_speed_p (cfun))
    {
      tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

      if (sqrtfn != NULL_TREE)
	{
	  tree rpart, ipart, result;

	  arg = builtin_save_expr (arg);

	  rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
	  ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);

	  rpart = builtin_save_expr (rpart);
	  ipart = builtin_save_expr (ipart);

	  result = fold_build2_loc (loc, PLUS_EXPR, type,
				    fold_build2_loc (loc, MULT_EXPR, type,
						     rpart, rpart),
				    fold_build2_loc (loc, MULT_EXPR, type,
						     ipart, ipart));

	  return build_call_expr_loc (loc, sqrtfn, 1, result);
	}
    }

  return NULL_TREE;
}

/* Build a complex (inf +- 0i) for the result of cproj.  TYPE is the
   complex tree type of the result.  If NEG is true, the imaginary
   zero is negative.  */

static tree
build_complex_cproj (tree type, bool neg)
{
  REAL_VALUE_TYPE rinf, rzero = dconst0;

  real_inf (&rinf);
  rzero.sign = neg;
  return build_complex (type, build_real (TREE_TYPE (type), rinf),
			build_real (TREE_TYPE (type), rzero));
}

/* Fold call to builtin cproj, cprojf or cprojl with argument ARG.  TYPE is the
   return type.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cproj (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
    return NULL_TREE;

  /* If there are no infinities, return arg.  */
  if (! HONOR_INFINITIES (type))
    return non_lvalue_loc (loc, arg);

  /* Calculate the result when the argument is a constant.  */
  if (TREE_CODE (arg) == COMPLEX_CST)
    {
      const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      if (real_isinf (real) || real_isinf (imag))
	return build_complex_cproj (type, imag->sign);
      else
	return arg;
    }
  else if (TREE_CODE (arg) == COMPLEX_EXPR)
    {
      tree real = TREE_OPERAND (arg, 0);
      tree imag = TREE_OPERAND (arg, 1);

      STRIP_NOPS (real);
      STRIP_NOPS (imag);

      /* If the real part is inf and the imag part is known to be
	 nonnegative, return (inf + 0i).  Remember side-effects are
	 possible in the imag part.  */
      if (TREE_CODE (real) == REAL_CST
	  && real_isinf (TREE_REAL_CST_PTR (real))
	  && tree_expr_nonnegative_p (imag))
	return omit_one_operand_loc (loc, type,
				     build_complex_cproj (type, false),
				     arg);

      /* If the imag part is inf, return (inf+I*copysign(0,imag)).
	 Remember side-effects are possible in the real part.  */
      if (TREE_CODE (imag) == REAL_CST
	  && real_isinf (TREE_REAL_CST_PTR (imag)))
	return
	  omit_one_operand_loc (loc, type,
				build_complex_cproj (type, TREE_REAL_CST_PTR
						     (imag)->sign), arg);
    }

  return NULL_TREE;
}

/* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_sqrt (location_t loc, tree arg, tree type)
{
  tree res;
  enum built_in_function fcode;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
    return res;

  /* Optimize sqrt(expN(x)) = expN(x*0.5).  */
  fcode = builtin_mathfn_code (arg);
  if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
    {
      tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      arg = fold_build2_loc (loc, MULT_EXPR, type,
			     CALL_EXPR_ARG (arg, 0),
			     build_real (type, dconsthalf));
      return build_call_expr_loc (loc, expfn, 1, arg);
    }

  /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)).  */
  if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
    {
      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

      if (powfn)
	{
	  tree arg0 = CALL_EXPR_ARG (arg, 0);
	  tree tree_root;
	  /* The inner root was either sqrt or cbrt.  */
	  /* This was a conditional expression but it triggered a bug
	     in Sun C 5.5.  */
	  REAL_VALUE_TYPE dconstroot;
	  if (BUILTIN_SQRT_P (fcode))
	    dconstroot = dconsthalf;
	  else
	    dconstroot = dconst_third ();

	  /* Adjust for the outer root.  */
	  SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
	  dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
	  tree_root = build_real (type, dconstroot);
	  return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
	}
    }

  /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5).  */
  if (flag_unsafe_math_optimizations
      && (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL))
    {
      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      tree arg0 = CALL_EXPR_ARG (arg, 0);
      tree arg1 = CALL_EXPR_ARG (arg, 1);
      tree narg1;
      if (!tree_expr_nonnegative_p (arg0))
	arg0 = build1 (ABS_EXPR, type, arg0);
      narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
			       build_real (type, dconsthalf));
      return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
    }

  return NULL_TREE;
}
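
/* Illustrative note (not part of the original source): with
   -funsafe-math-optimizations the last transformation above rewrites,
   for example,

     double f (double x, double y) { return sqrt (pow (x, y)); }

   into the equivalent of

     double f (double x, double y) { return pow (fabs (x), y * 0.5); }

   Taking |x| keeps the rewritten base nonnegative, matching the
   nonnegative result that sqrt of pow(x,y) must have whenever the
   original expression was well defined.  */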
/* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cbrt (location_t loc, tree arg, tree type)
{
  const enum built_in_function fcode = builtin_mathfn_code (arg);
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
    return res;

  if (flag_unsafe_math_optimizations)
    {
      /* Optimize cbrt(expN(x)) -> expN(x/3).  */
      if (BUILTIN_EXPONENT_P (fcode))
	{
	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
	  const REAL_VALUE_TYPE third_trunc =
	    real_value_truncate (TYPE_MODE (type), dconst_third ());
	  arg = fold_build2_loc (loc, MULT_EXPR, type,
				 CALL_EXPR_ARG (arg, 0),
				 build_real (type, third_trunc));
	  return build_call_expr_loc (loc, expfn, 1, arg);
	}

      /* Optimize cbrt(sqrt(x)) -> pow(x,1/6).  */
      if (BUILTIN_SQRT_P (fcode))
	{
	  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

	  if (powfn)
	    {
	      tree arg0 = CALL_EXPR_ARG (arg, 0);
	      tree tree_root;
	      REAL_VALUE_TYPE dconstroot = dconst_third ();

	      SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
	      dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
	      tree_root = build_real (type, dconstroot);
	      return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
	    }
	}

      /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
	{
	  tree arg0 = CALL_EXPR_ARG (arg, 0);
	  if (tree_expr_nonnegative_p (arg0))
	    {
	      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

	      if (powfn)
		{
		  tree tree_root;
		  REAL_VALUE_TYPE dconstroot;

		  real_arithmetic (&dconstroot, MULT_EXPR,
				   dconst_third_ptr (), dconst_third_ptr ());
		  dconstroot = real_value_truncate (TYPE_MODE (type),
						    dconstroot);
		  tree_root = build_real (type, dconstroot);
		  return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
		}
	    }
	}

      /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL)
	{
	  tree arg00 = CALL_EXPR_ARG (arg, 0);
	  tree arg01 = CALL_EXPR_ARG (arg, 1);
	  if (tree_expr_nonnegative_p (arg00))
	    {
	      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
	      const REAL_VALUE_TYPE dconstroot
		= real_value_truncate (TYPE_MODE (type), dconst_third ());
	      tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
					     build_real (type, dconstroot));
	      return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
	    }
	}
    }

  return NULL_TREE;
}
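
/* Illustrative note (not part of the original source): the
   nonnegativity requirement in the last two folds matters because,
   for example,

     cbrt (cbrt (-512.0))   ==  -2.0,  while
     pow (-512.0, 1.0/9.0)  is a domain error (NaN),

   so cbrt(cbrt(x)) -> pow(x, 1/9) is applied only when GCC can prove
   x >= 0.  */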
/* Fold function call to builtin cos, cosf, or cosl with argument ARG.
   TYPE is the type of the return value.  Return NULL_TREE if no
   simplification can be made.  */

static tree
fold_builtin_cos (location_t loc,
		  tree arg, tree type, tree fndecl)
{
  tree res, narg;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
    return res;

  /* Optimize cos(-x) into cos (x).  */
  if ((narg = fold_strip_sign_ops (arg)))
    return build_call_expr_loc (loc, fndecl, 1, narg);

  return NULL_TREE;
}
/* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree res, narg;

      /* Calculate the result when the argument is a constant.  */
      if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
	return res;

      /* Optimize cosh(-x) into cosh (x).  */
      if ((narg = fold_strip_sign_ops (arg)))
	return build_call_expr_loc (loc, fndecl, 1, narg);
    }

  return NULL_TREE;
}
/* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
   argument ARG.  TYPE is the type of the return value.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
		   bool hyper)
{
  if (validate_arg (arg, COMPLEX_TYPE)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
    {
      tree tmp;

      /* Calculate the result when the argument is a constant.  */
      if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
	return tmp;

      /* Optimize fn(-x) into fn(x).  */
      if ((tmp = fold_strip_sign_ops (arg)))
	return build_call_expr_loc (loc, fndecl, 1, tmp);
    }

  return NULL_TREE;
}
/* Fold function call to builtin tan, tanf, or tanl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_tan (tree arg, tree type)
{
  enum built_in_function fcode;
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
    return res;

  /* Optimize tan(atan(x)) = x.  */
  fcode = builtin_mathfn_code (arg);
  if (flag_unsafe_math_optimizations
      && (fcode == BUILT_IN_ATAN
	  || fcode == BUILT_IN_ATANF
	  || fcode == BUILT_IN_ATANL))
    return CALL_EXPR_ARG (arg, 0);

  return NULL_TREE;
}
/* Fold function call to builtin sincos, sincosf, or sincosl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_sincos (location_t loc,
		     tree arg0, tree arg1, tree arg2)
{
  tree type;
  tree res, fn, call;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  type = TREE_TYPE (arg0);

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
    return res;

  /* Canonicalize sincos to cexpi.  */
  if (!targetm.libc_has_function (function_c99_math_complex))
    return NULL_TREE;
  fn = mathfn_built_in (type, BUILT_IN_CEXPI);
  if (!fn)
    return NULL_TREE;

  call = build_call_expr_loc (loc, fn, 1, arg0);
  call = builtin_save_expr (call);

  return build2 (COMPOUND_EXPR, void_type_node,
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg1),
			 build1 (IMAGPART_EXPR, type, call)),
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg2),
			 build1 (REALPART_EXPR, type, call)));
}
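
/* Illustrative note (not part of the original source): when the C
   library provides the C99 complex functions, the canonicalization
   above turns

     sincos (x, &s, &c);

   into the equivalent of

     _Complex double t = cexpi (x);    (that is, cos(x) + I*sin(x))
     s = __imag__ t;
     c = __real__ t;

   so that later passes only have to recognize one idiom.  */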
/* Fold function call to builtin cexp, cexpf, or cexpl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cexp (location_t loc, tree arg0, tree type)
{
  tree rtype;
  tree realp, imagp, ifn;
  tree res;

  if (!validate_arg (arg0, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
    return res;

  rtype = TREE_TYPE (TREE_TYPE (arg0));

  /* In case we can figure out the real part of arg0 and it is constant zero
     fold to cexpi.  */
  if (!targetm.libc_has_function (function_c99_math_complex))
    return NULL_TREE;
  ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
  if (!ifn)
    return NULL_TREE;

  if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
      && real_zerop (realp))
    {
      tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
      return build_call_expr_loc (loc, ifn, 1, narg);
    }

  /* In case we can easily decompose real and imaginary parts split cexp
     to exp (r) * cexpi (i).  */
  if (flag_unsafe_math_optimizations
      && realp)
    {
      tree rfn, rcall, icall;

      rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
      if (!rfn)
	return NULL_TREE;

      imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
      if (!imagp)
	return NULL_TREE;

      icall = build_call_expr_loc (loc, ifn, 1, imagp);
      icall = builtin_save_expr (icall);
      rcall = build_call_expr_loc (loc, rfn, 1, realp);
      rcall = builtin_save_expr (rcall);
      return fold_build2_loc (loc, COMPLEX_EXPR, type,
			      fold_build2_loc (loc, MULT_EXPR, rtype,
					       rcall,
					       fold_build1_loc (loc, REALPART_EXPR,
								rtype, icall)),
			      fold_build2_loc (loc, MULT_EXPR, rtype,
					       rcall,
					       fold_build1_loc (loc, IMAGPART_EXPR,
								rtype, icall)));
    }

  return NULL_TREE;
}
/* Fold function call to builtin trunc, truncf or truncl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize trunc of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE r, x;
      tree type = TREE_TYPE (TREE_TYPE (fndecl));

      x = TREE_REAL_CST (arg);
      real_trunc (&r, TYPE_MODE (type), &x);
      return build_real (type, r);
    }

  return fold_trunc_transparent_mathfn (loc, fndecl, arg);
}
/* Fold function call to builtin floor, floorf or floorl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_floor (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize floor of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE x;

      x = TREE_REAL_CST (arg);
      if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
	{
	  tree type = TREE_TYPE (TREE_TYPE (fndecl));
	  REAL_VALUE_TYPE r;

	  real_floor (&r, TYPE_MODE (type), &x);
	  return build_real (type, r);
	}
    }

  /* Fold floor (x) where x is nonnegative to trunc (x).  */
  if (tree_expr_nonnegative_p (arg))
    {
      tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
      if (truncfn)
	return build_call_expr_loc (loc, truncfn, 1, arg);
    }

  return fold_trunc_transparent_mathfn (loc, fndecl, arg);
}
/* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize ceil of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE x;

      x = TREE_REAL_CST (arg);
      if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
	{
	  tree type = TREE_TYPE (TREE_TYPE (fndecl));
	  REAL_VALUE_TYPE r;

	  real_ceil (&r, TYPE_MODE (type), &x);
	  return build_real (type, r);
	}
    }

  return fold_trunc_transparent_mathfn (loc, fndecl, arg);
}
/* Fold function call to builtin round, roundf or roundl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_round (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize round of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE x;

      x = TREE_REAL_CST (arg);
      if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
	{
	  tree type = TREE_TYPE (TREE_TYPE (fndecl));
	  REAL_VALUE_TYPE r;

	  real_round (&r, TYPE_MODE (type), &x);
	  return build_real (type, r);
	}
    }

  return fold_trunc_transparent_mathfn (loc, fndecl, arg);
}
/* Fold function call to builtin lround, lroundf or lroundl (or the
   corresponding long long versions) and other rounding functions.  ARG
   is the argument to the call.  Return NULL_TREE if no simplification
   can be made.  */

static tree
fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize lround of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);

      if (real_isfinite (&x))
	{
	  tree itype = TREE_TYPE (TREE_TYPE (fndecl));
	  tree ftype = TREE_TYPE (arg);
	  REAL_VALUE_TYPE r;
	  bool fail = false;

	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_IFLOOR):
	    CASE_FLT_FN (BUILT_IN_LFLOOR):
	    CASE_FLT_FN (BUILT_IN_LLFLOOR):
	      real_floor (&r, TYPE_MODE (ftype), &x);
	      break;

	    CASE_FLT_FN (BUILT_IN_ICEIL):
	    CASE_FLT_FN (BUILT_IN_LCEIL):
	    CASE_FLT_FN (BUILT_IN_LLCEIL):
	      real_ceil (&r, TYPE_MODE (ftype), &x);
	      break;

	    CASE_FLT_FN (BUILT_IN_IROUND):
	    CASE_FLT_FN (BUILT_IN_LROUND):
	    CASE_FLT_FN (BUILT_IN_LLROUND):
	      real_round (&r, TYPE_MODE (ftype), &x);
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  wide_int val = real_to_integer (&r, &fail, TYPE_PRECISION (itype));
	  if (!fail)
	    return wide_int_to_tree (itype, val);
	}
    }

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x).  */
      if (tree_expr_nonnegative_p (arg))
	return fold_build1_loc (loc, FIX_TRUNC_EXPR,
				TREE_TYPE (TREE_TYPE (fndecl)), arg);
      break;
    default:;
    }

  return fold_fixed_mathfn (loc, fndecl, arg);
}
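
/* Illustrative note (not part of the original source): for a finite
   constant argument the switch above evaluates the call completely at
   compile time, e.g.

     long f (void) { return lround (2.5); }

   folds to the constant 3L, while a value such as lround (1e300) is
   left alone because real_to_integer reports, via FAIL, that the
   result does not fit in the integer return type.  */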
/* Fold function call to builtin ffs, clz, ctz, popcount and parity
   and their long and long long variants (i.e. ffsl and ffsll).  ARG is
   the argument to the call.  Return NULL_TREE if no simplification can
   be made.  */

static tree
fold_builtin_bitop (tree fndecl, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize for constant argument.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      tree type = TREE_TYPE (arg);
      int result;

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	CASE_INT_FN (BUILT_IN_FFS):
	  result = wi::ffs (arg);
	  break;

	CASE_INT_FN (BUILT_IN_CLZ):
	  if (wi::ne_p (arg, 0))
	    result = wi::clz (arg);
	  else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
	    result = TYPE_PRECISION (type);
	  break;

	CASE_INT_FN (BUILT_IN_CTZ):
	  if (wi::ne_p (arg, 0))
	    result = wi::ctz (arg);
	  else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
	    result = TYPE_PRECISION (type);
	  break;

	CASE_INT_FN (BUILT_IN_CLRSB):
	  result = wi::clrsb (arg);
	  break;

	CASE_INT_FN (BUILT_IN_POPCOUNT):
	  result = wi::popcount (arg);
	  break;

	CASE_INT_FN (BUILT_IN_PARITY):
	  result = wi::parity (arg);
	  break;

	default:
	  gcc_unreachable ();
	}

      return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
    }

  return NULL_TREE;
}
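
/* Illustrative note (not part of the original source): each case above
   maps a constant argument onto a wide-int query, so for instance

     __builtin_popcount (0xf0)  folds to 4, and
     __builtin_ctz (16)         folds to 4,

   while __builtin_clz (0) folds to the target-defined value when
   CLZ_DEFINED_VALUE_AT_ZERO provides one, and to the type precision
   otherwise.  */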
/* Fold function call to builtin_bswap and the short, long and long long
   variants.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_bswap (tree fndecl, tree arg)
{
  if (! validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize constant value.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_BSWAP16:
	case BUILT_IN_BSWAP32:
	case BUILT_IN_BSWAP64:
	  {
	    signop sgn = TYPE_SIGN (type);
	    tree result =
	      wide_int_to_tree (type,
				wide_int::from (arg, TYPE_PRECISION (type),
						sgn).bswap ());
	    return result;
	  }
	default:
	  gcc_unreachable ();
	}
    }

  return NULL_TREE;
}
/* Fold a builtin function call to hypot, hypotf, or hypotl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_hypot (location_t loc, tree fndecl,
		    tree arg0, tree arg1, tree type)
{
  tree res, narg0, narg1;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
    return res;

  /* If either argument to hypot has a negate or abs, strip that off.
     E.g. hypot(-x,fabs(y)) -> hypot(x,y).  */
  narg0 = fold_strip_sign_ops (arg0);
  narg1 = fold_strip_sign_ops (arg1);
  if (narg0 || narg1)
    return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
				narg1 ? narg1 : arg1);

  /* If either argument is zero, hypot is fabs of the other.  */
  if (real_zerop (arg0))
    return fold_build1_loc (loc, ABS_EXPR, type, arg1);
  else if (real_zerop (arg1))
    return fold_build1_loc (loc, ABS_EXPR, type, arg0);

  /* hypot(x,x) -> fabs(x)*sqrt(2).  */
  if (flag_unsafe_math_optimizations
      && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
    {
      const REAL_VALUE_TYPE sqrt2_trunc
	= real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
      return fold_build2_loc (loc, MULT_EXPR, type,
			      fold_build1_loc (loc, ABS_EXPR, type, arg0),
			      build_real (type, sqrt2_trunc));
    }

  return NULL_TREE;
}
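
/* Illustrative note (not part of the original source): the last fold
   above means that, under -funsafe-math-optimizations,

     double f (double x) { return hypot (x, x); }

   becomes the equivalent of

     double f (double x) { return fabs (x) * 1.41421356237309504880; }

   with sqrt(2) truncated to the precision of the argument type.  */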
/* Fold a builtin function call to pow, powf, or powl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
{
  tree res;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
    return res;

  /* Optimize pow(1.0,y) = 1.0.  */
  if (real_onep (arg0))
    return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);

  if (TREE_CODE (arg1) == REAL_CST
      && !TREE_OVERFLOW (arg1))
    {
      REAL_VALUE_TYPE cint;
      REAL_VALUE_TYPE c;
      HOST_WIDE_INT n;

      c = TREE_REAL_CST (arg1);

      /* Optimize pow(x,0.0) = 1.0.  */
      if (REAL_VALUES_EQUAL (c, dconst0))
	return omit_one_operand_loc (loc, type, build_real (type, dconst1),
				     arg0);

      /* Optimize pow(x,1.0) = x.  */
      if (REAL_VALUES_EQUAL (c, dconst1))
	return arg0;

      /* Optimize pow(x,-1.0) = 1.0/x.  */
      if (REAL_VALUES_EQUAL (c, dconstm1))
	return fold_build2_loc (loc, RDIV_EXPR, type,
				build_real (type, dconst1), arg0);

      /* Optimize pow(x,0.5) = sqrt(x).  */
      if (flag_unsafe_math_optimizations
	  && REAL_VALUES_EQUAL (c, dconsthalf))
	{
	  tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

	  if (sqrtfn != NULL_TREE)
	    return build_call_expr_loc (loc, sqrtfn, 1, arg0);
	}

      /* Optimize pow(x,1.0/3.0) = cbrt(x).  */
      if (flag_unsafe_math_optimizations)
	{
	  const REAL_VALUE_TYPE dconstroot
	    = real_value_truncate (TYPE_MODE (type), dconst_third ());

	  if (REAL_VALUES_EQUAL (c, dconstroot))
	    {
	      tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
	      if (cbrtfn != NULL_TREE)
		return build_call_expr_loc (loc, cbrtfn, 1, arg0);
	    }
	}

      /* Check for an integer exponent.  */
      n = real_to_integer (&c);
      real_from_integer (&cint, VOIDmode, n, SIGNED);
      if (real_identical (&c, &cint))
	{
	  /* Attempt to evaluate pow at compile-time, unless this should
	     raise an exception.  */
	  if (TREE_CODE (arg0) == REAL_CST
	      && !TREE_OVERFLOW (arg0)
	      && (n > 0
		  || (!flag_trapping_math && !flag_errno_math)
		  || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
	    {
	      REAL_VALUE_TYPE x;
	      bool inexact;

	      x = TREE_REAL_CST (arg0);
	      inexact = real_powi (&x, TYPE_MODE (type), &x, n);
	      if (flag_unsafe_math_optimizations || !inexact)
		return build_real (type, x);
	    }

	  /* Strip sign ops from even integer powers.  */
	  if ((n & 1) == 0 && flag_unsafe_math_optimizations)
	    {
	      tree narg0 = fold_strip_sign_ops (arg0);
	      if (narg0)
		return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
	    }
	}
    }

  if (flag_unsafe_math_optimizations)
    {
      const enum built_in_function fcode = builtin_mathfn_code (arg0);

      /* Optimize pow(expN(x),y) = expN(x*y).  */
      if (BUILTIN_EXPONENT_P (fcode))
	{
	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
	  return build_call_expr_loc (loc, expfn, 1, arg);
	}

      /* Optimize pow(sqrt(x),y) = pow(x,y*0.5).  */
      if (BUILTIN_SQRT_P (fcode))
	{
	  tree narg0 = CALL_EXPR_ARG (arg0, 0);
	  tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
					build_real (type, dconsthalf));
	  return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
	}

      /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
	{
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  if (tree_expr_nonnegative_p (arg))
	    {
	      const REAL_VALUE_TYPE dconstroot
		= real_value_truncate (TYPE_MODE (type), dconst_third ());
	      tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
					    build_real (type, dconstroot));
	      return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
	    }
	}

      /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL)
	{
	  tree arg00 = CALL_EXPR_ARG (arg0, 0);
	  if (tree_expr_nonnegative_p (arg00))
	    {
	      tree arg01 = CALL_EXPR_ARG (arg0, 1);
	      tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
	      return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
	    }
	}
    }

  return NULL_TREE;
}
/* Fold a builtin function call to powi, powif, or powil with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
		   tree arg0, tree arg1, tree type)
{
  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize pow(1.0,y) = 1.0.  */
  if (real_onep (arg0))
    return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);

  if (tree_fits_shwi_p (arg1))
    {
      HOST_WIDE_INT c = tree_to_shwi (arg1);

      /* Evaluate powi at compile-time.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && !TREE_OVERFLOW (arg0))
	{
	  REAL_VALUE_TYPE x;
	  x = TREE_REAL_CST (arg0);
	  real_powi (&x, TYPE_MODE (type), &x, c);
	  return build_real (type, x);
	}

      /* Optimize pow(x,0) = 1.0.  */
      if (c == 0)
	return omit_one_operand_loc (loc, type, build_real (type, dconst1),
				     arg0);

      /* Optimize pow(x,1) = x.  */
      if (c == 1)
	return arg0;

      /* Optimize pow(x,-1) = 1.0/x.  */
      if (c == -1)
	return fold_build2_loc (loc, RDIV_EXPR, type,
				build_real (type, dconst1), arg0);
    }

  return NULL_TREE;
}
/* A subroutine of fold_builtin to fold the various exponent
   functions.  Return NULL_TREE if no simplification can be made.
   FUNC is the corresponding MPFR exponent function.  */

static tree
fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
		       int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));
      tree res;

      /* Calculate the result when the argument is a constant.  */
      if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
	return res;

      /* Optimize expN(logN(x)) = x.  */
      if (flag_unsafe_math_optimizations)
	{
	  const enum built_in_function fcode = builtin_mathfn_code (arg);

	  if ((func == mpfr_exp
	       && (fcode == BUILT_IN_LOG
		   || fcode == BUILT_IN_LOGF
		   || fcode == BUILT_IN_LOGL))
	      || (func == mpfr_exp2
		  && (fcode == BUILT_IN_LOG2
		      || fcode == BUILT_IN_LOG2F
		      || fcode == BUILT_IN_LOG2L))
	      || (func == mpfr_exp10
		  && (fcode == BUILT_IN_LOG10
		      || fcode == BUILT_IN_LOG10F
		      || fcode == BUILT_IN_LOG10L)))
	    return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
	}
    }

  return NULL_TREE;
}
/* Fold function call to builtin memchr.  ARG1, ARG2 and LEN are the
   arguments to the call, and TYPE is its return type.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
{
  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, INTEGER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1;

      if (TREE_CODE (arg2) != INTEGER_CST
	  || !tree_fits_uhwi_p (len))
	return NULL_TREE;

      p1 = c_getstr (arg1);
      if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
	{
	  char c;
	  const char *r;
	  tree tem;

	  if (target_char_cast (arg2, &c))
	    return NULL_TREE;

	  r = (const char *) memchr (p1, c, tree_to_uhwi (len));

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (arg1), 0);

	  tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
	  return fold_convert_loc (loc, type, tem);
	}
      return NULL_TREE;
    }
}
/* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
				  arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* If all arguments are constant, and the value of len is not greater
     than the lengths of arg1 and arg2, evaluate at compile-time.  */
  if (tree_fits_uhwi_p (len) && p1 && p2
      && compare_tree_int (len, strlen (p1) + 1) <= 0
      && compare_tree_int (len, strlen (p2) + 1) <= 0)
    {
      const int r = memcmp (p1, p2, tree_to_uhwi (len));

      if (r > 0)
	return integer_one_node;
      else if (r < 0)
	return integer_minus_one_node;
      else
	return integer_zero_node;
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg1)));
      tree ind2
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
/* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return integer_zero_node;

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  if (p1 && p2)
    {
      const int i = strcmp (p1, p2);
      if (i < 0)
	return integer_minus_one_node;
      else if (i > 0)
	return integer_one_node;
      else
	return integer_zero_node;
    }

  /* If the second arg is "", return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0')
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      return fold_convert_loc (loc, integer_type_node,
			       build1 (INDIRECT_REF, cst_uchar_node,
				       fold_convert_loc (loc,
							 cst_uchar_ptr_node,
							 arg1)));
    }

  /* If the first arg is "", return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0')
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree temp
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg2)));
      return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
    }

  return NULL_TREE;
}
/* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
				  arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  if (tree_fits_uhwi_p (len) && p1 && p2)
    {
      const int i = strncmp (p1, p2, tree_to_uhwi (len));
      if (i > 0)
	return integer_one_node;
      else if (i < 0)
	return integer_minus_one_node;
      else
	return integer_zero_node;
    }

  /* If the second arg is "", and the length is greater than zero,
     return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      return fold_convert_loc (loc, integer_type_node,
			       build1 (INDIRECT_REF, cst_uchar_node,
				       fold_convert_loc (loc,
							 cst_uchar_ptr_node,
							 arg1)));
    }

  /* If the first arg is "", and the length is greater than zero,
     return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree temp = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg2)));
      return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1 = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg1)));
      tree ind2 = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
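
/* Illustrative note (not part of the original source): the len == 1
   case above turns a library call into a single byte comparison, e.g.

     int f (const char *p, const char *q) { return strncmp (p, q, 1); }

   becomes the equivalent of

     int f (const char *p, const char *q)
     {
       return *(const unsigned char *) p - *(const unsigned char *) q;
     }

   using unsigned char exactly as the C standard specifies for the
   string comparison functions.  */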
/* Fold function call to builtin signbit, signbitf or signbitl with argument
   ARG.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_signbit (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* If ARG is a compile-time constant, determine the result.  */
  if (TREE_CODE (arg) == REAL_CST
      && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE c;

      c = TREE_REAL_CST (arg);
      return (REAL_VALUE_NEGATIVE (c)
	      ? build_one_cst (type)
	      : build_zero_cst (type));
    }

  /* If ARG is non-negative, the result is always zero.  */
  if (tree_expr_nonnegative_p (arg))
    return omit_one_operand_loc (loc, type, integer_zero_node, arg);

  /* If ARG's format doesn't have signed zeros, return "arg < 0.0".  */
  if (!HONOR_SIGNED_ZEROS (arg))
    return fold_convert (type,
			 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
					  build_real (TREE_TYPE (arg), dconst0)));

  return NULL_TREE;
}
/* Fold function call to builtin copysign, copysignf or copysignl with
   arguments ARG1 and ARG2.  Return NULL_TREE if no simplification can
   be made.  */

static tree
fold_builtin_copysign (location_t loc, tree fndecl,
		       tree arg1, tree arg2, tree type)
{
  tree tem;

  if (!validate_arg (arg1, REAL_TYPE)
      || !validate_arg (arg2, REAL_TYPE))
    return NULL_TREE;

  /* copysign(X,X) is X.  */
  if (operand_equal_p (arg1, arg2, 0))
    return fold_convert_loc (loc, type, arg1);

  /* If ARG1 and ARG2 are compile-time constants, determine the result.  */
  if (TREE_CODE (arg1) == REAL_CST
      && TREE_CODE (arg2) == REAL_CST
      && !TREE_OVERFLOW (arg1)
      && !TREE_OVERFLOW (arg2))
    {
      REAL_VALUE_TYPE c1, c2;

      c1 = TREE_REAL_CST (arg1);
      c2 = TREE_REAL_CST (arg2);
      /* c1.sign := c2.sign.  */
      real_copysign (&c1, &c2);
      return build_real (type, c1);
    }

  /* copysign(X, Y) is fabs(X) when Y is always non-negative.
     Remember to evaluate Y for side-effects.  */
  if (tree_expr_nonnegative_p (arg2))
    return omit_one_operand_loc (loc, type,
				 fold_build1_loc (loc, ABS_EXPR, type, arg1),
				 arg2);

  /* Strip sign changing operations for the first argument.  */
  tem = fold_strip_sign_ops (arg1);
  if (tem)
    return build_call_expr_loc (loc, fndecl, 2, tem, arg2);

  return NULL_TREE;
}
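
/* Illustrative note (not part of the original source): the folds above
   give, for example,

     copysign (x, x)       ->  x
     copysign (-3.0, 2.0)  ->  3.0       (compile-time constant)
     copysign (x, 5.0)     ->  fabs (x)  (second argument nonnegative)
     copysign (-x, y)      ->  copysign (x, y)

   where the last form relies on copysign ignoring the sign of its
   first argument.  */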
/* Fold a call to builtin isascii with argument ARG.  */

static tree
fold_builtin_isascii (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      /* Transform isascii(c) -> ((c & ~0x7f) == 0).  */
      arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
			 build_int_cst (integer_type_node,
					~ (unsigned HOST_WIDE_INT) 0x7f));
      return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
			      arg, integer_zero_node);
    }
}
/* Fold a call to builtin toascii with argument ARG.  */

static tree
fold_builtin_toascii (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Transform toascii(c) -> (c & 0x7f).  */
  return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
			  build_int_cst (integer_type_node, 0x7f));
}
/* Fold a call to builtin isdigit with argument ARG.  */

static tree
fold_builtin_isdigit (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9.  */
      /* According to the C standard, isdigit is unaffected by locale.
	 However, it definitely is affected by the target character set.  */
      unsigned HOST_WIDE_INT target_digit0
	= lang_hooks.to_target_charset ('0');

      if (target_digit0 == 0)
	return NULL_TREE;

      arg = fold_convert_loc (loc, unsigned_type_node, arg);
      arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
			 build_int_cst (unsigned_type_node, target_digit0));
      return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
			      build_int_cst (unsigned_type_node, 9));
    }
}
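
/* Illustrative note (not part of the original source): the fold above
   rewrites

     int f (int c) { return isdigit (c); }

   into the equivalent of

     int f (int c) { return (unsigned) c - '0' <= 9; }

   which is valid because the C standard requires the digits '0'..'9'
   to be contiguous in the execution character set; the subtraction in
   unsigned arithmetic folds the two range checks into one compare.  */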
/* Fold a call to fabs, fabsf or fabsl with argument ARG.  */

static tree
fold_builtin_fabs (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  arg = fold_convert_loc (loc, type, arg);
  if (TREE_CODE (arg) == REAL_CST)
    return fold_abs_const (arg, type);
  return fold_build1_loc (loc, ABS_EXPR, type, arg);
}
/* Fold a call to abs, labs, llabs or imaxabs with argument ARG.  */

static tree
fold_builtin_abs (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  arg = fold_convert_loc (loc, type, arg);
  if (TREE_CODE (arg) == INTEGER_CST)
    return fold_abs_const (arg, type);
  return fold_build1_loc (loc, ABS_EXPR, type, arg);
}
/* Fold a fma operation with arguments ARG[012].  */

tree
fold_fma (location_t loc ATTRIBUTE_UNUSED,
	  tree type, tree arg0, tree arg1, tree arg2)
{
  if (TREE_CODE (arg0) == REAL_CST
      && TREE_CODE (arg1) == REAL_CST
      && TREE_CODE (arg2) == REAL_CST)
    return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);

  return NULL_TREE;
}

/* Fold a call to fma, fmaf, or fmal with arguments ARG[012].  */

static tree
fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
{
  if (validate_arg (arg0, REAL_TYPE)
      && validate_arg (arg1, REAL_TYPE)
      && validate_arg (arg2, REAL_TYPE))
    {
      tree tem = fold_fma (loc, type, arg0, arg1, arg2);
      if (tem)
	return tem;

      /* ??? Only expand to FMA_EXPR if it's directly supported.  */
      if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
	return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
    }
  return NULL_TREE;
}
/* Fold a call to builtin fmin or fmax.  */

static tree
fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
			tree type, bool max)
{
  if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
    {
      /* Calculate the result when the argument is a constant.  */
      tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));

      if (res)
	return res;

      /* If either argument is NaN, return the other one.  Avoid the
	 transformation if we get (and honor) a signalling NaN.  Using
	 omit_one_operand() ensures we create a non-lvalue.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && real_isnan (&TREE_REAL_CST (arg0))
	  && (! HONOR_SNANS (arg0)
	      || ! TREE_REAL_CST (arg0).signalling))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      if (TREE_CODE (arg1) == REAL_CST
	  && real_isnan (&TREE_REAL_CST (arg1))
	  && (! HONOR_SNANS (arg1)
	      || ! TREE_REAL_CST (arg1).signalling))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Transform fmin/fmax(x,x) -> x.  */
      if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR.  C99 requires these
	 functions to return the numeric arg if the other one is NaN.
	 These tree codes don't honor that, so only transform if
	 -ffinite-math-only is set.  C99 doesn't require -0.0 to be
	 handled, so we don't have to worry about it either.  */
      if (flag_finite_math_only)
	return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type, arg1));
    }
  return NULL_TREE;
}
/* Fold a call to builtin carg(a+bi) -> atan2(b,a).  */

static tree
fold_builtin_carg (location_t loc, tree arg, tree type)
{
  if (validate_arg (arg, COMPLEX_TYPE)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
    {
      tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);

      if (atan2_fn)
	{
	  tree new_arg = builtin_save_expr (arg);
	  tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
	  tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
	  return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
	}
    }

  return NULL_TREE;
}
/* Fold a call to builtin logb/ilogb.  */

static tree
fold_builtin_logb (location_t loc, tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
	{
	case rvc_nan:
	case rvc_inf:
	  /* If arg is Inf or NaN and we're logb, return it.  */
	  if (TREE_CODE (rettype) == REAL_TYPE)
	    {
	      /* For logb(-Inf) we have to return +Inf.  */
	      if (real_isinf (value) && real_isneg (value))
		{
		  REAL_VALUE_TYPE tem;
		  real_inf (&tem);
		  return build_real (rettype, tem);
		}
	      return fold_convert_loc (loc, rettype, arg);
	    }
	  /* Fall through... */
	case rvc_zero:
	  /* Zero may set errno and/or raise an exception for logb, also
	     for ilogb we don't know FP_ILOGB0.  */
	  return NULL_TREE;
	case rvc_normal:
	  /* For normal numbers, proceed iff radix == 2.  In GCC,
	     normalized significands are in the range [0.5, 1.0).  We
	     want the exponent as if they were [1.0, 2.0) so get the
	     exponent and subtract 1.  */
	  if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
	    return fold_convert_loc (loc, rettype,
				     build_int_cst (integer_type_node,
						    REAL_EXP (value)-1));
	  break;
	}
    }

  return NULL_TREE;
}
/* Fold a call to builtin significand, if radix == 2.  */

static tree
fold_builtin_significand (location_t loc, tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
	{
	case rvc_zero:
	case rvc_nan:
	case rvc_inf:
	  /* If arg is +-0, +-Inf or +-NaN, then return it.  */
	  return fold_convert_loc (loc, rettype, arg);
	case rvc_normal:
	  /* For normal numbers, proceed iff radix == 2.  */
	  if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
	    {
	      REAL_VALUE_TYPE result = *value;
	      /* In GCC, normalized significands are in the range [0.5,
		 1.0).  We want them to be [1.0, 2.0) so set the
		 exponent to 1.  */
	      SET_REAL_EXP (&result, 1);
	      return build_real (rettype, result);
	    }
	  break;
	}
    }

  return NULL_TREE;
}
/* Fold a call to builtin frexp, we can assume the base is 2.  */

static tree
fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      tree frac, exp;

      switch (value->cl)
	{
	case rvc_zero:
	  /* For +-0, return (*exp = 0, +-0).  */
	  exp = integer_zero_node;
	  frac = arg0;
	  break;
	case rvc_nan:
	case rvc_inf:
	  /* For +-NaN or +-Inf, *exp is unspecified, return arg0.  */
	  return omit_one_operand_loc (loc, rettype, arg0, arg1);
	case rvc_normal:
	  {
	    /* Since the frexp function always expects base 2, and in
	       GCC normalized significands are already in the range
	       [0.5, 1.0), we have exactly what frexp wants.  */
	    REAL_VALUE_TYPE frac_rvt = *value;
	    SET_REAL_EXP (&frac_rvt, 0);
	    frac = build_real (rettype, frac_rvt);
	    exp = build_int_cst (integer_type_node, REAL_EXP (value));
	  }
	  break;
	default:
	  gcc_unreachable ();
	}

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
    }

  return NULL_TREE;
}
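
/* Illustrative note (not part of the original source): because GCC
   keeps normalized significands in [0.5, 1.0), the rvc_normal case
   above is a pure exponent rewrite; for instance

     double f (int *e) { return frexp (8.0, e); }

   folds to the pair (*e = 4, 0.5), matching 8.0 == 0.5 * 2**4.  */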
/* Fold a call to builtin ldexp or scalbn/scalbln.  If LDEXP is true
   then we can assume the base is two.  If it's false, then we have to
   check the mode of the TYPE parameter in certain cases.  */

static tree
fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
			    tree type, bool ldexp)
{
  if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0.  */
      if (real_zerop (arg0) || integer_zerop (arg1)
	  || (TREE_CODE (arg0) == REAL_CST
	      && !real_isfinite (&TREE_REAL_CST (arg0))))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* If both arguments are constant, then try to evaluate it.  */
      if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
	  && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
	  && tree_fits_shwi_p (arg1))
	{
	  /* Bound the maximum adjustment to twice the range of the
	     mode's valid exponents.  Use abs to ensure the range is
	     positive as a sanity check.  */
	  const long max_exp_adj = 2 *
	    labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
		  - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);

	  /* Get the user-requested adjustment.  */
	  const HOST_WIDE_INT req_exp_adj = tree_to_shwi (arg1);

	  /* The requested adjustment must be inside this range.  This
	     is a preliminary cap to avoid things like overflow, we
	     may still fail to compute the result for other reasons.  */
	  if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
	    {
	      REAL_VALUE_TYPE initial_result;

	      real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);

	      /* Ensure we didn't overflow.  */
	      if (! real_isinf (&initial_result))
		{
		  const REAL_VALUE_TYPE trunc_result
		    = real_value_truncate (TYPE_MODE (type), initial_result);

		  /* Only proceed if the target mode can hold the
		     resulting value.  */
		  if (REAL_VALUES_EQUAL (initial_result, trunc_result))
		    return build_real (type, trunc_result);
		}
	    }
	}
    }

  return NULL_TREE;
}
/* Fold a call to builtin modf.  */

static tree
fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      REAL_VALUE_TYPE trunc, frac;

      switch (value->cl)
	{
	case rvc_nan:
	case rvc_zero:
	  /* For +-NaN or +-0, return (*arg1 = arg0, arg0).  */
	  trunc = frac = *value;
	  break;
	case rvc_inf:
	  /* For +-Inf, return (*arg1 = arg0, +-0).  */
	  frac = dconst0;
	  frac.sign = value->sign;
	  trunc = *value;
	  break;
	case rvc_normal:
	  /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)).  */
	  real_trunc (&trunc, VOIDmode, value);
	  real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
	  /* If the original number was negative and already
	     integral, then the fractional part is -0.0.  */
	  if (value->sign && frac.cl == rvc_zero)
	    frac.sign = value->sign;
	  break;
	}

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
			      build_real (rettype, trunc));
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
			      build_real (rettype, frac));
    }

  return NULL_TREE;
}
/* Given a location LOC, an interclass builtin function decl FNDECL
   and its single argument ARG, return a folded expression computing
   the same, or NULL_TREE if we either couldn't or didn't want to fold
   (the latter happens if there's an RTL instruction available).  */

static tree
fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
{
  machine_mode mode;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
    return NULL_TREE;

  mode = TYPE_MODE (TREE_TYPE (arg));

  /* If there is no optab, try generic code.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
      tree result;

    CASE_FLT_FN (BUILT_IN_ISINF):
      {
	/* isinf(x) -> isgreater(fabs(x),DBL_MAX).  */
	tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&r, buf);
	result = build_call_expr (isgr_fn, 2,
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	return result;
      }
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_ISFINITE:
      {
	/* isfinite(x) -> islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&r, buf);
	result = build_call_expr (isle_fn, 2,
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	/*result = fold_build2_loc (loc, UNGT_EXPR,
				  TREE_TYPE (TREE_TYPE (fndecl)),
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
				  TREE_TYPE (TREE_TYPE (fndecl)),
				  result);*/
	return result;
      }
    case BUILT_IN_ISNORMAL:
      {
	/* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
	   islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
	tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE rmax, rmin;
	char buf[128];

	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&rmax, buf);
	sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
	real_from_string (&rmin, buf);
	arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
	result = build_call_expr (isle_fn, 2, arg,
				  build_real (type, rmax));
	result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
			      build_call_expr (isge_fn, 2, arg,
					       build_real (type, rmin)));
	return result;
      }
    default:
      break;
    }

  return NULL_TREE;
}
/* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
   ARG is the argument for the call.  */

static tree
fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  REAL_VALUE_TYPE r;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  switch (builtin_index)
    {
    case BUILT_IN_ISINF:
      if (!HONOR_INFINITIES (arg))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  if (real_isinf (&r))
	    return real_compare (GT_EXPR, &r, &dconst0)
		   ? integer_one_node : integer_minus_one_node;
	  else
	    return integer_zero_node;
	}

      return NULL_TREE;

    case BUILT_IN_ISINF_SIGN:
      {
	/* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
	/* In a boolean context, GCC will fold the inner COND_EXPR to
	   1.  So e.g. "if (isinf_sign(x))" would be folded to just
	   "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
	tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
	tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
	tree tmp = NULL_TREE;

	arg = builtin_save_expr (arg);

	if (signbit_fn && isinf_fn)
	  {
	    tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
	    tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);

	    signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
					    signbit_call, integer_zero_node);
	    isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
					  isinf_call, integer_zero_node);

	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
				   integer_minus_one_node, integer_one_node);
	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
				   isinf_call, tmp,
				   integer_zero_node);
	  }

	return tmp;
      }

    case BUILT_IN_ISFINITE:
      if (!HONOR_NANS (arg)
	  && !HONOR_INFINITIES (arg))
	return omit_one_operand_loc (loc, type, integer_one_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  return real_isfinite (&r) ? integer_one_node : integer_zero_node;
	}

      return NULL_TREE;

    case BUILT_IN_ISNAN:
      if (!HONOR_NANS (arg))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  return real_isnan (&r) ? integer_one_node : integer_zero_node;
	}

      arg = builtin_save_expr (arg);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);

    default:
      gcc_unreachable ();
    }
}
/* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
   This builtin will generate code to return the appropriate floating
   point classification depending on the value of the floating point
   number passed in.  The possible return values must be supplied as
   int arguments to the call in the following order: FP_NAN, FP_INFINITE,
   FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipses is for exactly
   one floating point argument which is "type generic".  */

static tree
fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
{
  tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
    arg, type, res, tmp;
  machine_mode mode;
  REAL_VALUE_TYPE r;
  char buf[128];

  /* Verify the required arguments in the original call.  */
  if (nargs != 6
      || !validate_arg (args[0], INTEGER_TYPE)
      || !validate_arg (args[1], INTEGER_TYPE)
      || !validate_arg (args[2], INTEGER_TYPE)
      || !validate_arg (args[3], INTEGER_TYPE)
      || !validate_arg (args[4], INTEGER_TYPE)
      || !validate_arg (args[5], REAL_TYPE))
    return NULL_TREE;

  fp_nan = args[0];
  fp_infinite = args[1];
  fp_normal = args[2];
  fp_subnormal = args[3];
  fp_zero = args[4];
  arg = args[5];
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (type);
  arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));

  /* fpclassify(x) ->
       isnan(x) ? FP_NAN :
	 (fabs(x) == Inf ? FP_INFINITE :
	   (fabs(x) >= DBL_MIN ? FP_NORMAL :
	     (x == 0 ? FP_ZERO : FP_SUBNORMAL))).  */

  tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
			 build_real (type, dconst0));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
			 tmp, fp_zero, fp_subnormal);

  sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
  real_from_string (&r, buf);
  tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
			 arg, build_real (type, r));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);

  if (HONOR_INFINITIES (mode))
    {
      real_inf (&r);
      tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
			     build_real (type, r));
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
			     fp_infinite, res);
    }

  if (HONOR_NANS (mode))
    {
      tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
    }

  return res;
}
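
/* Illustrative note (not part of the original source): reading the
   nested COND_EXPRs built above from the inside out, the generated
   expression is equivalent to

     int fpc (double x)
     {
       double a = fabs (x);
       return !(a == a)       ? FP_NAN
	      : a == INFINITY ? FP_INFINITE
	      : a >= DBL_MIN  ? FP_NORMAL
	      : a == 0.0      ? FP_ZERO
	      :                 FP_SUBNORMAL;
     }

   with the NaN and infinity arms emitted only when the mode honors
   NaNs and infinities.  */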
/* Fold a call to an unordered comparison function such as
   __builtin_isgreater().  FNDECL is the FUNCTION_DECL for the function
   being called and ARG0 and ARG1 are the arguments for the call.
   UNORDERED_CODE and ORDERED_CODE are comparison codes that give
   the opposite of the desired result.  UNORDERED_CODE is used
   for modes that can hold NaNs and ORDERED_CODE is used for
   the rest.  */

static tree
fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
			    enum tree_code unordered_code,
			    enum tree_code ordered_code)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum tree_code code;
  tree type0, type1;
  enum tree_code code0, code1;
  tree cmp_type = NULL_TREE;

  type0 = TREE_TYPE (arg0);
  type1 = TREE_TYPE (arg1);

  code0 = TREE_CODE (type0);
  code1 = TREE_CODE (type1);

  if (code0 == REAL_TYPE && code1 == REAL_TYPE)
    /* Choose the wider of two real types.  */
    cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
      ? type0 : type1;
  else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
    cmp_type = type0;
  else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
    cmp_type = type1;

  arg0 = fold_convert_loc (loc, cmp_type, arg0);
  arg1 = fold_convert_loc (loc, cmp_type, arg1);

  if (unordered_code == UNORDERED_EXPR)
    {
      if (!HONOR_NANS (arg0))
	return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
    }

  code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
  return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
			  fold_build2_loc (loc, code, type, arg0, arg1));
}
/* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
   arithmetics if it can never overflow, or into internal functions that
   return both result of arithmetics and overflowed boolean flag in
   a complex integer result, or some other check for overflow.  */

static tree
fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
			     tree arg0, tree arg1, tree arg2)
{
  enum internal_fn ifn = IFN_LAST;
  tree type = TREE_TYPE (TREE_TYPE (arg2));
  tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
  switch (fcode)
    {
    case BUILT_IN_ADD_OVERFLOW:
    case BUILT_IN_SADD_OVERFLOW:
    case BUILT_IN_SADDL_OVERFLOW:
    case BUILT_IN_SADDLL_OVERFLOW:
    case BUILT_IN_UADD_OVERFLOW:
    case BUILT_IN_UADDL_OVERFLOW:
    case BUILT_IN_UADDLL_OVERFLOW:
      ifn = IFN_ADD_OVERFLOW;
      break;
    case BUILT_IN_SUB_OVERFLOW:
    case BUILT_IN_SSUB_OVERFLOW:
    case BUILT_IN_SSUBL_OVERFLOW:
    case BUILT_IN_SSUBLL_OVERFLOW:
    case BUILT_IN_USUB_OVERFLOW:
    case BUILT_IN_USUBL_OVERFLOW:
    case BUILT_IN_USUBLL_OVERFLOW:
      ifn = IFN_SUB_OVERFLOW;
      break;
    case BUILT_IN_MUL_OVERFLOW:
    case BUILT_IN_SMUL_OVERFLOW:
    case BUILT_IN_SMULL_OVERFLOW:
    case BUILT_IN_SMULLL_OVERFLOW:
    case BUILT_IN_UMUL_OVERFLOW:
    case BUILT_IN_UMULL_OVERFLOW:
    case BUILT_IN_UMULLL_OVERFLOW:
      ifn = IFN_MUL_OVERFLOW;
      break;
    default:
      gcc_unreachable ();
    }
  tree ctype = build_complex_type (type);
  tree call = build_call_expr_internal_loc (loc, ifn, ctype,
					    2, arg0, arg1);
  tree tgt = save_expr (call);
  tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
  tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
  ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
  tree store
    = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
  return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
}
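
/* Illustrative note (not part of the original source): for a call such
   as

     bool f (int a, int b, int *res)
     {
       return __builtin_add_overflow (a, b, res);
     }

   the code above builds the equivalent of

     t = .ADD_OVERFLOW (a, b);      (a complex-integer temporary)
     *res = __real__ t;
     return (bool) __imag__ t;

   i.e. one internal function returning both the wrapped result and
   the overflow flag as the two halves of a complex integer.  */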
/* Fold a call to built-in function FNDECL with 0 arguments.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_0 (location_t loc, tree fndecl)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_INF):
    case BUILT_IN_INFD32:
    case BUILT_IN_INFD64:
    case BUILT_IN_INFD128:
      return fold_builtin_inf (loc, type, true);

    CASE_FLT_FN (BUILT_IN_HUGE_VAL):
      return fold_builtin_inf (loc, type, false);

    case BUILT_IN_CLASSIFY_TYPE:
      return fold_builtin_classify_type (NULL_TREE);

    default:
      break;
    }
  return NULL_TREE;
}
/* Fold a call to built-in function FNDECL with 1 argument, ARG0.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    case BUILT_IN_CONSTANT_P:
      {
	tree val = fold_builtin_constant_p (arg0);

	/* Gimplification will pull the CALL_EXPR for the builtin out of
	   an if condition.  When not optimizing, we'll not CSE it back.
	   To avoid link error types of regressions, return false now.  */
	if (!val && !optimize)
	  val = integer_zero_node;

	return val;
      }

    case BUILT_IN_CLASSIFY_TYPE:
      return fold_builtin_classify_type (arg0);

    case BUILT_IN_STRLEN:
      return fold_builtin_strlen (loc, type, arg0);

    CASE_FLT_FN (BUILT_IN_FABS):
    case BUILT_IN_FABSD32:
    case BUILT_IN_FABSD64:
    case BUILT_IN_FABSD128:
      return fold_builtin_fabs (loc, arg0, type);

    case BUILT_IN_ABS:
    case BUILT_IN_LABS:
    case BUILT_IN_LLABS:
    case BUILT_IN_IMAXABS:
      return fold_builtin_abs (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_CONJ):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
      break;

    CASE_FLT_FN (BUILT_IN_CREAL):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return non_lvalue_loc (loc,
			       fold_build1_loc (loc, REALPART_EXPR, type, arg0));
      break;

    CASE_FLT_FN (BUILT_IN_CIMAG):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return non_lvalue_loc (loc,
			       fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
      break;

    CASE_FLT_FN (BUILT_IN_CCOS):
      return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);

    CASE_FLT_FN (BUILT_IN_CCOSH):
      return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);

    CASE_FLT_FN (BUILT_IN_CPROJ):
      return fold_builtin_cproj (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_CSIN):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_sin);
      break;

    CASE_FLT_FN (BUILT_IN_CSINH):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_sinh);
      break;

    CASE_FLT_FN (BUILT_IN_CTAN):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_tan);
      break;

    CASE_FLT_FN (BUILT_IN_CTANH):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_tanh);
      break;

    CASE_FLT_FN (BUILT_IN_CLOG):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_log);
      break;

    CASE_FLT_FN (BUILT_IN_CSQRT):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_sqrt);
      break;

    CASE_FLT_FN (BUILT_IN_CASIN):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_asin);
      break;

    CASE_FLT_FN (BUILT_IN_CACOS):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_acos);
      break;

    CASE_FLT_FN (BUILT_IN_CATAN):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_atan);
      break;

    CASE_FLT_FN (BUILT_IN_CASINH):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_asinh);
      break;

    CASE_FLT_FN (BUILT_IN_CACOSH):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_acosh);
      break;

    CASE_FLT_FN (BUILT_IN_CATANH):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_atanh);
      break;

    CASE_FLT_FN (BUILT_IN_CABS):
      return fold_builtin_cabs (loc, arg0, type, fndecl);

    CASE_FLT_FN (BUILT_IN_CARG):
      return fold_builtin_carg (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_SQRT):
      return fold_builtin_sqrt (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_CBRT):
      return fold_builtin_cbrt (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_ASIN):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_asin,
			     &dconstm1, &dconst1, true);
      break;

    CASE_FLT_FN (BUILT_IN_ACOS):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_acos,
			     &dconstm1, &dconst1, true);
      break;

    CASE_FLT_FN (BUILT_IN_ATAN):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_ASINH):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_ACOSH):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_acosh,
			     &dconst1, NULL, true);
      break;

    CASE_FLT_FN (BUILT_IN_ATANH):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_atanh,
			     &dconstm1, &dconst1, false);
      break;

    CASE_FLT_FN (BUILT_IN_SIN):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_COS):
      return fold_builtin_cos (loc, arg0, type, fndecl);

    CASE_FLT_FN (BUILT_IN_TAN):
      return fold_builtin_tan (arg0, type);

    CASE_FLT_FN (BUILT_IN_CEXP):
      return fold_builtin_cexp (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_CEXPI):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
      break;

    CASE_FLT_FN (BUILT_IN_SINH):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_COSH):
      return fold_builtin_cosh (loc, arg0, type, fndecl);

    CASE_FLT_FN (BUILT_IN_TANH):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_ERF):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_ERFC):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_TGAMMA):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_EXP):
      return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);

    CASE_FLT_FN (BUILT_IN_EXP2):
      return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);

    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
      return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);

    CASE_FLT_FN (BUILT_IN_EXPM1):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_LOG):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_log, &dconst0, NULL, false);
      break;

    CASE_FLT_FN (BUILT_IN_LOG2):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_log2, &dconst0, NULL, false);
      break;

    CASE_FLT_FN (BUILT_IN_LOG10):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_log10, &dconst0, NULL, false);
      break;

    CASE_FLT_FN (BUILT_IN_LOG1P):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_log1p,
			     &dconstm1, NULL, false);
      break;

    CASE_FLT_FN (BUILT_IN_J0):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_j0,
			     NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_J1):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_j1,
			     NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_Y0):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_y0,
			     &dconst0, NULL, false);
      break;

    CASE_FLT_FN (BUILT_IN_Y1):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_y1,
			     &dconst0, NULL, false);
      break;

    CASE_FLT_FN (BUILT_IN_NAN):
    case BUILT_IN_NAND32:
    case BUILT_IN_NAND64:
    case BUILT_IN_NAND128:
      return fold_builtin_nan (arg0, type, true);

    CASE_FLT_FN (BUILT_IN_NANS):
      return fold_builtin_nan (arg0, type, false);

    CASE_FLT_FN (BUILT_IN_FLOOR):
      return fold_builtin_floor (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_CEIL):
      return fold_builtin_ceil (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_TRUNC):
      return fold_builtin_trunc (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_ROUND):
      return fold_builtin_round (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return fold_trunc_transparent_mathfn (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
    CASE_FLT_FN (BUILT_IN_IROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      return fold_builtin_int_roundingfn (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_IRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      return fold_fixed_mathfn (loc, fndecl, arg0);

    case BUILT_IN_BSWAP16:
    case BUILT_IN_BSWAP32:
    case BUILT_IN_BSWAP64:
      return fold_builtin_bswap (fndecl, arg0);

    CASE_INT_FN (BUILT_IN_FFS):
    CASE_INT_FN (BUILT_IN_CLZ):
    CASE_INT_FN (BUILT_IN_CTZ):
    CASE_INT_FN (BUILT_IN_CLRSB):
    CASE_INT_FN (BUILT_IN_POPCOUNT):
    CASE_INT_FN (BUILT_IN_PARITY):
      return fold_builtin_bitop (fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_SIGNBIT):
      return fold_builtin_signbit (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
      return fold_builtin_significand (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_ILOGB):
    CASE_FLT_FN (BUILT_IN_LOGB):
      return fold_builtin_logb (loc, arg0, type);

    case BUILT_IN_ISASCII:
      return fold_builtin_isascii (loc, arg0);

    case BUILT_IN_TOASCII:
      return fold_builtin_toascii (loc, arg0);

    case BUILT_IN_ISDIGIT:
      return fold_builtin_isdigit (loc, arg0);

    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISFINITE:
      {
	tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
	if (ret)
	  return ret;
	return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
      }

    CASE_FLT_FN (BUILT_IN_ISINF):
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      {
	tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
	if (ret)
	  return ret;
	return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
      }

    case BUILT_IN_ISNORMAL:
      return fold_builtin_interclass_mathfn (loc, fndecl, arg0);

    case BUILT_IN_ISINF_SIGN:
      return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);

    CASE_FLT_FN (BUILT_IN_ISNAN):
    case BUILT_IN_ISNAND32:
    case BUILT_IN_ISNAND64:
    case BUILT_IN_ISNAND128:
      return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);

    case BUILT_IN_FREE:
      if (integer_zerop (arg0))
	return build_empty_stmt (loc);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_JN):
      if (validate_arg (arg0, INTEGER_TYPE)
	  && validate_arg (arg1, REAL_TYPE))
	return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_YN):
      if (validate_arg (arg0, INTEGER_TYPE)
	  && validate_arg (arg1, REAL_TYPE))
	return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
				 &dconst0, false);
      break;

    CASE_FLT_FN (BUILT_IN_DREM):
    CASE_FLT_FN (BUILT_IN_REMAINDER):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, REAL_TYPE))
	return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
      break;

    CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
    CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, POINTER_TYPE))
	return do_mpfr_lgamma_r (arg0, arg1, type);
      break;

    CASE_FLT_FN (BUILT_IN_ATAN2):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, REAL_TYPE))
	return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
      break;

    CASE_FLT_FN (BUILT_IN_FDIM):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, REAL_TYPE))
	return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
      break;

    CASE_FLT_FN (BUILT_IN_HYPOT):
      return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_CPOW):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
	  && validate_arg (arg1, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
	return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
      break;

    CASE_FLT_FN (BUILT_IN_LDEXP):
      return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      return fold_builtin_load_exponent (loc, arg0, arg1,
					 type, /*ldexp=*/false);

    CASE_FLT_FN (BUILT_IN_FREXP):
      return fold_builtin_frexp (loc, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_MODF):
      return fold_builtin_modf (loc, arg0, arg1, type);

    case BUILT_IN_STRSTR:
      return fold_builtin_strstr (loc, arg0, arg1, type);

    case BUILT_IN_STRSPN:
      return fold_builtin_strspn (loc, arg0, arg1);

    case BUILT_IN_STRCSPN:
      return fold_builtin_strcspn (loc, arg0, arg1);

    case BUILT_IN_STRCHR:
    case BUILT_IN_INDEX:
      return fold_builtin_strchr (loc, arg0, arg1, type);

    case BUILT_IN_STRRCHR:
    case BUILT_IN_RINDEX:
      return fold_builtin_strrchr (loc, arg0, arg1, type);

    case BUILT_IN_STRCMP:
      return fold_builtin_strcmp (loc, arg0, arg1);

    case BUILT_IN_STRPBRK:
      return fold_builtin_strpbrk (loc, arg0, arg1, type);

    case BUILT_IN_EXPECT:
      return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);

    CASE_FLT_FN (BUILT_IN_POW):
      return fold_builtin_pow (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_POWI):
      return fold_builtin_powi (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_COPYSIGN):
      return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_FMIN):
      return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);

    CASE_FLT_FN (BUILT_IN_FMAX):
      return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);

    case BUILT_IN_ISGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNLE_EXPR, LE_EXPR);
    case BUILT_IN_ISGREATEREQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNLT_EXPR, LT_EXPR);
    case BUILT_IN_ISLESS:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNGE_EXPR, GE_EXPR);
    case BUILT_IN_ISLESSEQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNGT_EXPR, GT_EXPR);
    case BUILT_IN_ISLESSGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
    case BUILT_IN_ISUNORDERED:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNORDERED_EXPR,
					 NOP_EXPR);

      /* We do the folding for va_start in the expander.  */
    case BUILT_IN_VA_START:
      break;

    case BUILT_IN_OBJECT_SIZE:
      return fold_builtin_object_size (arg0, arg1);

    case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
      return fold_builtin_atomic_always_lock_free (arg0, arg1);

    case BUILT_IN_ATOMIC_IS_LOCK_FREE:
      return fold_builtin_atomic_is_lock_free (arg0, arg1);

    default:
      break;
    }
  return NULL_TREE;
}
/* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
   and ARG2.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_3 (location_t loc, tree fndecl,
		tree arg0, tree arg1, tree arg2)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_SINCOS):
      return fold_builtin_sincos (loc, arg0, arg1, arg2);

    CASE_FLT_FN (BUILT_IN_FMA):
      return fold_builtin_fma (loc, arg0, arg1, arg2, type);

    CASE_FLT_FN (BUILT_IN_REMQUO):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, REAL_TYPE)
	  && validate_arg (arg2, POINTER_TYPE))
	return do_mpfr_remquo (arg0, arg1, arg2);
      break;

    case BUILT_IN_STRNCMP:
      return fold_builtin_strncmp (loc, arg0, arg1, arg2);

    case BUILT_IN_MEMCHR:
      return fold_builtin_memchr (loc, arg0, arg1, arg2, type);

    case BUILT_IN_BCMP:
    case BUILT_IN_MEMCMP:
      return fold_builtin_memcmp (loc, arg0, arg1, arg2);

    case BUILT_IN_EXPECT:
      return fold_builtin_expect (loc, arg0, arg1, arg2);

    case BUILT_IN_ADD_OVERFLOW:
    case BUILT_IN_SUB_OVERFLOW:
    case BUILT_IN_MUL_OVERFLOW:
    case BUILT_IN_SADD_OVERFLOW:
    case BUILT_IN_SADDL_OVERFLOW:
    case BUILT_IN_SADDLL_OVERFLOW:
    case BUILT_IN_SSUB_OVERFLOW:
    case BUILT_IN_SSUBL_OVERFLOW:
    case BUILT_IN_SSUBLL_OVERFLOW:
    case BUILT_IN_SMUL_OVERFLOW:
    case BUILT_IN_SMULL_OVERFLOW:
    case BUILT_IN_SMULLL_OVERFLOW:
    case BUILT_IN_UADD_OVERFLOW:
    case BUILT_IN_UADDL_OVERFLOW:
    case BUILT_IN_UADDLL_OVERFLOW:
    case BUILT_IN_USUB_OVERFLOW:
    case BUILT_IN_USUBL_OVERFLOW:
    case BUILT_IN_USUBLL_OVERFLOW:
    case BUILT_IN_UMUL_OVERFLOW:
    case BUILT_IN_UMULL_OVERFLOW:
    case BUILT_IN_UMULLL_OVERFLOW:
      return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);

    default:
      break;
    }
  return NULL_TREE;
}
/* Fold a call to built-in function FNDECL.  ARGS is an array of NARGS
   arguments.  IGNORE is true if the result of the
   function call is ignored.  This function returns NULL_TREE if no
   simplification was possible.  */

tree
fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
{
  tree ret = NULL_TREE;

  switch (nargs)
    {
    case 0:
      ret = fold_builtin_0 (loc, fndecl);
      break;
    case 1:
      ret = fold_builtin_1 (loc, fndecl, args[0]);
      break;
    case 2:
      ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
      break;
    case 3:
      ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
      break;
    default:
      ret = fold_builtin_varargs (loc, fndecl, args, nargs);
      break;
    }
  if (ret)
    {
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}
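/* Note (illustrative, not from the original sources): the NOP_EXPR
   wrapper above carries TREE_NO_WARNING, so that when, e.g., a call like
   strlen ("abc") folds to the constant 3 in a statement context, the
   replacement expression does not later trip "statement with no effect"
   style diagnostics.  */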
/* Construct a new CALL_EXPR to FNDECL using the tail of the argument
   list ARGS along with N new arguments in NEWARGS.  SKIP is the number
   of arguments in ARGS to be omitted.  OLDNARGS is the number of
   elements in ARGS.  */

static tree
rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
			  int skip, tree fndecl, int n, va_list newargs)
{
  int nargs = oldnargs - skip + n;
  tree *buffer;

  if (n > 0)
    {
      int i, j;

      buffer = XALLOCAVEC (tree, nargs);
      for (i = 0; i < n; i++)
	buffer[i] = va_arg (newargs, tree);
      for (j = skip; j < oldnargs; j++, i++)
	buffer[i] = args[j];
    }
  else
    buffer = args + skip;

  return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
}
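/* Example (illustrative, not from the original sources): with
   OLDNARGS == 3, SKIP == 1 and N == 2, the rewritten argument vector is

     { newargs[0], newargs[1], args[1], args[2] }

   i.e. the N fresh arguments come first, followed by the tail of ARGS
   with its first SKIP entries dropped.  */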
/* Return true if FNDECL shouldn't be folded right now.
   If a built-in function has an inline attribute always_inline
   wrapper, defer folding it until after always_inline functions have
   been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
   might not be performed.  */

static bool
avoid_folding_inline_builtin (tree fndecl)
{
  return (DECL_DECLARED_INLINE_P (fndecl)
	  && DECL_DISREGARD_INLINE_LIMITS (fndecl)
	  && cfun
	  && !cfun->always_inline_functions_inlined
	  && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
}
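/* Example (illustrative, not from the original sources): glibc's
   fortified headers wrap memcpy roughly as

     extern __inline __attribute__ ((__always_inline__)) void *
     memcpy (void *dest, const void *src, size_t n)
     {
       return __builtin___memcpy_chk (dest, src, n,
				      __builtin_object_size (dest, 0));
     }

   Folding the builtin before such a wrapper is inlined would bypass the
   object-size check, hence the deferral above.  */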
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_expr (location_t loc, tree exp, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = get_callee_fndecl (exp);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
	 yet.  Defer folding until we see all the arguments
	 (after inlining).  */
      && !CALL_EXPR_VA_ARG_PACK (exp))
    {
      int nargs = call_expr_nargs (exp);

      /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
	 instead last argument is __builtin_va_arg_pack ().  Defer folding
	 even in that case, until arguments are finalized.  */
      if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
	{
	  tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
	  if (fndecl2
	      && TREE_CODE (fndecl2) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
	    return NULL_TREE;
	}

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;

      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
				     CALL_EXPR_ARGP (exp), ignore);
      else
	{
	  tree *args = CALL_EXPR_ARGP (exp);
	  ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	  if (ret)
	    return ret;
	}
    }
  return NULL_TREE;
}
/* Fold a CALL_EXPR with type TYPE with FN as the function expression.
   N arguments are passed in the array ARGARRAY.  Return a folded
   expression or NULL_TREE if no simplification was possible.  */

tree
fold_builtin_call_array (location_t loc, tree,
			 tree fn,
			 int n,
			 tree *argarray)
{
  if (TREE_CODE (fn) != ADDR_EXPR)
    return NULL_TREE;

  tree fndecl = TREE_OPERAND (fn, 0);
  if (TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl))
    {
      /* If last argument is __builtin_va_arg_pack (), arguments to this
	 function are not finalized yet.  Defer folding until they are.  */
      if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
	{
	  tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
	  if (fndecl2
	      && TREE_CODE (fndecl2) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
	    return NULL_TREE;
	}
      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	return targetm.fold_builtin (fndecl, n, argarray, false);
      else
	return fold_builtin_n (loc, fndecl, argarray, n, false);
    }

  return NULL_TREE;
}
/* Construct a new CALL_EXPR using the tail of the argument list of EXP
   along with N new arguments specified as the "..." parameters.  SKIP
   is the number of arguments in EXP to be omitted.  This function is used
   to do varargs-to-varargs transformations.  */

static tree
rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
{
  va_list ap;
  tree t;

  va_start (ap, n);
  t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
				CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
  va_end (ap);

  return t;
}
/* Validate a single argument ARG against a tree code CODE representing
   a type.  */

static bool
validate_arg (const_tree arg, enum tree_code code)
{
  if (!arg)
    return false;
  else if (code == POINTER_TYPE)
    return POINTER_TYPE_P (TREE_TYPE (arg));
  else if (code == INTEGER_TYPE)
    return INTEGRAL_TYPE_P (TREE_TYPE (arg));
  return code == TREE_CODE (TREE_TYPE (arg));
}
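/* Example (illustrative, not from the original sources):
   validate_arg (arg, POINTER_TYPE) accepts any pointer-typed ARG, and
   validate_arg (arg, INTEGER_TYPE) accepts any integral type (not just
   plain INTEGER_TYPE nodes), whereas any other code must match the
   TREE_CODE of ARG's type exactly.  */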
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipses, otherwise the last specifier must be a
   VOID_TYPE.

   This is the GIMPLE version of validate_arglist.  Eventually we want to
   completely convert builtins.c to work from GIMPLEs and the tree based
   validate_arglist will then be removed.  */

bool
validate_gimple_arglist (const gcall *call, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_tree arg;
  size_t i;

  va_start (ap, call);
  i = 0;

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
	{
	case 0:
	  /* This signifies an ellipses, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = (i == gimple_call_num_args (call));
	  goto end;
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = gimple_call_arg (call, i++);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}
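/* Example (illustrative, not from the original sources): a checker for a
   memcmp-like gimple call would be written

     validate_gimple_arglist (call, POINTER_TYPE, POINTER_TYPE,
			      INTEGER_TYPE, VOID_TYPE)

   where the trailing VOID_TYPE marks the end of the expected argument
   list.  */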
/* Default target-specific builtin expander that does nothing.  */

rtx
default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
			rtx target ATTRIBUTE_UNUSED,
			rtx subtarget ATTRIBUTE_UNUSED,
			machine_mode mode ATTRIBUTE_UNUSED,
			int ignore ATTRIBUTE_UNUSED)
{
  return NULL_RTX;
}
/* Returns true if EXP represents data that would potentially reside
   in a readonly section.  */

bool
readonly_data_expr (tree exp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) != ADDR_EXPR)
    return false;

  exp = get_base_address (TREE_OPERAND (exp, 0));
  if (!exp)
    return false;

  /* Make sure we call decl_readonly_section only for trees it
     can handle (since it returns true for everything it doesn't
     understand).  */
  if (TREE_CODE (exp) == STRING_CST
      || TREE_CODE (exp) == CONSTRUCTOR
      || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
    return decl_readonly_section (exp, 0);
  else
    return false;
}
/* Simplify a call to the strstr builtin.  S1 and S2 are the arguments
   to the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1, *p2;

      p2 = c_getstr (s2);
      if (p2 == NULL)
	return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
	{
	  const char *r = strstr (p1, p2);
	  tree tem;

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (s1), 0);

	  /* Return an offset into the constant string argument.  */
	  tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
	  return fold_convert_loc (loc, type, tem);
	}

      /* The argument is const char *, and the result is char *, so we need
	 a type conversion here to avoid a warning.  */
      if (p2[0] == '\0')
	return fold_convert_loc (loc, type, s1);

      if (p2[1] != '\0')
	return NULL_TREE;

      fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (!fn)
	return NULL_TREE;

      /* New argument list transforming strstr(s1, s2) to
	 strchr(s1, s2[0]).  */
      return build_call_expr_loc (loc, fn, 2, s1,
				  build_int_cst (integer_type_node, p2[0]));
    }
}
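/* Example (illustrative, not from the original sources): with both
   strings constant, strstr ("hello", "ll") folds to "hello" + 2; with a
   single-character needle, strstr (s, "l") becomes strchr (s, 'l'); and
   strstr (s, "") folds to (char *) s.  */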
/* Simplify a call to the strchr builtin.  S1 and S2 are the arguments to
   the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1;

      if (TREE_CODE (s2) != INTEGER_CST)
	return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
	{
	  char c;
	  const char *r;
	  tree tem;

	  if (target_char_cast (s2, &c))
	    return NULL_TREE;

	  r = strchr (p1, c);

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (s1), 0);

	  /* Return an offset into the constant string argument.  */
	  tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
	  return fold_convert_loc (loc, type, tem);
	}
      return NULL_TREE;
    }
}
/* Simplify a call to the strrchr builtin.  S1 and S2 are the arguments to
   the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1;

      if (TREE_CODE (s2) != INTEGER_CST)
	return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
	{
	  char c;
	  const char *r;
	  tree tem;

	  if (target_char_cast (s2, &c))
	    return NULL_TREE;

	  r = strrchr (p1, c);

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (s1), 0);

	  /* Return an offset into the constant string argument.  */
	  tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
	  return fold_convert_loc (loc, type, tem);
	}

      if (! integer_zerop (s2))
	return NULL_TREE;

      fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (!fn)
	return NULL_TREE;

      /* Transform strrchr(s1, '\0') to strchr(s1, '\0').  */
      return build_call_expr_loc (loc, fn, 2, s1, s2);
    }
}
/* Simplify a call to the strpbrk builtin.  S1 and S2 are the arguments
   to the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1, *p2;

      p2 = c_getstr (s2);
      if (p2 == NULL)
	return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
	{
	  const char *r = strpbrk (p1, p2);
	  tree tem;

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (s1), 0);

	  /* Return an offset into the constant string argument.  */
	  tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
	  return fold_convert_loc (loc, type, tem);
	}

      if (p2[0] == '\0')
	/* strpbrk(x, "") == NULL.
	   Evaluate and ignore s1 in case it had side-effects.  */
	return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);

      if (p2[1] != '\0')
	return NULL_TREE;  /* Really call strpbrk.  */

      fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (!fn)
	return NULL_TREE;

      /* New argument list transforming strpbrk(s1, s2) to
	 strchr(s1, s2[0]).  */
      return build_call_expr_loc (loc, fn, 2, s1,
				  build_int_cst (integer_type_node, p2[0]));
    }
}
/* Simplify a call to the strspn builtin.  S1 and S2 are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strspn (location_t loc, tree s1, tree s2)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);

      /* If both arguments are constants, evaluate at compile-time.  */
      if (p1 && p2)
	{
	  const size_t r = strspn (p1, p2);
	  return build_int_cst (size_type_node, r);
	}

      /* If either argument is "", return NULL_TREE.  */
      if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
	/* Evaluate and ignore both arguments in case either one has
	   side-effects.  */
	return omit_two_operands_loc (loc, size_type_node, size_zero_node,
				      s1, s2);
      return NULL_TREE;
    }
}
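/* Example (illustrative, not from the original sources): with both
   arguments constant, strspn ("abcde", "abc") folds to the size_t
   constant 3 at compile time; strspn (s, "") folds to 0 while still
   evaluating both operands for side effects.  */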
/* Simplify a call to the strcspn builtin.  S1 and S2 are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strcspn (location_t loc, tree s1, tree s2)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);

      /* If both arguments are constants, evaluate at compile-time.  */
      if (p1 && p2)
	{
	  const size_t r = strcspn (p1, p2);
	  return build_int_cst (size_type_node, r);
	}

      /* If the first argument is "", return NULL_TREE.  */
      if (p1 && *p1 == '\0')
	{
	  /* Evaluate and ignore argument s2 in case it has
	     side-effects.  */
	  return omit_one_operand_loc (loc, size_type_node,
				       size_zero_node, s2);
	}

      /* If the second argument is "", return __builtin_strlen(s1).  */
      if (p2 && *p2 == '\0')
	{
	  tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);

	  /* If the replacement _DECL isn't initialized, don't do the
	     transformation.  */
	  if (!fn)
	    return NULL_TREE;

	  return build_call_expr_loc (loc, fn, 1, s1);
	}
      return NULL_TREE;
    }
}
/* Fold the next_arg or va_start call EXP.  Returns true if there was an error
   produced.  False otherwise.  This is done so that we don't output the error
   or warning twice or three times.  */

bool
fold_builtin_next_arg (tree exp, bool va_start_p)
{
  tree fntype = TREE_TYPE (current_function_decl);
  int nargs = call_expr_nargs (exp);
  tree arg;
  /* There is good chance the current input_location points inside the
     definition of the va_start macro (perhaps on the token for
     builtin) in a system header, so warnings will not be emitted.
     Use the location in real source code.  */
  source_location current_location =
    linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
					      NULL);

  if (!stdarg_p (fntype))
    {
      error ("%<va_start%> used in function with fixed args");
      return true;
    }

  if (va_start_p)
    {
      if (va_start_p && (nargs != 2))
	{
	  error ("wrong number of arguments to function %<va_start%>");
	  return true;
	}
      arg = CALL_EXPR_ARG (exp, 1);
    }
  /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
     when we checked the arguments and if needed issued a warning.  */
  else
    {
      if (nargs == 0)
	{
	  /* Evidently an out of date version of <stdarg.h>; can't validate
	     va_start's second argument, but can still work as intended.  */
	  warning_at (current_location,
		      OPT_Wvarargs,
		      "%<__builtin_next_arg%> called without an argument");
	  return true;
	}
      else if (nargs > 1)
	{
	  error ("wrong number of arguments to function %<__builtin_next_arg%>");
	  return true;
	}
      arg = CALL_EXPR_ARG (exp, 0);
    }

  if (TREE_CODE (arg) == SSA_NAME)
    arg = SSA_NAME_VAR (arg);

  /* We destructively modify the call to be __builtin_va_start (ap, 0)
     or __builtin_next_arg (0) the first time we see it, after checking
     the arguments and if needed issuing a warning.  */
  if (!integer_zerop (arg))
    {
      tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));

      /* Strip off all nops for the sake of the comparison.  This
	 is not quite the same as STRIP_NOPS.  It does more.
	 We must also strip off INDIRECT_EXPR for C++ reference
	 parameters.  */
      while (CONVERT_EXPR_P (arg)
	     || TREE_CODE (arg) == INDIRECT_REF)
	arg = TREE_OPERAND (arg, 0);
      if (arg != last_parm)
	{
	  /* FIXME: Sometimes with the tree optimizers we can get something
	     other than the last argument even though the user used the last
	     argument.  We just warn and set the arg to be the last
	     argument so that we will get wrong-code because of
	     it.  */
	  warning_at (current_location,
		      OPT_Wvarargs,
		      "second parameter of %<va_start%> not last named argument");
	}

      /* Undefined by C99 7.15.1.4p4 (va_start):
	 "If the parameter parmN is declared with the register storage
	 class, with a function or array type, or with a type that is
	 not compatible with the type that results after application of
	 the default argument promotions, the behavior is undefined."
      */
      else if (DECL_REGISTER (arg))
	{
	  warning_at (current_location,
		      OPT_Wvarargs,
		      "undefined behaviour when second parameter of "
		      "%<va_start%> is declared with %<register%> storage");
	}

      /* We want to verify the second parameter just once before the tree
	 optimizers are run and then avoid keeping it in the tree,
	 as otherwise we could warn even for correct code like:
	 void foo (int i, ...)
	 { va_list ap; i++; va_start (ap, i); va_end (ap); }  */
      if (va_start_p)
	CALL_EXPR_ARG (exp, 1) = integer_zero_node;
      else
	CALL_EXPR_ARG (exp, 0) = integer_zero_node;
    }
  return false;
}
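/* Example (illustrative, not from the original sources): in

     void foo (int a, int b, ...)
     { va_list ap; va_start (ap, a); ... }

   the second operand of va_start names A rather than the last named
   parameter B, so the warning above fires; va_start (ap, b) is the
   accepted form.  */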
/* Expand a call EXP to __builtin_object_size.  */

static rtx
expand_builtin_object_size (tree exp)
{
  tree ost;
  int object_size_type;
  tree fndecl = get_callee_fndecl (exp);

  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      error ("%Kfirst argument of %D must be a pointer, second integer constant",
	     exp, fndecl);
      expand_builtin_trap ();
      return const0_rtx;
    }

  ost = CALL_EXPR_ARG (exp, 1);
  STRIP_NOPS (ost);

  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    {
      error ("%Klast argument of %D is not integer constant between 0 and 3",
	     exp, fndecl);
      expand_builtin_trap ();
      return const0_rtx;
    }

  object_size_type = tree_to_shwi (ost);

  return object_size_type < 2 ? constm1_rtx : const0_rtx;
}
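/* Note (illustrative, not from the original sources): if a call survives
   to expansion, the object size was never determined, so
   __builtin_object_size (p, 0) and (p, 1) expand to the maximum
   (size_t) -1, while (p, 2) and (p, 3) expand to the minimum 0, per the
   constm1_rtx / const0_rtx choice above.  */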
/* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   FCODE is the BUILT_IN_* to use.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
			   enum built_in_function fcode)
{
  tree dest, src, len, size;

  if (!validate_arglist (exp,
			 POINTER_TYPE,
			 fcode == BUILT_IN_MEMSET_CHK
			 ? INTEGER_TYPE : POINTER_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dest = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);
  len = CALL_EXPR_ARG (exp, 2);
  size = CALL_EXPR_ARG (exp, 3);

  if (! tree_fits_uhwi_p (size))
    return NULL_RTX;

  if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
    {
      tree fn;

      if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
	{
	  warning_at (tree_nonartificial_location (exp),
		      0, "%Kcall to %D will always overflow destination buffer",
		      exp, get_callee_fndecl (exp));
	  return NULL_RTX;
	}

      fn = NULL_TREE;
      /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
	 mem{cpy,pcpy,move,set} is available.  */
      switch (fcode)
	{
	case BUILT_IN_MEMCPY_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
	  break;
	case BUILT_IN_MEMPCPY_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
	  break;
	case BUILT_IN_MEMMOVE_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
	  break;
	case BUILT_IN_MEMSET_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMSET);
	  break;
	default:
	  break;
	}

      if (! fn)
	return NULL_RTX;

      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_expr (fn, target, mode, EXPAND_NORMAL);
    }
  else if (fcode == BUILT_IN_MEMSET_CHK)
    return NULL_RTX;
  else
    {
      unsigned int dest_align = get_pointer_alignment (dest);

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If SRC and DEST are the same (and not volatile), do nothing.  */
      if (operand_equal_p (src, dest, 0))
	{
	  tree expr;

	  if (fcode != BUILT_IN_MEMPCPY_CHK)
	    {
	      /* Evaluate and ignore LEN in case it has side-effects.  */
	      expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
	      return expand_expr (dest, target, mode, EXPAND_NORMAL);
	    }

	  expr = fold_build_pointer_plus (dest, len);
	  return expand_expr (expr, target, mode, EXPAND_NORMAL);
	}

      /* __memmove_chk special case.  */
      if (fcode == BUILT_IN_MEMMOVE_CHK)
	{
	  unsigned int src_align = get_pointer_alignment (src);

	  if (src_align == 0)
	    return NULL_RTX;

	  /* If src is categorized for a readonly section we can use
	     normal __memcpy_chk.  */
	  if (readonly_data_expr (src))
	    {
	      tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return NULL_RTX;
	      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
					  dest, src, len, size);
	      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
	      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
	      return expand_expr (fn, target, mode, EXPAND_NORMAL);
	    }
	}
      return NULL_RTX;
    }
}
/* Emit warning if a buffer overflow is detected at compile time.  */

static void
maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
{
  int is_strlen = 0;
  tree len, size;
  location_t loc = tree_nonartificial_location (exp);

  switch (fcode)
    {
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
    /* For __strcat_chk the warning will be emitted only if overflowing
       by at least strlen (dest) + 1 bytes.  */
    case BUILT_IN_STRCAT_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 2);
      is_strlen = 1;
      break;
    case BUILT_IN_STRNCAT_CHK:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      len = CALL_EXPR_ARG (exp, 2);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    default:
      gcc_unreachable ();
    }

  if (!len || !size)
    return;

  if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
    return;

  if (is_strlen)
    {
      len = c_strlen (len, 1);
      if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
	return;
    }
  else if (fcode == BUILT_IN_STRNCAT_CHK)
    {
      tree src = CALL_EXPR_ARG (exp, 1);
      if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
	return;
      src = c_strlen (src, 1);
      if (! src || ! tree_fits_uhwi_p (src))
	{
	  warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
		      exp, get_callee_fndecl (exp));
	  return;
	}
      else if (tree_int_cst_lt (src, size))
	return;
    }
  else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
    return;

  warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
	      exp, get_callee_fndecl (exp));
}
/* Emit warning if a buffer overflow is detected at compile time
   in __sprintf_chk/__vsprintf_chk calls.  */

static void
maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
{
  tree size, len, fmt;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return;
  size = CALL_EXPR_ARG (exp, 2);
  fmt = CALL_EXPR_ARG (exp, 3);

  if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
    return;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return;

  if (!init_target_chars ())
    return;

  /* If the format doesn't contain % args or %%, we know its size.  */
  if (strchr (fmt_str, target_percent) == 0)
    len = build_int_cstu (size_type_node, strlen (fmt_str));
  /* If the format is "%s" and first ... argument is a string literal,
     we know it too.  */
  else if (fcode == BUILT_IN_SPRINTF_CHK
	   && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree arg;

      if (nargs < 5)
	return;
      arg = CALL_EXPR_ARG (exp, 4);
      if (! POINTER_TYPE_P (TREE_TYPE (arg)))
	return;

      len = c_strlen (arg, 1);
      if (!len || ! tree_fits_uhwi_p (len))
	return;
    }
  else
    return;

  if (! tree_int_cst_lt (len, size))
    warning_at (tree_nonartificial_location (exp),
		0, "%Kcall to %D will always overflow destination buffer",
		exp, get_callee_fndecl (exp));
}
/* Emit warning if a free is called with address of a variable.  */

static void
maybe_emit_free_warning (tree exp)
{
  tree arg = CALL_EXPR_ARG (exp, 0);

  STRIP_NOPS (arg);
  if (TREE_CODE (arg) != ADDR_EXPR)
    return;

  arg = get_base_address (TREE_OPERAND (arg, 0));
  if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
    return;

  if (SSA_VAR_P (arg))
    warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
		"%Kattempt to free a non-heap object %qD", exp, arg);
  else
    warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
		"%Kattempt to free a non-heap object", exp);
}
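/* Example (illustrative, not from the original sources):

     int x;
     free (&x);

   triggers the -Wfree-nonheap-object diagnostic above, since &x is an
   ADDR_EXPR whose base is a declared variable rather than heap
   storage.  */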
/* Fold a call to __builtin_object_size with arguments PTR and OST,
   if possible.  */

static tree
fold_builtin_object_size (tree ptr, tree ost)
{
  unsigned HOST_WIDE_INT bytes;
  int object_size_type;

  if (!validate_arg (ptr, POINTER_TYPE)
      || !validate_arg (ost, INTEGER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (ost);

  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    return NULL_TREE;

  object_size_type = tree_to_shwi (ost);

  /* __builtin_object_size doesn't evaluate side-effects in its arguments;
     if there are any side-effects, it returns (size_t) -1 for types 0 and 1
     and (size_t) 0 for types 2 and 3.  */
  if (TREE_SIDE_EFFECTS (ptr))
    return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      bytes = compute_builtin_object_size (ptr, object_size_type);
      if (wi::fits_to_tree_p (bytes, size_type_node))
	return build_int_cstu (size_type_node, bytes);
    }
  else if (TREE_CODE (ptr) == SSA_NAME)
    {
      /* If object size is not known yet, delay folding until
	 later.  Maybe subsequent passes will help determining
	 it.  */
      bytes = compute_builtin_object_size (ptr, object_size_type);
      if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
	  && wi::fits_to_tree_p (bytes, size_type_node))
	return build_int_cstu (size_type_node, bytes);
    }

  return NULL_TREE;
}
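/* Example (illustrative, not from the original sources):

     char buf[16];
     __builtin_object_size (buf + 4, 0)   folds to 12
     __builtin_object_size (f (), 0)      folds to (size_t) -1

   the latter because the pointer argument has side effects, which this
   builtin must not evaluate.  */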
/* Builtins with folding operations that operate on "..." arguments
   need special handling; we need to store the arguments in a convenient
   data structure before attempting any folding.  Fortunately there are
   only a few builtins that fall into this category.  FNDECL is the
   function, EXP is the CALL_EXPR for the call.  */

static tree
fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  tree ret = NULL_TREE;

  switch (fcode)
    {
    case BUILT_IN_FPCLASSIFY:
      ret = fold_builtin_fpclassify (loc, args, nargs);
      break;

    default:
      break;
    }
  if (ret)
    {
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}
11708 init_target_chars (void)
11713 target_newline
= lang_hooks
.to_target_charset ('\n');
11714 target_percent
= lang_hooks
.to_target_charset ('%');
11715 target_c
= lang_hooks
.to_target_charset ('c');
11716 target_s
= lang_hooks
.to_target_charset ('s');
11717 if (target_newline
== 0 || target_percent
== 0 || target_c
== 0
11721 target_percent_c
[0] = target_percent
;
11722 target_percent_c
[1] = target_c
;
11723 target_percent_c
[2] = '\0';
11725 target_percent_s
[0] = target_percent
;
11726 target_percent_s
[1] = target_s
;
11727 target_percent_s
[2] = '\0';
11729 target_percent_s_newline
[0] = target_percent
;
11730 target_percent_s_newline
[1] = target_s
;
11731 target_percent_s_newline
[2] = target_newline
;
11732 target_percent_s_newline
[3] = '\0';
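/* Note (illustrative, not from the original sources): after a successful
   init_target_chars (), target_percent_s holds "%s" in the target
   character set; maybe_emit_sprintf_chk_warning above compares format
   strings against it rather than against a host "%s" literal, which
   matters when host and target charsets differ (e.g. ASCII vs
   EBCDIC).  */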
/* Helper function for do_mpfr_arg*().  Ensure M is a normal number
   and no overflow/underflow occurred.  INEXACT is true if M was not
   exactly calculated.  TYPE is the tree type for the result.  This
   function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail.  */

static tree
do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
      && (!flag_rounding_math || !inexact))
    {
      REAL_VALUE_TYPE rr;

      real_from_mpfr (&rr, m, type, GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpfr_t is not, then we underflowed in the
	 conversion.  */
      if (real_isfinite (&rr)
	  && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
	{
	  REAL_VALUE_TYPE rmode;

	  real_convert (&rmode, TYPE_MODE (type), &rr);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (real_identical (&rmode, &rr))
	    return build_real (type, rmode);
	}
    }
  return NULL_TREE;
}
/* Helper function for do_mpc_arg*().  Ensure M is a normal complex
   number and no overflow/underflow occurred.  INEXACT is true if M
   was not exactly calculated.  TYPE is the tree type for the result.
   This function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail; if
   FORCE_CONVERT is true, then bypass the checks.  */

static tree
do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (force_convert
      || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
	  && !mpfr_overflow_p () && !mpfr_underflow_p ()
	  && (!flag_rounding_math || !inexact)))
    {
      REAL_VALUE_TYPE re, im;

      real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
      real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpfr_t is not, then we underflowed in the
	 conversion.  */
      if (force_convert
	  || (real_isfinite (&re) && real_isfinite (&im)
	      && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
	      && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
	{
	  REAL_VALUE_TYPE re_mode, im_mode;

	  real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
	  real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (force_convert
	      || (real_identical (&re_mode, &re)
		  && real_identical (&im_mode, &im)))
	    return build_complex (type, build_real (TREE_TYPE (type), re_mode),
				  build_real (TREE_TYPE (type), im_mode));
	}
    }
  return NULL_TREE;
}
/* If argument ARG is a REAL_CST, call the one-argument mpfr function
   FUNC on it and return the resulting value as a tree with type TYPE.
   If MIN and/or MAX are not NULL, then the supplied ARG must be
   within those bounds.  If INCLUSIVE is true, then MIN/MAX are
   acceptable values, otherwise they are not.  The mpfr precision is
   set to the precision of TYPE.  We assume that function FUNC returns
   zero if the result could be calculated exactly within the requested
   precision.  */

static tree
do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
	      const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
	      bool inclusive)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      if (real_isfinite (ra)
	  && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min))
	  && (!max || real_compare (inclusive ? LE_EXPR : LT_EXPR, ra, max)))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  int inexact;
	  mpfr_t m;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = func (m, m, rnd);
	  result = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	}
    }

  return result;
}
/* If arguments ARG1 and ARG2 are REAL_CSTs, call the two-argument mpfr
   function FUNC on them and return the resulting value as a tree with
   type TYPE.  The mpfr precision is set to the precision of TYPE.  We
   assume that function FUNC returns zero if the result could be
   calculated exactly within the requested precision.  */

static tree
do_mpfr_arg2 (tree arg1, tree arg2, tree type,
              int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
    {
      const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
      const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);

      if (real_isfinite (ra1) && real_isfinite (ra2))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          int inexact;
          mpfr_t m1, m2;

          mpfr_inits2 (prec, m1, m2, NULL);
          mpfr_from_real (m1, ra1, GMP_RNDN);
          mpfr_from_real (m2, ra2, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = func (m1, m1, m2, rnd);
          result = do_mpfr_ckconv (m1, type, inexact);
          mpfr_clears (m1, m2, NULL);
        }
    }

  return result;
}
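/* Illustrative sketch (not part of GCC): the same pattern for a
   two-argument function such as pow(), again with only the public MPFR
   API and a hypothetical name.  */
#if 0
#include <mpfr.h>

static double
example_fold_pow (double x, double y)
{
  mpfr_t m1, m2;
  double r;

  mpfr_inits2 (53, m1, m2, (mpfr_ptr) 0);
  mpfr_set_d (m1, x, GMP_RNDN);
  mpfr_set_d (m2, y, GMP_RNDN);
  mpfr_clear_flags ();
  (void) mpfr_pow (m1, m1, m2, GMP_RNDN);  /* FUNC (m1, m1, m2, rnd) */
  r = mpfr_get_d (m1, GMP_RNDN);
  mpfr_clears (m1, m2, (mpfr_ptr) 0);
  return r;
}
#endif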
/* If arguments ARG1, ARG2 and ARG3 are REAL_CSTs, call the
   three-argument mpfr function FUNC on them and return the resulting
   value as a tree with type TYPE.  The mpfr precision is set to the
   precision of TYPE.  We assume that function FUNC returns zero if the
   result could be calculated exactly within the requested
   precision.  */

static tree
do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
              int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr,
                          mp_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);
  STRIP_NOPS (arg3);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
      && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
    {
      const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
      const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
      const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);

      if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          int inexact;
          mpfr_t m1, m2, m3;

          mpfr_inits2 (prec, m1, m2, m3, NULL);
          mpfr_from_real (m1, ra1, GMP_RNDN);
          mpfr_from_real (m2, ra2, GMP_RNDN);
          mpfr_from_real (m3, ra3, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = func (m1, m1, m2, m3, rnd);
          result = do_mpfr_ckconv (m1, type, inexact);
          mpfr_clears (m1, m2, m3, NULL);
        }
    }

  return result;
}
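/* Illustrative sketch (not part of GCC): the three-argument case, using
   mpfr_fma as a stand-in for FUNC; public MPFR API, hypothetical name.  */
#if 0
#include <mpfr.h>

static double
example_fold_fma (double x, double y, double z)
{
  mpfr_t m1, m2, m3;
  double r;

  mpfr_inits2 (53, m1, m2, m3, (mpfr_ptr) 0);
  mpfr_set_d (m1, x, GMP_RNDN);
  mpfr_set_d (m2, y, GMP_RNDN);
  mpfr_set_d (m3, z, GMP_RNDN);
  mpfr_clear_flags ();
  (void) mpfr_fma (m1, m1, m2, m3, GMP_RNDN);  /* FUNC (m1, m1, m2, m3, rnd) */
  r = mpfr_get_d (m1, GMP_RNDN);
  mpfr_clears (m1, m2, m3, (mpfr_ptr) 0);
  return r;
}
#endif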
/* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
   the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
   If ARG_SINP and ARG_COSP are NULL then the result is returned
   as a complex value.
   The type is taken from the type of ARG and is used for setting the
   precision of the calculation and results.  */

static tree
do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
{
  tree const type = TREE_TYPE (arg);
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST
      && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      if (real_isfinite (ra))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          tree result_s, result_c;
          int inexact;
          mpfr_t m, ms, mc;

          mpfr_inits2 (prec, m, ms, mc, NULL);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = mpfr_sin_cos (ms, mc, m, rnd);
          result_s = do_mpfr_ckconv (ms, type, inexact);
          result_c = do_mpfr_ckconv (mc, type, inexact);
          mpfr_clears (m, ms, mc, NULL);
          if (result_s && result_c)
            {
              /* If we are to return in a complex value do so.  */
              if (!arg_sinp && !arg_cosp)
                return build_complex (build_complex_type (type),
                                      result_c, result_s);

              /* Dereference the sin/cos pointer arguments.  */
              arg_sinp = build_fold_indirect_ref (arg_sinp);
              arg_cosp = build_fold_indirect_ref (arg_cosp);
              /* Proceed if valid pointer types were passed in.  */
              if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp))
                    == TYPE_MAIN_VARIANT (type)
                  && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp))
                       == TYPE_MAIN_VARIANT (type))
                {
                  /* Set the values.  */
                  result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
                                          result_s);
                  TREE_SIDE_EFFECTS (result_s) = 1;
                  result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
                                          result_c);
                  TREE_SIDE_EFFECTS (result_c) = 1;
                  /* Combine the assignments into a compound expr.  */
                  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                    result_s, result_c));
                }
            }
        }
    }

  return result;
}
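/* Illustrative sketch (not part of GCC): mpfr_sin_cos computes both
   results from one argument, which is why the code above allocates the
   separate MS and MC outputs.  Public MPFR API, hypothetical name.  */
#if 0
#include <mpfr.h>

static void
example_fold_sincos (double x, double *sinp, double *cosp)
{
  mpfr_t m, ms, mc;

  mpfr_inits2 (53, m, ms, mc, (mpfr_ptr) 0);
  mpfr_set_d (m, x, GMP_RNDN);
  mpfr_clear_flags ();
  (void) mpfr_sin_cos (ms, mc, m, GMP_RNDN);
  *sinp = mpfr_get_d (ms, GMP_RNDN);
  *cosp = mpfr_get_d (mc, GMP_RNDN);
  mpfr_clears (m, ms, mc, (mpfr_ptr) 0);
}
#endif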
/* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
   two-argument mpfr order N Bessel function FUNC on them and return
   the resulting value as a tree with type TYPE.  The mpfr precision
   is set to the precision of TYPE.  We assume that function FUNC
   returns zero if the result could be calculated exactly within the
   requested precision.  */

static tree
do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
                  int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
                  const REAL_VALUE_TYPE *min, bool inclusive)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && tree_fits_shwi_p (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
    {
      const HOST_WIDE_INT n = tree_to_shwi (arg1);
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);

      if (n == (long)n
          && real_isfinite (ra)
          && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min)))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          int inexact;
          mpfr_t m;

          mpfr_init2 (m, prec);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = func (m, n, m, rnd);
          result = do_mpfr_ckconv (m, type, inexact);
          mpfr_clear (m);
        }
    }

  return result;
}
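/* Illustrative sketch (not part of GCC): mpfr_jn matches the
   (mpfr_ptr, long, mpfr_srcptr, mp_rnd_t) shape of FUNC above, e.g. for
   folding jn(n, x).  Public MPFR API, hypothetical name.  */
#if 0
#include <mpfr.h>

static double
example_fold_jn (long n, double x)
{
  mpfr_t m;
  double r;

  mpfr_init2 (m, 53);
  mpfr_set_d (m, x, GMP_RNDN);
  mpfr_clear_flags ();
  (void) mpfr_jn (m, n, m, GMP_RNDN);   /* FUNC (m, n, m, rnd) */
  r = mpfr_get_d (m, GMP_RNDN);
  mpfr_clear (m);
  return r;
}
#endif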
/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          tree result_rem;
          long integer_quo;
          mpfr_t m0, m1;

          mpfr_inits2 (prec, m0, m1, NULL);
          mpfr_from_real (m0, ra0, GMP_RNDN);
          mpfr_from_real (m1, ra1, GMP_RNDN);
          mpfr_clear_flags ();
          mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
          /* Remquo is independent of the rounding mode, so pass
             inexact=0 to do_mpfr_ckconv().  */
          result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
          mpfr_clears (m0, m1, NULL);
          if (result_rem)
            {
              /* MPFR calculates quo in the host's long so it may
                 return more bits in quo than the target int can hold
                 if sizeof(host long) > sizeof(target int).  This can
                 happen even for native compilers in LP64 mode.  In
                 these cases, modulo the quo value with the largest
                 number that the target int can hold while leaving one
                 bit for the sign.  */
              if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
                integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

              /* Dereference the quo pointer argument.  */
              arg_quo = build_fold_indirect_ref (arg_quo);
              /* Proceed iff a valid pointer type was passed in.  */
              if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
                {
                  /* Set the value.  */
                  tree result_quo
                    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
                                   build_int_cst (TREE_TYPE (arg_quo),
                                                  integer_quo));
                  TREE_SIDE_EFFECTS (result_quo) = 1;
                  /* Combine the quo assignment with the rem.  */
                  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                    result_quo, result_rem));
                }
            }
        }
    }

  return result;
}
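/* Illustrative sketch (not part of GCC): the host-long to target-int
   truncation of the quotient above, assuming a 32-bit target int on an
   LP64 host; the function name is hypothetical.  */
#if 0
#include <limits.h>

static int
example_truncate_quo (long integer_quo)
{
  /* Keep the low 31 bits, leaving one bit for the sign, exactly as the
     INT_TYPE_SIZE check above does for a 32-bit target int.  */
  if (sizeof (integer_quo) * CHAR_BIT > 32)
    integer_quo %= (long) (1UL << (32 - 1));
  return (int) integer_quo;
}
#endif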
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
         negative integer.  */
      if (real_isfinite (ra)
          && ra->cl != rvc_zero
          && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          int inexact, sg;
          mpfr_t m;
          tree result_lg;

          mpfr_init2 (m, prec);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = mpfr_lgamma (m, &sg, m, rnd);
          result_lg = do_mpfr_ckconv (m, type, inexact);
          mpfr_clear (m);
          if (result_lg)
            {
              tree result_sg;

              /* Dereference the arg_sg pointer argument.  */
              arg_sg = build_fold_indirect_ref (arg_sg);
              /* Assign the signgam value into *arg_sg.  */
              result_sg = fold_build2 (MODIFY_EXPR,
                                       TREE_TYPE (arg_sg), arg_sg,
                                       build_int_cst (TREE_TYPE (arg_sg), sg));
              TREE_SIDE_EFFECTS (result_sg) = 1;
              /* Combine the signgam assignment with the lgamma result.  */
              result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                result_sg, result_lg));
            }
        }
    }

  return result;
}
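/* Illustrative sketch (not part of GCC): mpfr_lgamma returns both the
   function value and the sign that libm's lgamma_r() stores through its
   int pointer, which is what gets assigned through ARG_SG above.  Public
   MPFR API, hypothetical name.  */
#if 0
#include <mpfr.h>

static double
example_fold_lgamma_r (double x, int *signp)
{
  mpfr_t m;
  double r;

  mpfr_init2 (m, 53);
  mpfr_set_d (m, x, GMP_RNDN);
  mpfr_clear_flags ();
  (void) mpfr_lgamma (m, signp, m, GMP_RNDN);
  r = mpfr_get_d (m, GMP_RNDN);
  mpfr_clear (m);
  return r;
}
#endif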
/* If argument ARG is a COMPLEX_CST, call the one-argument mpc
   function FUNC on it and return the resulting value as a tree with
   type TYPE.  The mpfr precision is set to the precision of TYPE.  We
   assume that function FUNC returns zero if the result could be
   calculated exactly within the requested precision.  */

static tree
do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      if (real_isfinite (re) && real_isfinite (im))
        {
          const struct real_format *const fmt =
            REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
          int inexact;
          mpc_t m;

          mpc_init2 (m, prec);
          mpfr_from_real (mpc_realref (m), re, rnd);
          mpfr_from_real (mpc_imagref (m), im, rnd);
          mpfr_clear_flags ();
          inexact = func (m, m, crnd);
          result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
          mpc_clear (m);
        }
    }

  return result;
}
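/* Illustrative sketch (not part of GCC): the complex analogue using MPC,
   with mpc_sqrt standing in for FUNC; public MPC API, hypothetical name.  */
#if 0
#include <mpc.h>

static void
example_fold_csqrt (double re, double im, double *rre, double *rim)
{
  mpc_t m;

  mpc_init2 (m, 53);
  mpfr_set_d (mpc_realref (m), re, GMP_RNDN);
  mpfr_set_d (mpc_imagref (m), im, GMP_RNDN);
  mpfr_clear_flags ();
  (void) mpc_sqrt (m, m, MPC_RNDNN);    /* FUNC (m, m, crnd) */
  *rre = mpfr_get_d (mpc_realref (m), GMP_RNDN);
  *rim = mpfr_get_d (mpc_imagref (m), GMP_RNDN);
  mpc_clear (m);
}
#endif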
/* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
   mpc function FUNC on them and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
             int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
          || (real_isfinite (re0) && real_isfinite (im0)
              && real_isfinite (re1) && real_isfinite (im1)))
        {
          const struct real_format *const fmt =
            REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
          int inexact;
          mpc_t m0, m1;

          mpc_init2 (m0, prec);
          mpc_init2 (m1, prec);
          mpfr_from_real (mpc_realref (m0), re0, rnd);
          mpfr_from_real (mpc_imagref (m0), im0, rnd);
          mpfr_from_real (mpc_realref (m1), re1, rnd);
          mpfr_from_real (mpc_imagref (m1), im1, rnd);
          mpfr_clear_flags ();
          inexact = func (m0, m0, m1, crnd);
          result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
          mpc_clear (m0);
          mpc_clear (m1);
        }
    }

  return result;
}
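/* Illustrative sketch (not part of GCC): the two-argument complex case,
   with mpc_pow standing in for FUNC; public MPC API, hypothetical name.  */
#if 0
#include <mpc.h>

static void
example_fold_cpow (mpc_ptr res, mpc_srcptr x, mpc_srcptr y)
{
  /* RES must already be initialized with mpc_init2.  The aliasing form
     used above, func (m0, m0, m1, crnd), is equally valid.  */
  mpfr_clear_flags ();
  (void) mpc_pow (res, x, y, MPC_RNDNN);
}
#endif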
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
                    ? gimple_call_arg_ptr (stmt, 0)
                    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
        return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
        {
          return targetm.fold_builtin (fndecl, nargs, args, ignore);
        }
      else
        {
          ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
          if (ret)
            {
              /* Propagate location information from original call to
                 expansion of builtin.  Otherwise things like
                 maybe_emit_chk_warning, that operate on the expansion
                 of a builtin, will use the wrong location information.  */
              if (gimple_has_location (stmt))
                {
                  tree realret = ret;
                  if (TREE_CODE (ret) == NOP_EXPR)
                    realret = TREE_OPERAND (ret, 0);
                  if (CAN_HAVE_LOCATION_P (realret)
                      && !EXPR_HAS_LOCATION (realret))
                    SET_EXPR_LOCATION (realret, loc);
                  return realret;
                }
              return ret;
            }
        }
    }
  return NULL_TREE;
}
/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  tree builtin;
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
              && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
              && asmspec != 0);

  builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);
  switch (DECL_FUNCTION_CODE (decl))
    {
    case BUILT_IN_MEMCPY:
      init_block_move_fn (asmspec);
      memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
      break;
    case BUILT_IN_MEMSET:
      init_block_clear_fn (asmspec);
      memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
      break;
    case BUILT_IN_MEMMOVE:
      memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
      break;
    case BUILT_IN_MEMCMP:
      memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
      break;
    case BUILT_IN_ABORT:
      abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
      break;
    case BUILT_IN_FFS:
      if (INT_TYPE_SIZE < BITS_PER_WORD)
        {
          set_user_assembler_libfunc ("ffs", asmspec);
          set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
                                                       MODE_INT, 0), "ffs");
        }
      break;
    default:
      break;
    }
}
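/* Illustrative user-level trigger (not part of GCC): redeclaring a builtin
   with an asm label routes the corresponding libfunc and block-move
   expansion through the user-supplied name via the switch above.  The
   "my_memcpy" symbol below is hypothetical.  */
#if 0
extern void *memcpy (void *, const void *, __SIZE_TYPE__)
  __asm__ ("my_memcpy");
#endif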
/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
        /* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
        /* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
        /* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
        return true;

      default:
        return false;
      }

  return false;
}
/* Return true if DECL is a builtin that is not expensive, i.e., it is
   most probably expanded inline into reasonably simple code.  This is a
   superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_ACC_ON_DEVICE:
        return true;

      default:
        return is_simple_builtin (decl);
      }

  return false;
}
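/* Illustrative user-level examples (not part of GCC): calls that these
   predicates classify as cheap for inlining-style heuristics; the
   function name is hypothetical.  */
#if 0
static int
example_cheap (unsigned x)
{
  return __builtin_popcount (x)         /* is_inexpensive_builtin: true */
         + __builtin_constant_p (x);    /* is_simple_builtin: true */
}
#endif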