/* Expand builtin functions.
   Copyright (C) 1988-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */

#include "coretypes.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "gimple-ssa-warn-restrict.h"
#include "stor-layout.h"
#include "tree-object-size.h"
#include "typeclass.h"
#include "langhooks.h"
#include "value-prof.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimple-fold.h"
#include "file-prefix-map.h" /* remap_macro_filename()  */
#include "gomp-constants.h"
#include "omp-general.h"

struct target_builtins default_target_builtins;
struct target_builtins *this_target_builtins = &default_target_builtins;

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Set up an array of builtin_info_type, making sure each element's decl
   is initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int) END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;

static rtx c_readstr (const char *, scalar_int_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx inline_expand_builtin_string_cmp (tree, rtx);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memchr (tree, rtx);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
					    rtx target, tree exp, int endp);
static rtx expand_builtin_memmove (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, int);
static rtx expand_builtin_strcat (tree, rtx);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_stpncpy (tree, rtx);
static rtx expand_builtin_strncat (tree, rtx);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode,
				       tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static rtx expand_builtin_expect_with_probability (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree,
					enum tree_code, enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree *, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);

/* Return true if NAME starts with __builtin_, __sync_ or __atomic_.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  return false;
}

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in alignp and N in
   *BITPOSP and return true.  Otherwise return false and store BITS_PER_UNIT to
   *alignp and any bit-offset to *bitposp.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  poly_int64 bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = least_bit_hwi (ptr_bitmask);
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      unsigned int talign;
      if (!addr_p && !known_alignment
	  && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
	  && talign > align)
	align = talign;
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  /* Account for the alignment of runtime coefficients, so that the constant
     bitpos is guaranteed to be accurate.  */
  unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
  if (alt_align != 0 && alt_align < align)
    {
      align = alt_align;
      known_alignment = false;
    }

  *alignp = align;
  *bitposp = bitpos.coeffs[0] & (align - 1);
  return known_alignment;
}
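
/* As an illustration of the M/N contract above: for a reference to the
   member s.i of

     struct S { char c; int i; } s __attribute__ ((aligned (8)));

   this function would compute *ALIGNP == 64 and *BITPOSP == 32 on a
   typical target with 32-bit int, i.e. the address of s.i sits 4 bytes
   past an 8-byte-aligned address.  */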

/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be determined,
   store M in alignp and N in *BITPOSP and return true.  Otherwise return false
   and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);
  return align;
}
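
/* Continuing the example above, *ALIGNP == 64 with *BITPOSP == 32 makes
   get_object_alignment return least_bit_hwi (32) == 32: the strongest
   alignment that holds for the possibly-offset address itself.  */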

/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in alignp and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	  if (*alignp == 0)
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);

  return align;
}

/* Return the number of leading non-zero elements in the sequence
   [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
   ELTSIZE must be a power of 2 less than 8.  Used by c_strlen.  */

static unsigned
string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
{
  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  unsigned n;

  if (eltsize == 1)
    {
      /* Optimize the common case of plain char.  */
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n;
	  if (!*elt)
	    break;
	}
    }
  else
    {
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n * eltsize;
	  if (!memcmp (elt, "\0\0\0\0", eltsize))
	    break;
	}
    }
  return n;
}
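
/* For example:

     string_length ("ab\0cd", 1, 6)     == 2
     string_length ("a\0b\0\0\0", 2, 3) == 2

   In the second call the 16-bit elements at indices 0 and 1 ("a\0" and
   "b\0") are nonzero and the all-zero element at index 2 stops the
   scan.  */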

/* For a call at LOC to a function FN that expects a string in the argument
   ARG, issue a diagnostic because it is being called with an argument
   declared at DECL that is a character array with no terminating NUL.  */

void
warn_string_no_nul (location_t loc, const char *fn, tree arg, tree decl)
{
  if (TREE_NO_WARNING (arg))
    return;

  loc = expansion_point_location_if_in_system_header (loc);

  if (warning_at (loc, OPT_Wstringop_overflow_,
		  "%qs argument missing terminating nul", fn))
    {
      inform (DECL_SOURCE_LOCATION (decl),
	      "referenced argument declared here");
      TREE_NO_WARNING (arg) = 1;
    }
}

/* If EXP refers to an unterminated constant character array return
   the declaration of the object of which the array is a member or
   element and if SIZE is not null, set *SIZE to the size of
   the unterminated array and set *EXACT if the size is exact or
   clear it otherwise.  Otherwise return null.  */

tree
unterminated_array (tree exp, tree *size /* = NULL */, bool *exact /* = NULL */)
{
  /* C_STRLEN will return NULL and set DECL in the info
     structure if EXP references an unterminated array.  */
  c_strlen_data data;
  memset (&data, 0, sizeof (c_strlen_data));
  tree len = c_strlen (exp, 1, &data);
  if (len == NULL_TREE && data.len && data.decl)
    {
      if (size)
	{
	  len = data.len;
	  if (data.off)
	    {
	      /* Constant offsets are already accounted for in data.len, but
		 not in a SSA_NAME + CST expression.  */
	      if (TREE_CODE (data.off) == INTEGER_CST)
		*exact = true;
	      else if (TREE_CODE (data.off) == PLUS_EXPR
		       && TREE_CODE (TREE_OPERAND (data.off, 1)) == INTEGER_CST)
		{
		  /* Subtract the offset from the size of the array.  */
		  *exact = false;
		  tree temp = TREE_OPERAND (data.off, 1);
		  temp = fold_convert (ssizetype, temp);
		  len = fold_build2 (MINUS_EXPR, ssizetype, len, temp);
		}
	      else
		*exact = false;
	    }
	  else
	    *exact = true;

	  *size = len;
	}

      return data.decl;
    }

  return NULL_TREE;
}

/* Compute the length of a null-terminated character string or wide
   character string handling character sizes of 1, 2, and 4 bytes.
   TREE_STRING_LENGTH is not the right way because it evaluates to
   the size of the character array in bytes (as opposed to characters)
   and because it can contain a zero byte in the middle.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   Additional information about the string accessed may be recorded
   in DATA.  For example, if SRC references an unterminated string,
   then the declaration will be stored in the DECL field.  If the
   length of the unterminated string can be determined, it'll be
   stored in the LEN field.  Note this length could well be different
   than what a C strlen call would return.

   ELTSIZE is 1 for normal single byte character strings, and 2 or
   4 for wide character strings.  ELTSIZE is by default 1.

   The value returned is of type `ssizetype'.  */

tree
c_strlen (tree src, int only_value, c_strlen_data *data, unsigned eltsize)
{
  /* If we were not passed a DATA pointer, then get one to a local
     structure.  That avoids having to check DATA for NULL before
     each time we want to use it.  */
  c_strlen_data local_strlen_data;
  memset (&local_strlen_data, 0, sizeof (c_strlen_data));
  if (!data)
    data = &local_strlen_data;

  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value, data, eltsize);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);

  location_t loc = EXPR_LOC_OR_LOC (src, input_location);

  /* Offset from the beginning of the string in bytes.  */
  tree byteoff;
  tree memsize;
  tree decl;
  src = string_constant (src, &byteoff, &memsize, &decl);
  if (src == 0)
    return NULL_TREE;

  /* Determine the size of the string element.  */
  if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))
    return NULL_TREE;

  /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
     length of SRC.  Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
     in case the latter is less than the size of the array, such as when
     SRC refers to a short string literal used to initialize a large array.
     In that case, the elements of the array after the terminating NUL are
     all NUL.  */
  HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
  strelts = strelts / eltsize;

  if (!tree_fits_uhwi_p (memsize))
    return NULL_TREE;

  HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize;

  /* PTR can point to the byte representation of any string type, including
     char* and wchar_t*.  */
  const char *ptr = TREE_STRING_POINTER (src);

  if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
    {
      /* The code below works only for single byte character types.  */
      if (eltsize != 1)
	return NULL_TREE;

      /* If the string has an internal NUL character followed by any
	 non-NUL characters (e.g., "foo\0bar"), we can't compute
	 the offset to the following NUL if we don't know where to
	 start searching for it.  */
      unsigned len = string_length (ptr, eltsize, strelts);

      /* Return when an embedded null character is found or none at all.
	 In the latter case, set the DECL/LEN field in the DATA structure
	 so that callers may examine them.  */
      if (len + 1 < strelts)
	return NULL_TREE;
      else if (len >= maxelts)
	{
	  data->decl = decl;
	  data->off = byteoff;
	  data->len = ssize_int (len);
	  return NULL_TREE;
	}

      /* For empty strings the result should be zero.  */
      if (len == 0)
	return ssize_int (0);

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  If the offset falls within the bounds
	 of the string subtract the offset from the length of the string,
	 and return that.  Otherwise the length is zero.  Take care to
	 use SAVE_EXPR in case the OFFSET has side-effects.  */
      tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff)
						 : byteoff;
      offsave = fold_convert_loc (loc, sizetype, offsave);
      tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
				      size_int (len));
      tree lenexp = fold_build2_loc (loc, MINUS_EXPR, sizetype, size_int (len),
				     offsave);
      lenexp = fold_convert_loc (loc, ssizetype, lenexp);
      return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
			      build_zero_cst (ssizetype));
    }

  /* Offset from the beginning of the string in elements.  */
  HOST_WIDE_INT eltoff;

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (byteoff == 0)
    eltoff = 0;
  else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize)
    eltoff = -1;
  else
    eltoff = tree_to_uhwi (byteoff) / eltsize;

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (eltoff < 0 || eltoff >= maxelts)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, OPT_Warray_bounds,
		      "offset %qwi outside bounds of constant string",
		      eltoff);
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* If eltoff is larger than strelts but less than maxelts the
     string length is zero, since the excess memory will be zero.  */
  if (eltoff > strelts)
    return ssize_int (0);

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since ELTOFF is our starting index into the string, no further
     calculation is needed.  */
  unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
				strelts - eltoff);

  /* Don't know what to return if there was no zero termination.
     Ideally this would turn into a gcc_checking_assert over time.
     Set DECL/LEN so callers can examine them.  */
  if (len >= maxelts - eltoff)
    {
      data->decl = decl;
      data->off = byteoff;
      data->len = ssize_int (len);
      return NULL_TREE;
    }

  return ssize_int (len);
}
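
/* Some concrete cases of the above: c_strlen on "foobar" + 2 yields
   ssize_int (4); on "foo\0bar" + 5 it yields ssize_int (2), since the
   search starts past the embedded NUL; and on "foo\0bar" with a
   non-constant offset it yields NULL_TREE, because the distance to the
   first NUL then depends on where the search starts.  */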

/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, scalar_int_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
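
/* For example, on a little-endian target c_readstr ("abcd", SImode)
   yields the constant 0x64636261 ('a' in the least significant byte),
   whereas a big-endian target reads the same four bytes as
   0x61626364.  */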

/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
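
/* E.g. for an INTEGER_CST holding 'x' in the target character type,
   this returns zero and stores 'x' in *P; it returns nonzero only for
   non-INTEGER_CST arguments or when the value cannot be represented
   after the narrowing masks above.  */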

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (VAR_P (exp) && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}

/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
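
/* Summarizing the layout established above: word 0 of the buffer holds
   the frame pointer, word 1 the address of RECEIVER_LABEL, and the
   words from byte offset 2 * GET_MODE_SIZE (Pmode) onward hold the
   machine-dependent stack save area.  */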

/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = rtx_for_static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
  if (! targetm.have_nonlocal_goto ())
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 TARGET_STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
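
/* A sketch of the (internal-use-only) source pattern this pair of
   expanders supports:

     void *buf[5];
     if (__builtin_setjmp (buf) == 0)
       do_work ();          // normal path
     else
       handle_unwind ();    // reached via __builtin_longjmp (buf, 1)

   do_work and handle_unwind are placeholders; the second argument of
   __builtin_longjmp must be the literal 1, as asserted above.  */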

/* Return true if more CALL_EXPR arguments remain to be consumed in ITER.  */

static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  /* Get a bitmap of pointer argument numbers declared attribute nonnull.  */
  tree fn = CALL_EXPR_FN (callexpr);
  bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));

  for (unsigned argno = 1; ; ++argno)
    {
      code = (enum tree_code) va_arg (ap, int);

      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	case POINTER_TYPE:
	  /* The actual argument must be nonnull when either the whole
	     called function has been declared nonnull, or when the formal
	     argument corresponding to the actual argument has been.  */
	  if (argmap
	      && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
	    {
	      arg = next_const_call_expr_arg (&iter);
	      if (!validate_arg (arg, code) || integer_zerop (arg))
		goto end;
	      break;
	    }
	  /* FALLTHRU */
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  BITMAP_FREE (argmap);

  return res;
}
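
/* For instance, the nonlocal-goto expander below verifies its call with

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)

   i.e. exactly two pointer arguments and nothing else, while
   expand_builtin_prefetch passes (exp, POINTER_TYPE, 0) to accept one
   pointer followed by any further arguments.  */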

/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
      if (regnum != INVALID_REGNUM && fixed_regs[regnum])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  buf_addr = convert_memory_address (Pmode, buf_addr);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
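
/* As a concrete case, __builtin_prefetch (&a[i], 0, 3) arrives here
   with ARG0 == &a[i], ARG1 == 0 (a read prefetch) and ARG2 == 3
   (maximum temporal locality), the same values the defaults above
   supply when the optional arguments are omitted.  */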

/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}

/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
    }
  return size;
}

/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}

/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  fixed_size_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}

/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  fixed_size_mode mode;
  rtx struct_incoming_value
    = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
				      crtl->args.pretend_args_size),
		       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}

/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    rtx_insn *seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
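
/* An illustrative (and deliberately unchecked) use of the untyped call
   machinery from user code might look like

     void *args = __builtin_apply_args ();
     void *res = __builtin_apply ((void (*) ()) forward_to, args, 64);
     __builtin_return (res);

   where forward_to is a placeholder for the real callee and 64 is the
   caller's estimate of the size of the pushed argument block.  */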

/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  fixed_size_mode mode;
  rtx incoming_args, result, reg, dest, src;
  rtx_call_insn *call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value
    = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
  if (!STACK_GROWS_DOWNWARD)
    incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
					 incoming_args, 0, OPTAB_LIB_WIDEN);

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
  else
    emit_stack_save (SAVE_BLOCK, &old_stack_level);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
  if (!STACK_GROWS_DOWNWARD)
    {
      if (CONST_INT_P (argsize))
	dest = plus_constant (Pmode, dest, -INTVAL (argsize));
      else
	dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
    }
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
  if (targetm.have_untyped_call ())
    {
      rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
      emit_call_insn (targetm.gen_untyped_call (mem, result,
						result_vector (1, result)));
    }
  else if (targetm.have_call_value ())
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    gcc_assert (!valreg); /* have_untyped_call required.  */

	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_insn (targetm.gen_call_value (valreg,
					 gen_rtx_MEM (FUNCTION_MODE, function),
					 const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
  else
    emit_stack_restore (SAVE_BLOCK, old_stack_level);
  fixup_args_size_notes (call_insn, get_last_insn (), 0);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
/* Perform an untyped return.  */

static void
expand_builtin_return (rtx result)
{
  int size, align, regno;
  fixed_size_mode mode;
  rtx reg;
  rtx_insn *call_fusage = 0;

  result = convert_memory_address (Pmode, result);

  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

  if (targetm.have_untyped_return ())
    {
      rtx vector = result_vector (0, result);
      emit_jump_insn (targetm.gen_untyped_return (result, vector));
      emit_barrier ();
      return;
    }

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
	emit_move_insn (reg, adjust_address (result, mode, size));

	push_to_sequence (call_fusage);
	emit_use (reg);
	call_fusage = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insn (call_fusage);

  /* Return whatever values were restored by jumping directly to the end
     of the function.  */
  expand_naked_return ();
}
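
/* An illustrative user-level sketch (not part of this file) of what the
   two expanders above implement together.  A forwarder can capture its
   incoming registers, re-issue the call, and propagate the return value
   without knowing the callee's signature:

     void *forward (void (*fn) ())
     {
       void *args = __builtin_apply_args ();
       void *result = __builtin_apply (fn, args, 64);
       __builtin_return (result);
     }

   The 64 here is an assumed upper bound on the size of the pushed
   argument block; it corresponds to the ARGSIZE operand expanded by
   expand_builtin_apply above.  */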
/* Used by expand_builtin_classify_type and fold_builtin_classify_type.  */

static enum type_class
type_to_class (tree type)
{
  switch (TREE_CODE (type))
    {
    case VOID_TYPE:	   return void_type_class;
    case INTEGER_TYPE:	   return integer_type_class;
    case ENUMERAL_TYPE:	   return enumeral_type_class;
    case BOOLEAN_TYPE:	   return boolean_type_class;
    case POINTER_TYPE:	   return pointer_type_class;
    case REFERENCE_TYPE:   return reference_type_class;
    case OFFSET_TYPE:	   return offset_type_class;
    case REAL_TYPE:	   return real_type_class;
    case COMPLEX_TYPE:	   return complex_type_class;
    case FUNCTION_TYPE:	   return function_type_class;
    case METHOD_TYPE:	   return method_type_class;
    case RECORD_TYPE:	   return record_type_class;
    case UNION_TYPE:
    case QUAL_UNION_TYPE:  return union_type_class;
    case ARRAY_TYPE:	   return (TYPE_STRING_FLAG (type)
				   ? string_type_class : array_type_class);
    case LANG_TYPE:	   return lang_type_class;
    default:		   return no_type_class;
    }
}
/* Expand a call EXP to __builtin_classify_type.  */

static rtx
expand_builtin_classify_type (tree exp)
{
  if (call_expr_nargs (exp))
    return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
  return GEN_INT (no_type_class);
}
/* This helper macro, meant to be used in mathfn_built_in below, determines
   which among a set of builtin math functions is appropriate for a given type
   mode.  The `F' (float) and `L' (long double) are automatically generated
   from the 'double' case.  If a function supports the _Float<N> and _Float<N>X
   types, there are additional types that are considered with 'F32', 'F64',
   'F128', etc. suffixes.  */
#define CASE_MATHFN(MATHFN) \
  CASE_CFN_##MATHFN: \
  fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
  fcodel = BUILT_IN_##MATHFN##L ; break;
/* Similar to the above, but also add support for the _Float<N> and _Float<N>X
   types.  */
#define CASE_MATHFN_FLOATN(MATHFN) \
  CASE_CFN_##MATHFN: \
  fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
  fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
  fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
  fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
  fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
  break;
/* Similar to above, but appends _R after any F/L suffix.  */
#define CASE_MATHFN_REENT(MATHFN) \
  case CFN_BUILT_IN_##MATHFN##_R: \
  case CFN_BUILT_IN_##MATHFN##F_R: \
  case CFN_BUILT_IN_##MATHFN##L_R: \
  fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
  fcodel = BUILT_IN_##MATHFN##L_R ; break;
/* Return a function equivalent to FN but operating on floating-point
   values of type TYPE, or END_BUILTINS if no such function exists.
   This is purely an operation on function codes; it does not guarantee
   that the target actually has an implementation of the function.  */

static built_in_function
mathfn_built_in_2 (tree type, combined_fn fn)
{
  tree mtype;
  built_in_function fcode, fcodef, fcodel;
  built_in_function fcodef16 = END_BUILTINS;
  built_in_function fcodef32 = END_BUILTINS;
  built_in_function fcodef64 = END_BUILTINS;
  built_in_function fcodef128 = END_BUILTINS;
  built_in_function fcodef32x = END_BUILTINS;
  built_in_function fcodef64x = END_BUILTINS;
  built_in_function fcodef128x = END_BUILTINS;

  switch (fn)
    {
    CASE_MATHFN_FLOATN (CEIL)
    CASE_MATHFN_FLOATN (COPYSIGN)
    CASE_MATHFN_FLOATN (FLOOR)
    CASE_MATHFN_FLOATN (FMA)
    CASE_MATHFN_FLOATN (FMAX)
    CASE_MATHFN_FLOATN (FMIN)
    CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
    CASE_MATHFN (HUGE_VAL)
    CASE_MATHFN (IFLOOR)
    CASE_MATHFN (IROUND)
    CASE_MATHFN (LFLOOR)
    CASE_MATHFN (LGAMMA)
    CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
    CASE_MATHFN (LLCEIL)
    CASE_MATHFN (LLFLOOR)
    CASE_MATHFN (LLRINT)
    CASE_MATHFN (LLROUND)
    CASE_MATHFN (LROUND)
    CASE_MATHFN_FLOATN (NEARBYINT)
    CASE_MATHFN (NEXTAFTER)
    CASE_MATHFN (NEXTTOWARD)
    CASE_MATHFN (REMAINDER)
    CASE_MATHFN (REMQUO)
    CASE_MATHFN_FLOATN (RINT)
    CASE_MATHFN_FLOATN (ROUND)
    CASE_MATHFN (SCALBLN)
    CASE_MATHFN (SCALBN)
    CASE_MATHFN (SIGNBIT)
    CASE_MATHFN (SIGNIFICAND)
    CASE_MATHFN (SINCOS)
    CASE_MATHFN_FLOATN (SQRT)
    CASE_MATHFN (TGAMMA)
    CASE_MATHFN_FLOATN (TRUNC)

    default:
      return END_BUILTINS;
    }

  mtype = TYPE_MAIN_VARIANT (type);
  if (mtype == double_type_node)
    return fcode;
  else if (mtype == float_type_node)
    return fcodef;
  else if (mtype == long_double_type_node)
    return fcodel;
  else if (mtype == float16_type_node)
    return fcodef16;
  else if (mtype == float32_type_node)
    return fcodef32;
  else if (mtype == float64_type_node)
    return fcodef64;
  else if (mtype == float128_type_node)
    return fcodef128;
  else if (mtype == float32x_type_node)
    return fcodef32x;
  else if (mtype == float64x_type_node)
    return fcodef64x;
  else if (mtype == float128x_type_node)
    return fcodef128x;
  else
    return END_BUILTINS;
}
/* Return the mathematical function equivalent to FN but operating directly on
   TYPE, if available.  If IMPLICIT_P is true use the implicit builtin
   declaration, otherwise use the explicit declaration.  If we can't do the
   conversion, return null.  */

static tree
mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
{
  built_in_function fcode2 = mathfn_built_in_2 (type, fn);
  if (fcode2 == END_BUILTINS)
    return NULL_TREE;

  if (implicit_p && !builtin_decl_implicit_p (fcode2))
    return NULL_TREE;

  return builtin_decl_explicit (fcode2);
}

/* Like mathfn_built_in_1, but always use the implicit array.  */

tree
mathfn_built_in (tree type, combined_fn fn)
{
  return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
}

/* Like mathfn_built_in_1, but take a built_in_function and
   always use the implicit array.  */

tree
mathfn_built_in (tree type, enum built_in_function fn)
{
  return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
}
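
/* Illustrative examples of this mapping (a sketch, not exhaustive):
   with TYPE equal to float_type_node, CFN_SQRT resolves to the decl
   for sqrtf; with long_double_type_node it resolves to sqrtl; for an
   unsupported TYPE or function the result is NULL_TREE.  */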
/* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
   return its code, otherwise return IFN_LAST.  Note that this function
   only tests whether the function is defined in internal-fn.def, not whether
   it is actually available on the target.  */

internal_fn
associated_internal_fn (tree fndecl)
{
  gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
  tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
  switch (DECL_FUNCTION_CODE (fndecl))
    {
#define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
    CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
#define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
    CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
#define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
    CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
#include "internal-fn.def"

    CASE_FLT_FN (BUILT_IN_POW10):
      return IFN_EXP10;

    CASE_FLT_FN (BUILT_IN_DREM):
      return IFN_REMAINDER;

    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
	return IFN_LDEXP;
      return IFN_LAST;

    default:
      return IFN_LAST;
    }
}
/* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
   on the current target by a call to an internal function, return the
   code of that internal function, otherwise return IFN_LAST.  The caller
   is responsible for ensuring that any side-effects of the built-in
   call are dealt with correctly.  E.g. if CALL sets errno, the caller
   must decide that the errno result isn't needed or make it available
   in some other way.  */

internal_fn
replacement_internal_fn (gcall *call)
{
  if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
    {
      internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
      if (ifn != IFN_LAST)
	{
	  tree_pair types = direct_internal_fn_types (ifn, call);
	  optimization_type opt_type = bb_optimization_type (gimple_bb (call));
	  if (direct_internal_fn_supported_p (ifn, types, opt_type))
	    return ifn;
	}
    }
  return IFN_LAST;
}
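
/* For example (illustrative): a GIMPLE call to __builtin_sqrtf can be
   replaced by IFN_SQRT when the target implements the sqrt optab for
   SFmode; on a target without the instruction this function returns
   IFN_LAST and the library call is kept.  */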
/* Expand a call to the builtin ternary math functions (fma).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, op2, result;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1, arg2;
  machine_mode mode;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  arg2 = CALL_EXPR_ARG (exp, 2);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_FMA):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
      builtin_optab = fma_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  result = gen_reg_rtx (mode);

  /* Always stabilize the argument list.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
  CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);
  op2 = expand_normal (arg2);

  start_sequence ();

  /* Compute into RESULT.
     Set RESULT to wherever the result comes back.  */
  result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
			      result, 0);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (result == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return result;
}
/* Expand a call to the builtin sin and cos math functions.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      builtin_optab = sincos_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Check if sincos insn is available, otherwise fall back
     to sin or cos insn.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_FLT_FN (BUILT_IN_SIN):
	builtin_optab = sin_optab; break;
      CASE_FLT_FN (BUILT_IN_COS):
	builtin_optab = cos_optab; break;
      default:
	gcc_unreachable ();
      }

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into RESULT.
	 Set RESULT to wherever the result comes back.  */
      if (builtin_optab == sincos_optab)
	{
	  int ok;

	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_SIN):
	      ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_COS):
	      ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  gcc_assert (ok);
	}
      else
	result = expand_unop (mode, builtin_optab, op0, result, 0);

      if (result != 0)
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return result;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  return expand_call (exp, target, target == const0_rtx);
}
/* Given an interclass math builtin decl FNDECL and its argument ARG
   return an RTL instruction code that implements the functionality.
   If that isn't possible or available return CODE_FOR_nothing.  */

static enum insn_code
interclass_mathfn_icode (tree arg, tree fndecl)
{
  bool errno_set = false;
  optab builtin_optab = unknown_optab;
  machine_mode mode;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ILOGB):
      errno_set = true; builtin_optab = ilogb_optab; break;
    CASE_FLT_FN (BUILT_IN_ISINF):
      builtin_optab = isinf_optab; break;
    case BUILT_IN_ISNORMAL:
    case BUILT_IN_ISFINITE:
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      /* These builtins have no optabs (yet).  */
      break;
    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && errno_set)
    return CODE_FOR_nothing;

  /* Optab mode depends on the mode of the input argument.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (builtin_optab)
    return optab_handler (builtin_optab, mode);
  return CODE_FOR_nothing;
}
/* Expand a call to one of the builtin math functions that operate on
   a floating point argument and output an integer result (ilogb, isinf,
   isnan, etc).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_interclass_mathfn (tree exp, rtx target)
{
  enum insn_code icode = CODE_FOR_nothing;
  rtx op0;
  tree fndecl = get_callee_fndecl (exp);
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  icode = interclass_mathfn_icode (arg, fndecl);
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[1];
      rtx_insn *last = get_last_insn ();
      tree orig_arg = arg;

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      if (mode != GET_MODE (op0))
	op0 = convert_to_mode (mode, op0, 0);

      create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_legitimize_operands (icode, 0, 1, ops)
	  && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
	return ops[0].value;

      delete_insns_since (last);
      CALL_EXPR_ARG (exp, 0) = orig_arg;
    }

  return NULL_RTX;
}
/* Expand a call to the builtin sincos math function.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function.  */

static rtx
expand_builtin_sincos (tree exp)
{
  rtx op0, op1, op2, target1, target2;
  machine_mode mode;
  tree arg, sinp, cosp;
  int result;
  location_t loc = EXPR_LOCATION (exp);
  tree alias_type, alias_off;

  if (!validate_arglist (exp, REAL_TYPE,
			 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  sinp = CALL_EXPR_ARG (exp, 1);
  cosp = CALL_EXPR_ARG (exp, 2);

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Check if sincos insn is available, otherwise emit the call.  */
  if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  target1 = gen_reg_rtx (mode);
  target2 = gen_reg_rtx (mode);

  op0 = expand_normal (arg);
  alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
  alias_off = build_int_cst (alias_type, 0);
  op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
					sinp, alias_off));
  op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
					cosp, alias_off));

  /* Compute into target1 and target2.
     Set TARGET to wherever the result comes back.  */
  result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
  gcc_assert (result);

  /* Move target1 and target2 to the memory locations indicated
     by the pointers.  */
  emit_move_insn (op1, target1);
  emit_move_insn (op2, target2);

  return const0_rtx;
}
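
/* The contract being expanded is the usual one (illustrative):

     sincos (x, &s, &c);   // s = sin (x), c = cos (x)

   so a single sincos instruction computes both values; above, target1
   receives the sin result and target2 the cos result.  */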
/* Expand a call to the internal cexpi builtin to the sincos math function.
   EXP is the expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  */

static rtx
expand_builtin_cexpi (tree exp, rtx target)
{
  tree fndecl = get_callee_fndecl (exp);
  tree arg, type;
  machine_mode mode;
  rtx op0, op1, op2;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Try expanding via a sincos optab, fall back to emitting a libcall
     to sincos or cexp.  We are sure we have sincos or cexp because cexpi
     is only generated from sincos, cexp or if we have either of them.  */
  if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
    {
      op1 = gen_reg_rtx (mode);
      op2 = gen_reg_rtx (mode);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      /* Compute into op1 and op2.  */
      expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
    }
  else if (targetm.libc_has_function (function_sincos))
    {
      tree call, fn = NULL_TREE;
      tree top1, top2;
      rtx op1a, op2a;

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = builtin_decl_explicit (BUILT_IN_SINCOS);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
      else
	gcc_unreachable ();

      op1 = assign_temp (TREE_TYPE (arg), 1, 1);
      op2 = assign_temp (TREE_TYPE (arg), 1, 1);
      op1a = copy_addr_to_reg (XEXP (op1, 0));
      op2a = copy_addr_to_reg (XEXP (op2, 0));
      top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
      top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);

      /* Make sure not to fold the sincos call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
				      call, 3, arg, top1, top2));
    }
  else
    {
      tree call, fn = NULL_TREE, narg;
      tree ctype = build_complex_type (type);

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = builtin_decl_explicit (BUILT_IN_CEXPF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = builtin_decl_explicit (BUILT_IN_CEXP);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = builtin_decl_explicit (BUILT_IN_CEXPL);
      else
	gcc_unreachable ();

      /* If we don't have a decl for cexp create one.  This is the
	 friendliest fallback if the user calls __builtin_cexpi
	 without full target C99 function support.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;
	  const char *name = NULL;

	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	    name = "cexpf";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	    name = "cexp";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	    name = "cexpl";

	  fntype = build_function_type_list (ctype, ctype, NULL_TREE);
	  fn = build_fn_decl (name, fntype);
	}

      narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
			      build_real (type, dconst0), arg);

      /* Make sure not to fold the cexp call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      return expand_expr (build_call_nary (ctype, call, 1, narg),
			  target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now build the proper return type.  */
  return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
			      make_tree (TREE_TYPE (arg), op2),
			      make_tree (TREE_TYPE (arg), op1)),
		      target, VOIDmode, EXPAND_NORMAL);
}
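
/* What all three strategies above compute is (illustrative):

     __builtin_cexpi (x) == cos (x) + i * sin (x)

   which is why the final COMPLEX_EXPR takes op2 (the cos result) as
   the real part and op1 (the sin result) as the imaginary part.  */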
/* Conveniently construct a function call expression.  FNDECL names the
   function to be called, N is the number of arguments, and the "..."
   parameters are the argument expressions.  Unlike build_call_expr
   this doesn't fold the call, hence it will always return a CALL_EXPR.  */

static tree
build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
{
  va_list ap;
  tree fntype = TREE_TYPE (fndecl);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);

  va_start (ap, n);
  fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
  va_end (ap);
  SET_EXPR_LOCATION (fn, loc);
  return fn;
}
/* Expand a call to one of the builtin rounding functions gcc defines
   as an extension (lfloor and lceil).  As these are gcc extensions we
   do not need to worry about setting errno to EDOM.
   If expanding via optab fails, lower expression to (int)(floor(x)).
   EXP is the expression that is a call to the builtin function;
   if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, tmp;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fallback_fn;
  tree fallback_fndecl;
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
      builtin_optab = lceil_optab;
      fallback_fn = BUILT_IN_CEIL;
      break;

    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      builtin_optab = lfloor_optab;
      fallback_fn = BUILT_IN_FLOOR;
      break;

    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  /* Compute into TARGET.  */
  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns).  */
  end_sequence ();

  /* Fall back to floating point rounding optab.  */
  fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);

  /* For non-C99 targets we may end up without a fallback fndecl here
     if the user called __builtin_lfloor directly.  In this case emit
     a call to the floor/ceil variants nevertheless.  This should result
     in the best user experience for not full C99 targets.  */
  if (fallback_fndecl == NULL_TREE)
    {
      tree fntype;
      const char *name = NULL;

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_ICEIL:
	case BUILT_IN_LCEIL:
	case BUILT_IN_LLCEIL:
	  name = "ceil";
	  break;
	case BUILT_IN_ICEILF:
	case BUILT_IN_LCEILF:
	case BUILT_IN_LLCEILF:
	  name = "ceilf";
	  break;
	case BUILT_IN_ICEILL:
	case BUILT_IN_LCEILL:
	case BUILT_IN_LLCEILL:
	  name = "ceill";
	  break;
	case BUILT_IN_IFLOOR:
	case BUILT_IN_LFLOOR:
	case BUILT_IN_LLFLOOR:
	  name = "floor";
	  break;
	case BUILT_IN_IFLOORF:
	case BUILT_IN_LFLOORF:
	case BUILT_IN_LLFLOORF:
	  name = "floorf";
	  break;
	case BUILT_IN_IFLOORL:
	case BUILT_IN_LFLOORL:
	case BUILT_IN_LLFLOORL:
	  name = "floorl";
	  break;
	default:
	  gcc_unreachable ();
	}

      fntype = build_function_type_list (TREE_TYPE (arg),
					 TREE_TYPE (arg), NULL_TREE);
      fallback_fndecl = build_fn_decl (name, fntype);
    }

  exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);

  tmp = expand_normal (exp);
  tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));

  /* Truncate the result of the floating point optab to integer
     via expand_fix ().  */
  target = gen_reg_rtx (mode);
  expand_fix (target, tmp, 0);

  return target;
}
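
/* Illustrative lowering when the lfloor optab is missing (a sketch):

     long l = __builtin_lfloor (x);

   becomes, via the fallback path above,

     double tmp = floor (x);
     long l = (long) tmp;   // expand_fix () performs the truncation

   which is safe because lfloor/lceil are GCC extensions that need not
   set errno.  */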
/* Expand a call to one of the builtin math functions doing integer
   conversion (lrint).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn_2 (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg;
  machine_mode mode;
  enum built_in_function fallback_fn = BUILT_IN_NONE;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_IRINT):
      fallback_fn = BUILT_IN_LRINT;
      gcc_fallthrough ();
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      builtin_optab = lrint_optab;
      break;

    CASE_FLT_FN (BUILT_IN_IROUND):
      fallback_fn = BUILT_IN_LROUND;
      gcc_fallthrough ();
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      builtin_optab = lround_optab;
      break;

    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
    return NULL_RTX;

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (!flag_errno_math)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      if (expand_sfix_optab (result, op0, builtin_optab))
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return result;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  if (fallback_fn != BUILT_IN_NONE)
    {
      /* Fall back to rounding to long int.  Use implicit_p 0 - for non-C99
	 targets, (int) round (x) should never be transformed into
	 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
	 a call to lround in the hope that the target provides at least some
	 C99 functions.  This should result in the best user experience for
	 not full C99 targets.  */
      tree fallback_fndecl = mathfn_built_in_1
	(TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);

      exp = build_call_nofold_loc (EXPR_LOCATION (exp),
				   fallback_fndecl, 1, arg);

      target = expand_call (exp, NULL_RTX, target == const0_rtx);
      target = maybe_emit_group_store (target, TREE_TYPE (exp));
      return convert_to_mode (mode, target, 0);
    }

  return expand_call (exp, target, target == const0_rtx);
}
/* Expand a call to the powi built-in mathematical function.  Return NULL_RTX if
   a normal call should be emitted rather than expanding the function
   in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_powi (tree exp, rtx target)
{
  tree arg0, arg1;
  rtx op0, op1;
  machine_mode mode;
  machine_mode mode2;

  if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Emit a libcall to libgcc.  */

  /* Mode of the 2nd argument must match that of an int.  */
  mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();

  if (target == NULL_RTX)
    target = gen_reg_rtx (mode);

  op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != mode)
    op0 = convert_to_mode (mode, op0, 0);
  op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
  if (GET_MODE (op1) != mode2)
    op1 = convert_to_mode (mode2, op1, 0);

  target = emit_library_call_value (optab_libfunc (powi_optab, mode),
				    target, LCT_CONST, mode,
				    op0, mode, op1, mode2);

  return target;
}
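
/* Illustratively, on a typical target

     double d = __builtin_powi (x, n);

   expands to a call to the libgcc helper returned by
   optab_libfunc (powi_optab, DFmode) -- conventionally __powidf2 --
   with X passed in DFmode and N in the target's int mode.  */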
/* Expand expression EXP which is a call to the strlen builtin.  Return
   NULL_RTX if we failed and the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strlen (tree exp, rtx target,
		       machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  struct expand_operand ops[4];
  rtx pat;
  tree len;
  tree src = CALL_EXPR_ARG (exp, 0);
  rtx src_reg;
  rtx_insn *before_strlen;
  machine_mode insn_mode;
  enum insn_code icode = CODE_FOR_nothing;
  unsigned int align;

  /* If the length can be computed at compile-time, return it.  */
  len = c_strlen (src, 0);
  if (len)
    return expand_expr (len, target, target_mode, EXPAND_NORMAL);

  /* If the length can be computed at compile-time and is constant
     integer, but there are side-effects in src, evaluate
     src for side-effects, then return len.
     E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
     can be optimized into: i++; x = 3;  */
  len = c_strlen (src, 1);
  if (len && TREE_CODE (len) == INTEGER_CST)
    {
      expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (len, target, target_mode, EXPAND_NORMAL);
    }

  align = get_pointer_alignment (src) / BITS_PER_UNIT;

  /* If SRC is not a pointer type, don't do this operation inline.  */
  if (align == 0)
    return NULL_RTX;

  /* Bail out if we can't compute strlen in the right mode.  */
  FOR_EACH_MODE_FROM (insn_mode, target_mode)
    {
      icode = optab_handler (strlen_optab, insn_mode);
      if (icode != CODE_FOR_nothing)
	break;
    }
  if (insn_mode == VOIDmode)
    return NULL_RTX;

  /* Make a place to hold the source address.  We will not expand
     the actual source until we are sure that the expansion will
     not fail -- there are trees that cannot be expanded twice.  */
  src_reg = gen_reg_rtx (Pmode);

  /* Mark the beginning of the strlen sequence so we can emit the
     source operand later.  */
  before_strlen = get_last_insn ();

  create_output_operand (&ops[0], target, insn_mode);
  create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
  create_integer_operand (&ops[2], 0);
  create_integer_operand (&ops[3], align);
  if (!maybe_expand_insn (icode, 4, ops))
    return NULL_RTX;

  /* Check to see if the argument was declared attribute nonstring
     and if so, issue a warning since at this point it's not known
     to be nul-terminated.  */
  maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);

  /* Now that we are assured of success, expand the source.  */
  start_sequence ();
  pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
  if (pat != src_reg)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      if (GET_MODE (pat) != Pmode)
	pat = convert_to_mode (Pmode, pat,
			       POINTERS_EXTEND_UNSIGNED);
#endif
      emit_move_insn (src_reg, pat);
    }
  pat = get_insns ();
  end_sequence ();

  if (before_strlen)
    emit_insn_after (pat, before_strlen);
  else
    emit_insn_before (pat, get_insns ());

  /* Return the value in the proper mode for this function.  */
  if (GET_MODE (ops[0].value) == target_mode)
    target = ops[0].value;
  else if (target != 0)
    convert_move (target, ops[0].value, 0);
  else
    target = convert_to_mode (target_mode, ops[0].value, 0);

  return target;
}
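
/* Two illustrative cases of the expansion above (not from this file):

     size_t n = strlen ("abc");           // folded to 3 at compile time
     size_t m = strlen (i++ ? "ab" : "cd");
	// side-effects of i++ are evaluated, then the constant 2 is used

   Only when no constant length is known does the strlen optab or a
   library call get used.  */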
/* Expand call EXP to the strnlen built-in, returning the result
   and setting it in TARGET.  Otherwise return NULL_RTX on failure.  */

static rtx
expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree src = CALL_EXPR_ARG (exp, 0);
  tree bound = CALL_EXPR_ARG (exp, 1);

  if (!bound)
    return NULL_RTX;

  location_t loc = UNKNOWN_LOCATION;
  if (EXPR_HAS_LOCATION (exp))
    loc = EXPR_LOCATION (exp);

  tree maxobjsize = max_object_size ();
  tree func = get_callee_fndecl (exp);

  /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
     so these conversions aren't necessary.  */
  c_strlen_data data;
  memset (&data, 0, sizeof (c_strlen_data));
  tree len = c_strlen (src, 0, &data, 1);
  if (len)
    len = fold_convert_loc (loc, TREE_TYPE (bound), len);

  if (TREE_CODE (bound) == INTEGER_CST)
    {
      if (!TREE_NO_WARNING (exp)
	  && tree_int_cst_lt (maxobjsize, bound)
	  && warning_at (loc, OPT_Wstringop_overflow_,
			 "%K%qD specified bound %E "
			 "exceeds maximum object size %E",
			 exp, func, bound, maxobjsize))
	TREE_NO_WARNING (exp) = true;

      bool exact = true;
      if (!len || TREE_CODE (len) != INTEGER_CST)
	{
	  /* Clear EXACT if LEN may be less than SRC suggests,
	     such as in
	       strnlen (&a[i], sizeof a)
	     where the value of i is unknown.  Unless i's value is
	     zero, the call is unsafe because the bound is greater.  */
	  data.decl = unterminated_array (src, &len, &exact);
	  if (!data.decl)
	    return NULL_RTX;
	}

      if (data.decl
	  && !TREE_NO_WARNING (exp)
	  && ((tree_int_cst_lt (len, bound))
	      || !exact))
	{
	  location_t warnloc
	    = expansion_point_location_if_in_system_header (loc);

	  if (warning_at (warnloc, OPT_Wstringop_overflow_,
			  exact
			  ? G_("%K%qD specified bound %E exceeds the size %E "
			       "of unterminated array")
			  : G_("%K%qD specified bound %E may exceed the size "
			       "of at most %E of unterminated array"),
			  exp, func, bound, len))
	    {
	      inform (DECL_SOURCE_LOCATION (data.decl),
		      "referenced argument declared here");
	      TREE_NO_WARNING (exp) = true;
	      return NULL_RTX;
	    }
	}

      if (!len)
	return NULL_RTX;

      len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
      return expand_expr (len, target, target_mode, EXPAND_NORMAL);
    }

  if (TREE_CODE (bound) != SSA_NAME)
    return NULL_RTX;

  wide_int min, max;
  enum value_range_kind rng = get_range_info (bound, &min, &max);
  if (rng != VR_RANGE)
    return NULL_RTX;

  if (!TREE_NO_WARNING (exp)
      && wi::ltu_p (wi::to_wide (maxobjsize), min)
      && warning_at (loc, OPT_Wstringop_overflow_,
		     "%K%qD specified bound [%wu, %wu] "
		     "exceeds maximum object size %E",
		     exp, func, min.to_uhwi (), max.to_uhwi (), maxobjsize))
    TREE_NO_WARNING (exp) = true;

  bool exact = true;
  if (!len || TREE_CODE (len) != INTEGER_CST)
    {
      data.decl = unterminated_array (src, &len, &exact);
      if (!data.decl)
	return NULL_RTX;
    }

  if (data.decl
      && !TREE_NO_WARNING (exp)
      && (wi::ltu_p (wi::to_wide (len), min)
	  || !exact))
    {
      location_t warnloc
	= expansion_point_location_if_in_system_header (loc);

      if (warning_at (warnloc, OPT_Wstringop_overflow_,
		      exact
		      ? G_("%K%qD specified bound [%wu, %wu] exceeds "
			   "the size %E of unterminated array")
		      : G_("%K%qD specified bound [%wu, %wu] may exceed "
			   "the size of at most %E of unterminated array"),
		      exp, func, min.to_uhwi (), max.to_uhwi (), len))
	{
	  inform (DECL_SOURCE_LOCATION (data.decl),
		  "referenced argument declared here");
	  TREE_NO_WARNING (exp) = true;
	  return NULL_RTX;
	}
    }

  if (!len)
    return NULL_RTX;

  if (wi::gtu_p (min, wi::to_wide (len)))
    return expand_expr (len, target, target_mode, EXPAND_NORMAL);

  len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
  return expand_expr (len, target, target_mode, EXPAND_NORMAL);
}
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

static rtx
builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
			 scalar_int_mode mode)
{
  const char *str = (const char *) data;

  gcc_assert (offset >= 0
	      && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
		  <= strlen (str) + 1));

  return c_readstr (str + offset, mode);
}
/* LEN specifies the length of the block of the memcpy/memset operation.
   Figure out its range and store it in MIN_SIZE/MAX_SIZE.
   In some cases we can make a very likely guess about the maximum size,
   which we store in PROBABLE_MAX_SIZE.  */

static void
determine_block_size (tree len, rtx len_rtx,
		      unsigned HOST_WIDE_INT *min_size,
		      unsigned HOST_WIDE_INT *max_size,
		      unsigned HOST_WIDE_INT *probable_max_size)
{
  if (CONST_INT_P (len_rtx))
    {
      *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
      return;
    }
  else
    {
      wide_int min, max;
      enum value_range_kind range_type = VR_UNDEFINED;

      /* Determine bounds from the type.  */
      if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
	*min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
      else
	*min_size = 0;
      if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
	*probable_max_size = *max_size
	  = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
      else
	*probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));

      if (TREE_CODE (len) == SSA_NAME)
	range_type = get_range_info (len, &min, &max);
      if (range_type == VR_RANGE)
	{
	  if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
	    *min_size = min.to_uhwi ();
	  if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
	    *probable_max_size = *max_size = max.to_uhwi ();
	}
      else if (range_type == VR_ANTI_RANGE)
	{
	  /* An anti-range 0...N lets us determine a minimal size of N+1.  */
	  if (min == 0)
	    {
	      if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
		*min_size = max.to_uhwi () + 1;
	    }
	  /* Code like

	     int n;
	     if (n < 100)
	       memcpy (a, b, n)

	     produces an anti-range allowing negative values of N.  We still
	     can use the information and make a guess that N is not negative.
	     */
	  else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
	    *probable_max_size = min.to_uhwi () - 1;
	}
    }
  gcc_checking_assert (*max_size <=
		       (unsigned HOST_WIDE_INT)
		       GET_MODE_MASK (GET_MODE (len_rtx)));
}
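
/* Illustrative effect (a sketch): for

     void f (char *a, char *b, unsigned n)
     {
       if (n >= 100) return;
       memcpy (a, b, n);
     }

   range information gives LEN the range [0, 99], so MIN_SIZE/MAX_SIZE
   become 0/99 and the block-move expander can pick a strategy suited
   to small copies.  */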
/* Try to verify that the sizes and lengths of the arguments to a string
   manipulation function given by EXP are within valid bounds and that
   the operation does not lead to buffer overflow or read past the end.
   Arguments other than EXP may be null.  When non-null, the arguments
   have the following meaning:
   DST is the destination of a copy call or NULL otherwise.
   SRC is the source of a copy call or NULL otherwise.
   DSTWRITE is the number of bytes written into the destination obtained
   from the user-supplied size argument to the function (such as in
   memcpy(DST, SRC, DSTWRITE) or strncpy(DST, SRC, DSTWRITE)).
   MAXREAD is the user-supplied bound on the length of the source sequence
   (such as in strncat(d, s, N)).  It specifies the upper limit on the number
   of bytes to write.  If NULL, it's taken to be the same as DSTWRITE.
   SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
   expression EXP is a string function call (as opposed to a memory call
   like memcpy).  As an exception, SRCSTR can also be an integer denoting
   the precomputed size of the source string or object (for functions like
   memcpy).
   DSTSIZE is the size of the destination object specified by the last
   argument to the _chk builtins, typically resulting from the expansion
   of __builtin_object_size (such as in __builtin___strcpy_chk(DST, SRC,
   DSTSIZE)).

   When DSTWRITE is null LEN is checked to verify that it doesn't exceed
   SIZE_MAX.

   If the call is successfully verified as safe return true, otherwise
   return false.  */

bool
check_access (tree exp, tree, tree, tree dstwrite,
	      tree maxread, tree srcstr, tree dstsize)
{
  int opt = OPT_Wstringop_overflow_;

  /* The size of the largest object is half the address space, or
     PTRDIFF_MAX.  (This is way too permissive.)  */
  tree maxobjsize = max_object_size ();

  /* Either the length of the source string for string functions or
     the size of the source object for raw memory functions.  */
  tree slen = NULL_TREE;

  tree range[2] = { NULL_TREE, NULL_TREE };

  /* Set to true when the exact number of bytes written by a string
     function like strcpy is not known and the only thing that is
     known is that it must be at least one (for the terminating nul).  */
  bool at_least_one = false;
  if (srcstr)
    {
      /* SRCSTR is normally a pointer to string but as a special case
	 it can be an integer denoting the length of a string.  */
      if (POINTER_TYPE_P (TREE_TYPE (srcstr)))
	{
	  /* Try to determine the range of lengths the source string
	     refers to.  If it can be determined and is less than
	     the upper bound given by MAXREAD add one to it for
	     the terminating nul.  Otherwise, set it to one for
	     the same reason, or to MAXREAD as appropriate.  */
	  get_range_strlen (srcstr, range);
	  if (range[0] && (!maxread || TREE_CODE (maxread) == INTEGER_CST))
	    {
	      if (maxread && tree_int_cst_le (maxread, range[0]))
		range[0] = range[1] = maxread;
	      else
		range[0] = fold_build2 (PLUS_EXPR, size_type_node,
					range[0], size_one_node);

	      if (maxread && tree_int_cst_le (maxread, range[1]))
		range[1] = maxread;
	      else if (!integer_all_onesp (range[1]))
		range[1] = fold_build2 (PLUS_EXPR, size_type_node,
					range[1], size_one_node);

	      slen = range[0];
	    }
	  else
	    {
	      at_least_one = true;
	      slen = size_one_node;
	    }
	}
      else
	slen = srcstr;
    }

  if (!dstwrite && !maxread)
    {
      /* When the only available piece of data is the object size
	 there is nothing to do.  */
      if (!slen)
	return true;

      /* Otherwise, when the length of the source sequence is known
	 (as with strlen), set DSTWRITE to it.  */
      if (!range[0])
	dstwrite = slen;
    }

  if (!dstsize)
    dstsize = maxobjsize;

  if (dstwrite)
    get_size_range (dstwrite, range);

  tree func = get_callee_fndecl (exp);

  /* First check the number of bytes to be written against the maximum
     object size.  */
  if (range[0]
      && TREE_CODE (range[0]) == INTEGER_CST
      && tree_int_cst_lt (maxobjsize, range[0]))
    {
      if (TREE_NO_WARNING (exp))
	return false;

      location_t loc = tree_nonartificial_location (exp);
      loc = expansion_point_location_if_in_system_header (loc);

      bool warned;
      if (range[0] == range[1])
	warned = warning_at (loc, opt,
			     "%K%qD specified size %E "
			     "exceeds maximum object size %E",
			     exp, func, range[0], maxobjsize);
      else
	warned = warning_at (loc, opt,
			     "%K%qD specified size between %E and %E "
			     "exceeds maximum object size %E",
			     exp, func,
			     range[0], range[1], maxobjsize);
      if (warned)
	TREE_NO_WARNING (exp) = true;

      return false;
    }

  /* The number of bytes to write is "exact" if DSTWRITE is non-null,
     constant, and in range of unsigned HOST_WIDE_INT.  */
  bool exactwrite = dstwrite && tree_fits_uhwi_p (dstwrite);

  /* Next check the number of bytes to be written against the destination
     object size.  */
  if (range[0] || !exactwrite || integer_all_onesp (dstwrite))
    {
      if (range[0]
	  && TREE_CODE (range[0]) == INTEGER_CST
	  && ((tree_fits_uhwi_p (dstsize)
	       && tree_int_cst_lt (dstsize, range[0]))
	      || (dstwrite
		  && tree_fits_uhwi_p (dstwrite)
		  && tree_int_cst_lt (dstwrite, range[0]))))
	{
	  if (TREE_NO_WARNING (exp))
	    return false;

	  location_t loc = tree_nonartificial_location (exp);
	  loc = expansion_point_location_if_in_system_header (loc);

	  if (dstwrite == slen && at_least_one)
	    {
	      /* This is a call to strcpy with a destination of 0 size
		 and a source of unknown length.  The call will write
		 at least one byte past the end of the destination.  */
	      warning_at (loc, opt,
			  "%K%qD writing %E or more bytes into a region "
			  "of size %E overflows the destination",
			  exp, func, range[0], dstsize);
	    }
	  else if (tree_int_cst_equal (range[0], range[1]))
	    warning_n (loc, opt, tree_to_uhwi (range[0]),
		       "%K%qD writing %E byte into a region "
		       "of size %E overflows the destination",
		       "%K%qD writing %E bytes into a region "
		       "of size %E overflows the destination",
		       exp, func, range[0], dstsize);
	  else if (tree_int_cst_sign_bit (range[1]))
	    {
	      /* Avoid printing the upper bound if it's invalid.  */
	      warning_at (loc, opt,
			  "%K%qD writing %E or more bytes into a region "
			  "of size %E overflows the destination",
			  exp, func, range[0], dstsize);
	    }
	  else
	    warning_at (loc, opt,
			"%K%qD writing between %E and %E bytes into "
			"a region of size %E overflows the destination",
			exp, func, range[0], range[1],
			dstsize);

	  /* Return error when an overflow has been detected.  */
	  return false;
	}
    }

  /* Check the maximum length of the source sequence against the size
     of the destination object if known, or against the maximum size
     of an object.  */
  if (maxread)
    {
      get_size_range (maxread, range);

      /* Use the lower end for MAXREAD from now on.  */
      if (range[0])
	maxread = range[0];

      if (range[0] && dstsize && tree_fits_uhwi_p (dstsize))
	{
	  location_t loc = tree_nonartificial_location (exp);
	  loc = expansion_point_location_if_in_system_header (loc);

	  if (tree_int_cst_lt (maxobjsize, range[0]))
	    {
	      if (TREE_NO_WARNING (exp))
		return false;

	      /* Warn about crazy big sizes first since that's more
		 likely to be meaningful than saying that the bound
		 is greater than the object size if both are big.  */
	      if (range[0] == range[1])
		warning_at (loc, opt,
			    "%K%qD specified bound %E "
			    "exceeds maximum object size %E",
			    exp, func,
			    range[0], maxobjsize);
	      else
		warning_at (loc, opt,
			    "%K%qD specified bound between %E and %E "
			    "exceeds maximum object size %E",
			    exp, func,
			    range[0], range[1], maxobjsize);

	      return false;
	    }

	  if (dstsize != maxobjsize && tree_int_cst_lt (dstsize, range[0]))
	    {
	      if (TREE_NO_WARNING (exp))
		return false;

	      if (tree_int_cst_equal (range[0], range[1]))
		warning_at (loc, opt,
			    "%K%qD specified bound %E "
			    "exceeds destination size %E",
			    exp, func,
			    range[0], dstsize);
	      else
		warning_at (loc, opt,
			    "%K%qD specified bound between %E and %E "
			    "exceeds destination size %E",
			    exp, func,
			    range[0], range[1], dstsize);
	      return false;
	    }
	}
    }

  /* Check for reading past the end of SRC.  */
  if (slen
      && slen == srcstr
      && dstwrite && range[0]
      && tree_int_cst_lt (slen, range[0]))
    {
      if (TREE_NO_WARNING (exp))
	return false;

      location_t loc = tree_nonartificial_location (exp);

      if (tree_int_cst_equal (range[0], range[1]))
	warning_n (loc, opt, tree_to_uhwi (range[0]),
		   "%K%qD reading %E byte from a region of size %E",
		   "%K%qD reading %E bytes from a region of size %E",
		   exp, func, range[0], slen);
      else if (tree_int_cst_sign_bit (range[1]))
	{
	  /* Avoid printing the upper bound if it's invalid.  */
	  warning_at (loc, opt,
		      "%K%qD reading %E or more bytes from a region "
		      "of size %E",
		      exp, func, range[0], slen);
	}
      else
	warning_at (loc, opt,
		    "%K%qD reading between %E and %E bytes from a region "
		    "of size %E",
		    exp, func, range[0], range[1], slen);
      return false;
    }

  return true;
}
/* Helper to compute the size of the object referenced by the DEST
   expression which must have pointer type, using Object Size type
   OSTYPE (only the least significant 2 bits are used).  Return
   an estimate of the size of the object if successful or NULL when
   the size cannot be determined.  When the referenced object involves
   a non-constant offset in some range the returned value represents
   the largest size given the smallest non-negative offset in the
   range.  The function is intended for diagnostics and should not
   be used to influence code generation or optimization.  */

tree
compute_objsize (tree dest, int ostype)
{
  unsigned HOST_WIDE_INT size;

  /* Only the two least significant bits are meaningful.  */
  ostype &= 3;

  if (compute_builtin_object_size (dest, ostype, &size))
    return build_int_cst (sizetype, size);

  if (TREE_CODE (dest) == SSA_NAME)
    {
      gimple *stmt = SSA_NAME_DEF_STMT (dest);
      if (!is_gimple_assign (stmt))
	return NULL_TREE;

      dest = gimple_assign_rhs1 (stmt);

      tree_code code = gimple_assign_rhs_code (stmt);
      if (code == POINTER_PLUS_EXPR)
	{
	  /* compute_builtin_object_size fails for addresses with
	     non-constant offsets.  Try to determine the range of
	     such an offset here and use it to adjust the constant
	     size.  */
	  tree off = gimple_assign_rhs2 (stmt);
	  if (TREE_CODE (off) == INTEGER_CST)
	    {
	      if (tree size = compute_objsize (dest, ostype))
		{
		  wide_int wioff = wi::to_wide (off);
		  wide_int wisiz = wi::to_wide (size);

		  /* Ignore negative offsets for now.  For others,
		     use the lower bound as the most optimistic
		     estimate of the (remaining) size.  */
		  if (wi::sign_mask (wioff))
		    ;
		  else if (wi::ltu_p (wioff, wisiz))
		    return wide_int_to_tree (TREE_TYPE (size),
					     wi::sub (wisiz, wioff));
		  else
		    return size_zero_node;
		}
	    }
	  else if (TREE_CODE (off) == SSA_NAME
		   && INTEGRAL_TYPE_P (TREE_TYPE (off)))
	    {
	      wide_int min, max;
	      enum value_range_kind rng = get_range_info (off, &min, &max);

	      if (rng == VR_RANGE)
		{
		  if (tree size = compute_objsize (dest, ostype))
		    {
		      wide_int wisiz = wi::to_wide (size);

		      /* Ignore negative offsets for now.  For others,
			 use the lower bound as the most optimistic
			 estimate of the (remaining) size.  */
		      if (wi::sign_mask (min))
			;
		      else if (wi::ltu_p (min, wisiz))
			return wide_int_to_tree (TREE_TYPE (size),
						 wi::sub (wisiz, min));
		      else
			return size_zero_node;
		    }
		}
	    }
	}
      else if (code != ADDR_EXPR)
	return NULL_TREE;
    }

  /* Unless computing the largest size (for memcpy and other raw memory
     functions), try to determine the size of the object from its type.  */
  if (!ostype)
    return NULL_TREE;

  if (TREE_CODE (dest) != ADDR_EXPR)
    return NULL_TREE;

  tree type = TREE_TYPE (dest);
  if (TREE_CODE (type) == POINTER_TYPE)
    type = TREE_TYPE (type);

  type = TYPE_MAIN_VARIANT (type);

  if (TREE_CODE (type) == ARRAY_TYPE
      && !array_at_struct_end_p (TREE_OPERAND (dest, 0)))
    {
      /* Return the constant size unless it's zero (that's a zero-length
	 array likely at the end of a struct).  */
      tree size = TYPE_SIZE_UNIT (type);
      if (size && TREE_CODE (size) == INTEGER_CST
	  && !integer_zerop (size))
	return size;
    }

  return NULL_TREE;
}
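
/* Illustrative results (a sketch of the logic above):

     char a[7];
     compute_objsize (&a[0], 0)   -> 7
     compute_objsize (&a[2], 0)   -> 5
     compute_objsize (a + i, 0)   -> 5 when i has the value range [2, 4],
				     using the smallest offset  */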
/* Helper to determine and check the sizes of the source and the destination
   of calls to __builtin_{bzero,memcpy,mempcpy,memset} calls.  EXP is the
   call expression, DEST is the destination argument, SRC is the source
   argument or null, and LEN is the number of bytes.  Use Object Size type-0
   regardless of the OPT_Wstringop_overflow_ setting.  Return true on success
   (no overflow or invalid sizes), false otherwise.  */

static bool
check_memop_access (tree exp, tree dest, tree src, tree size)
{
  /* For functions like memset and memcpy that operate on raw memory
     try to determine the size of the largest source and destination
     object using type-0 Object Size regardless of the object size
     type specified by the option.  */
  tree srcsize = src ? compute_objsize (src, 0) : NULL_TREE;
  tree dstsize = compute_objsize (dest, 0);

  return check_access (exp, dest, src, size, /*maxread=*/NULL_TREE,
		       srcsize, dstsize);
}
/* Validate memchr arguments without performing any expansion.
   Return NULL_RTX.  */

static rtx
expand_builtin_memchr (tree exp, rtx)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree len = CALL_EXPR_ARG (exp, 2);

  /* Diagnose calls where the specified length exceeds the size
     of the object.  */
  if (warn_stringop_overflow)
    {
      tree size = compute_objsize (arg1, 0);
      check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
		    /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE);
    }

  return NULL_RTX;
}
/* Expand a call EXP to the memcpy builtin.
   Return NULL_RTX if we failed, the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memcpy (tree exp, rtx target)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);

  check_memop_access (exp, dest, src, len);

  return expand_builtin_memory_copy_args (dest, src, len, target, exp,
					  /*endp=*/ 0);
}
/* Check a call EXP to the memmove built-in for validity.
   Return NULL_RTX on both success and failure.  */

static rtx
expand_builtin_memmove (tree exp, rtx)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);

  check_memop_access (exp, dest, src, len);

  return NULL_RTX;
}

/* Expand a call EXP to the mempcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  If ENDP is 0 return the
   destination pointer, if ENDP is 1 return the end pointer ala
   mempcpy, and if ENDP is 2 return the end pointer minus one ala
   stpcpy.  */

static rtx
expand_builtin_mempcpy (tree exp, rtx target)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);

  /* Policy does not generally allow using compute_objsize (which
     is used internally by check_memop_access) to change code generation
     or drive optimization decisions.

     In this instance it is safe because the code we generate has
     the same semantics regardless of the return value of
     check_memop_access.  Exactly the same amount of data is copied
     and the return value is exactly the same in both cases.

     Furthermore, check_memop_access always uses mode 0 for the call to
     compute_objsize, so the imprecise nature of compute_objsize is
     avoided and the return value is consistent for the purposes of
     code generation here.  */

  /* Avoid expanding mempcpy into memcpy when the call is determined
     to overflow the buffer.  This also prevents the same overflow
     from being diagnosed again when expanding memcpy.  */
  if (!check_memop_access (exp, dest, src, len))
    return NULL_RTX;

  return expand_builtin_mempcpy_args (dest, src, len,
                                      target, exp, /*endp=*/ 1);
}
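
/* Illustration (editor's note, not from the GCC sources): the ENDP
   encoding used throughout the copy expanders, for an N-byte copy
   into DST:

     endp == 0   return DST          (memcpy)
     endp == 1   return DST + N      (mempcpy)
     endp == 2   return DST + N - 1  (stpcpy; points at the copied nul)  */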

/* Helper function to do the actual work for expand of memory copy family
   functions (memcpy, mempcpy, stpcpy).  Expansion should assign LEN bytes
   of memory from SRC to DEST and assign to TARGET if convenient.
   If ENDP is 0 return the
   destination pointer, if ENDP is 1 return the end pointer ala
   mempcpy, and if ENDP is 2 return the end pointer minus one ala
   stpcpy.  */

static rtx
expand_builtin_memory_copy_args (tree dest, tree src, tree len,
                                 rtx target, tree exp, int endp)
{
  const char *src_str;
  unsigned int src_align = get_pointer_alignment (src);
  unsigned int dest_align = get_pointer_alignment (dest);
  rtx dest_mem, src_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;
  unsigned HOST_WIDE_INT min_size;
  unsigned HOST_WIDE_INT max_size;
  unsigned HOST_WIDE_INT probable_max_size;

  /* If DEST is not a pointer type, call the normal function.  */
  if (dest_align == 0)
    return NULL_RTX;

  /* If either SRC is not a pointer type, don't do this
     operation in-line.  */
  if (src_align == 0)
    return NULL_RTX;

  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
                            &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;
  dest_mem = get_memory_rtx (dest, len);
  set_mem_align (dest_mem, dest_align);
  len_rtx = expand_normal (len);
  determine_block_size (len, len_rtx, &min_size, &max_size,
                        &probable_max_size);
  src_str = c_getstr (src);

  /* If SRC is a string constant and block move would be done
     by pieces, we can avoid loading the string from memory
     and store only the computed constants.  */
  if (src_str
      && CONST_INT_P (len_rtx)
      && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
      && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
                              CONST_CAST (char *, src_str),
                              dest_align, false))
    {
      dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
                                  builtin_memcpy_read_str,
                                  CONST_CAST (char *, src_str),
                                  dest_align, false, endp);
      dest_mem = force_operand (XEXP (dest_mem, 0), target);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  src_mem = get_memory_rtx (src, len);
  set_mem_align (src_mem, src_align);

  /* Copy word part most expediently.  */
  enum block_op_methods method = BLOCK_OP_NORMAL;
  if (CALL_EXPR_TAILCALL (exp) && (endp == 0 || target == const0_rtx))
    method = BLOCK_OP_TAILCALL;
  if (endp == 1 && target != const0_rtx)
    method = BLOCK_OP_NO_LIBCALL_RET;
  dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
                                     expected_align, expected_size,
                                     min_size, max_size, probable_max_size);
  if (dest_addr == pc_rtx)
    return NULL_RTX;

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), target);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  if (endp && target != const0_rtx)
    {
      dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
      /* stpcpy pointer to last byte.  */
      if (endp == 2)
        dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
    }

  return dest_addr;
}
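
/* Illustration (editor's note, not from the GCC sources): for a constant
   source such as

     char buf[4];
     memcpy (buf, "abc", 4);

   the string need never be loaded at run time; can_store_by_pieces and
   store_by_pieces emit immediate stores built via builtin_memcpy_read_str,
   e.g. a single 32-bit store of 0x00636261 on a little-endian target.  */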

/* Helper function to do the actual work for expand_builtin_mempcpy.  The
   arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_mempcpy.  */

static rtx
expand_builtin_mempcpy_args (tree dest, tree src, tree len,
                             rtx target, tree orig_exp, int endp)
{
  return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
                                          endp);
}

/* Expand into a movstr instruction, if one is available.  Return NULL_RTX if
   we failed; the caller should emit a normal call, otherwise try to
   get the result in TARGET, if convenient.  If ENDP is 0 return the
   destination pointer, if ENDP is 1 return the end pointer ala
   mempcpy, and if ENDP is 2 return the end pointer minus one ala
   stpcpy.  */

static rtx
expand_movstr (tree dest, tree src, rtx target, int endp)
{
  struct expand_operand ops[3];
  rtx dest_mem;
  rtx src_mem;

  if (!targetm.have_movstr ())
    return NULL_RTX;

  dest_mem = get_memory_rtx (dest, NULL);
  src_mem = get_memory_rtx (src, NULL);
  if (!endp)
    {
      target = force_reg (Pmode, XEXP (dest_mem, 0));
      dest_mem = replace_equiv_address (dest_mem, target);
    }

  create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
  create_fixed_operand (&ops[1], dest_mem);
  create_fixed_operand (&ops[2], src_mem);
  if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
    return NULL_RTX;

  if (endp && target != const0_rtx)
    {
      target = ops[0].value;
      /* movstr is supposed to set end to the address of the NUL
         terminator.  If the caller requested a mempcpy-like return value,
         adjust it.  */
      if (endp == 1)
        {
          rtx tem = plus_constant (GET_MODE (target),
                                   gen_lowpart (GET_MODE (target), target), 1);
          emit_move_insn (target, force_operand (tem, NULL_RTX));
        }
    }
  return target;
}

/* Do some very basic size validation of a call to the strcat builtin
   given by EXP.  Return NULL_RTX to have the built-in expand to a call
   to the library function.  */

static rtx
expand_builtin_strcat (tree exp, rtx)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
      || !warn_stringop_overflow)
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);

  /* There is no way here to determine the length of the string in
     the destination to which the SRC string is being appended so
     just diagnose cases when the source string is longer than
     the destination object.  */

  tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);

  check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, src,
                destsize);

  return NULL_RTX;
}

/* Expand expression EXP, which is a call to the strcpy builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient (and in mode MODE if that's
   convenient).  */

static rtx
expand_builtin_strcpy (tree exp, rtx target)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);

  if (warn_stringop_overflow)
    {
      tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
      check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
                    src, destsize);
    }

  if (rtx ret = expand_builtin_strcpy_args (exp, dest, src, target))
    {
      /* Check to see if the argument was declared attribute nonstring
         and if so, issue a warning since at this point it's not known
         to be nul-terminated.  */
      tree fndecl = get_callee_fndecl (exp);
      maybe_warn_nonstring_arg (fndecl, exp);
      return ret;
    }

  return NULL_RTX;
}

/* Helper function to do the actual work for expand_builtin_strcpy.  The
   arguments to the builtin_strcpy call DEST and SRC are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_strcpy.  */

static rtx
expand_builtin_strcpy_args (tree exp, tree dest, tree src, rtx target)
{
  /* Detect strcpy calls with unterminated arrays.  */
  if (tree nonstr = unterminated_array (src))
    {
      /* NONSTR refers to the non-nul terminated constant array.  */
      if (!TREE_NO_WARNING (exp))
        warn_string_no_nul (EXPR_LOCATION (exp), "strcpy", src, nonstr);
      return NULL_RTX;
    }

  return expand_movstr (dest, src, target, /*endp=*/0);
}
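
/* Illustration (editor's note, not from the GCC sources):
   unterminated_array detects sources like

     const char a[3] = "abc";   // valid C, but the nul is dropped
     strcpy (d, a);             // warn_string_no_nul fires

   since strcpy would read past the end of A looking for a nul that
   was never stored.  */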

/* Expand a call EXP to the stpcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
{
  tree dst, src;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  if (warn_stringop_overflow)
    {
      tree destsize = compute_objsize (dst, warn_stringop_overflow - 1);
      check_access (exp, dst, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
                    src, destsize);
    }

  /* If return value is ignored, transform stpcpy into strcpy.  */
  if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      tree len, lenp1;
      rtx ret;

      /* Ensure we get an actual string whose length can be evaluated at
         compile-time, not an expression containing a string.  This is
         because the latter will potentially produce pessimized code
         when used to produce the return value.  */
      c_strlen_data data;
      memset (&data, 0, sizeof (c_strlen_data));
      if (!c_getstr (src, NULL)
          || !(len = c_strlen (src, 0, &data, 1)))
        return expand_movstr (dst, src, target, /*endp=*/2);

      if (data.decl && !TREE_NO_WARNING (exp))
        warn_string_no_nul (EXPR_LOCATION (exp), "stpcpy", src, data.decl);

      lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
      ret = expand_builtin_mempcpy_args (dst, src, lenp1,
                                         target, exp, /*endp=*/2);
      if (ret)
        return ret;

      if (TREE_CODE (len) == INTEGER_CST)
        {
          rtx len_rtx = expand_normal (len);

          if (CONST_INT_P (len_rtx))
            {
              ret = expand_builtin_strcpy_args (exp, dst, src, target);

              if (ret)
                {
                  if (! target)
                    {
                      if (mode != VOIDmode)
                        target = gen_reg_rtx (mode);
                      else
                        target = gen_reg_rtx (GET_MODE (ret));
                    }
                  if (GET_MODE (target) != GET_MODE (ret))
                    ret = gen_lowpart (GET_MODE (target), ret);

                  ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
                  ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
                  gcc_assert (ret);

                  return target;
                }
            }
        }

      return expand_movstr (dst, src, target, /*endp=*/2);
    }
}
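
/* Illustration (editor's note, not from the GCC sources): when the
   source length is known at compile time,

     char *p = stpcpy (d, "hi");

   is expanded, in effect, as a 3-byte copy (the string plus its nul)
   whose return value is d + 2, the address of the copied nul -- the
   endp == 2 case described above.  */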

/* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
   arguments while being careful to avoid duplicate warnings (which could
   be issued if the expander were to expand the call, resulting in it
   being emitted in expand_call()).  */

static rtx
expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
{
  if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
    {
      /* The call has been successfully expanded.  Check for nonstring
         arguments and issue warnings as appropriate.  */
      maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
      return ret;
    }

  return NULL_RTX;
}

/* Check a call EXP to the stpncpy built-in for validity.
   Return NULL_RTX on both success and failure.  */

static rtx
expand_builtin_stpncpy (tree exp, rtx)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
      || !warn_stringop_overflow)
    return NULL_RTX;

  /* The source and destination of the call.  */
  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);

  /* The exact number of bytes to write (not the maximum).  */
  tree len = CALL_EXPR_ARG (exp, 2);

  /* The size of the destination object.  */
  tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);

  check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src, destsize);

  return NULL_RTX;
}

/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

static rtx
builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
                          scalar_int_mode mode)
{
  const char *str = (const char *) data;

  if ((unsigned HOST_WIDE_INT) offset > strlen (str))
    return const0_rtx;

  return c_readstr (str + offset, mode);
}

/* Helper to check the sizes of sequences and the destination of calls
   to __builtin_strncat and __builtin___strncat_chk.  Returns true on
   success (no overflow or invalid sizes), false otherwise.  */

static bool
check_strncat_sizes (tree exp, tree objsize)
{
  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  tree maxread = CALL_EXPR_ARG (exp, 2);

  /* Try to determine the range of lengths that the source expression
     refers to.  */
  tree lenrange[2];
  get_range_strlen (src, lenrange);

  /* Try to verify that the destination is big enough for the shortest
     string.  */

  if (!objsize && warn_stringop_overflow)
    {
      /* If it hasn't been provided by __strncat_chk, try to determine
         the size of the destination object into which the source is
         being copied.  */
      objsize = compute_objsize (dest, warn_stringop_overflow - 1);
    }

  /* Add one for the terminating nul.  */
  tree srclen = (lenrange[0]
                 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
                                size_one_node)
                 : NULL_TREE);

  /* The strncat function copies at most MAXREAD bytes and always appends
     the terminating nul so the specified upper bound should never be equal
     to (or greater than) the size of the destination.  */
  if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
      && tree_int_cst_equal (objsize, maxread))
    {
      location_t loc = tree_nonartificial_location (exp);
      loc = expansion_point_location_if_in_system_header (loc);

      warning_at (loc, OPT_Wstringop_overflow_,
                  "%K%qD specified bound %E equals destination size",
                  exp, get_callee_fndecl (exp), maxread);

      return false;
    }

  if (!srclen
      || (maxread && tree_fits_uhwi_p (maxread)
          && tree_fits_uhwi_p (srclen)
          && tree_int_cst_lt (maxread, srclen)))
    srclen = maxread;

  /* The number of bytes to write is LEN but check_access will also
     check SRCLEN if LEN's value isn't known.  */
  return check_access (exp, dest, src, /*size=*/NULL_TREE, maxread, srclen,
                       objsize);
}
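
/* Illustration (editor's note, not from the GCC sources): the diagnostic
   above catches the common misuse

     char d[8] = "";
     strncat (d, s, sizeof d);   // bound equals destination size

   strncat always appends a nul after the copied bytes, so the safe bound
   is sizeof d - strlen (d) - 1, never sizeof d itself.  */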

/* Similar to expand_builtin_strcat, do some very basic size validation
   of a call to the strncat builtin given by EXP.  Return NULL_RTX to have
   the built-in expand to a call to the library function.  */

static rtx
expand_builtin_strncat (tree exp, rtx)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
      || !warn_stringop_overflow)
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  /* The upper bound on the number of bytes to write.  */
  tree maxread = CALL_EXPR_ARG (exp, 2);
  /* The length of the source sequence.  */
  tree slen = c_strlen (src, 1);

  /* Try to determine the range of lengths that the source expression
     refers to.  */
  tree lenrange[2];
  if (slen)
    lenrange[0] = lenrange[1] = slen;
  else
    get_range_strlen (src, lenrange);

  /* Try to verify that the destination is big enough for the shortest
     string.  First try to determine the size of the destination object
     into which the source is being copied.  */
  tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);

  /* Add one for the terminating nul.  */
  tree srclen = (lenrange[0]
                 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
                                size_one_node)
                 : NULL_TREE);

  /* The strncat function copies at most MAXREAD bytes and always appends
     the terminating nul so the specified upper bound should never be equal
     to (or greater than) the size of the destination.  */
  if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize)
      && tree_int_cst_equal (destsize, maxread))
    {
      location_t loc = tree_nonartificial_location (exp);
      loc = expansion_point_location_if_in_system_header (loc);

      warning_at (loc, OPT_Wstringop_overflow_,
                  "%K%qD specified bound %E equals destination size",
                  exp, get_callee_fndecl (exp), maxread);

      return NULL_RTX;
    }

  if (!srclen
      || (maxread && tree_fits_uhwi_p (maxread)
          && tree_fits_uhwi_p (srclen)
          && tree_int_cst_lt (maxread, srclen)))
    srclen = maxread;

  /* The number of bytes to write is SRCLEN.  */
  check_access (exp, dest, src, NULL_TREE, maxread, srclen, destsize);

  return NULL_RTX;
}

/* Expand expression EXP, which is a call to the strncpy builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  */

static rtx
expand_builtin_strncpy (tree exp, rtx target)
{
  location_t loc = EXPR_LOCATION (exp);

  if (validate_arglist (exp,
                        POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      /* The number of bytes to write (not the maximum).  */
      tree len = CALL_EXPR_ARG (exp, 2);
      /* The length of the source sequence.  */
      tree slen = c_strlen (src, 1);

      if (warn_stringop_overflow)
        {
          tree destsize = compute_objsize (dest,
                                           warn_stringop_overflow - 1);

          /* The number of bytes to write is LEN but check_access will also
             check SLEN if LEN's value isn't known.  */
          check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src,
                        destsize);
        }

      /* We must be passed a constant len and src parameter.  */
      if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
        return NULL_RTX;

      slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

      /* We're required to pad with trailing zeros if the requested
         len is greater than strlen(s2)+1.  In that case try to
         use store_by_pieces, if it fails, punt.  */
      if (tree_int_cst_lt (slen, len))
        {
          unsigned int dest_align = get_pointer_alignment (dest);
          const char *p = c_getstr (src);
          rtx dest_mem;

          if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
              || !can_store_by_pieces (tree_to_uhwi (len),
                                       builtin_strncpy_read_str,
                                       CONST_CAST (char *, p),
                                       dest_align, false))
            return NULL_RTX;

          dest_mem = get_memory_rtx (dest, len);
          store_by_pieces (dest_mem, tree_to_uhwi (len),
                           builtin_strncpy_read_str,
                           CONST_CAST (char *, p), dest_align, false, 0);
          dest_mem = force_operand (XEXP (dest_mem, 0), target);
          dest_mem = convert_memory_address (ptr_mode, dest_mem);
          return dest_mem;
        }
    }
  return NULL_RTX;
}
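
/* Illustration (editor's note, not from the GCC sources): the padding
   case above covers e.g.

     char d[5];
     strncpy (d, "ab", 5);   // stores 'a', 'b', '\0', '\0', '\0'

   i.e. LEN > strlen (SRC) + 1, so builtin_strncpy_read_str supplies
   zeros (const0_rtx) for offsets past the end of the string.  */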

/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

static rtx
builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                         scalar_int_mode mode)
{
  const char *c = (const char *) data;
  char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));

  memset (p, *c, GET_MODE_SIZE (mode));

  return c_readstr (p, mode);
}

/* Callback routine for store_by_pieces.  Return the RTL of a register
   containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
   char value given in the RTL register data.  For example, if mode is
   4 bytes wide, return the RTL for 0x01010101*data.  */

static rtx
builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                        scalar_int_mode mode)
{
  rtx target, coeff;
  size_t size;
  char *p;

  size = GET_MODE_SIZE (mode);
  if (size == 1)
    return (rtx) data;

  p = XALLOCAVEC (char, size);
  memset (p, 1, size);
  coeff = c_readstr (p, mode);

  target = convert_to_mode (mode, (rtx) data, 1);
  target = expand_mult (mode, target, coeff, NULL_RTX, 1);
  return force_reg (mode, target);
}
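
/* Illustration (editor's note, not from the GCC sources): for a
   non-constant fill byte V and a 4-byte mode, the code above computes

     (V & 0xff) * 0x01010101

   which replicates the byte into every byte lane; c_readstr on the
   all-0x01 buffer is what produces the 0x01010101 coefficient.  */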

/* Expand expression EXP, which is a call to the memset builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient (and in mode MODE if that's
   convenient).  */

static rtx
expand_builtin_memset (tree exp, rtx target, machine_mode mode)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree val = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);

  check_memop_access (exp, dest, NULL_TREE, len);

  return expand_builtin_memset_args (dest, val, len, target, mode, exp);
}

/* Helper function to do the actual work for expand_builtin_memset.  The
   arguments to the builtin_memset call DEST, VAL, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_memset.  */

static rtx
expand_builtin_memset_args (tree dest, tree val, tree len,
                            rtx target, machine_mode mode, tree orig_exp)
{
  tree fndecl, fn;
  enum built_in_function fcode;
  machine_mode val_mode;
  char c;
  unsigned int dest_align;
  rtx dest_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;
  unsigned HOST_WIDE_INT min_size;
  unsigned HOST_WIDE_INT max_size;
  unsigned HOST_WIDE_INT probable_max_size;

  dest_align = get_pointer_alignment (dest);

  /* If DEST is not a pointer type, don't do this operation in-line.  */
  if (dest_align == 0)
    return NULL_RTX;

  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
                            &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Evaluate and ignore VAL in case it has side-effects.  */
      expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (dest, target, mode, EXPAND_NORMAL);
    }

  /* Stabilize the arguments in case we fail.  */
  dest = builtin_save_expr (dest);
  val = builtin_save_expr (val);
  len = builtin_save_expr (len);

  len_rtx = expand_normal (len);
  determine_block_size (len, len_rtx, &min_size, &max_size,
                        &probable_max_size);
  dest_mem = get_memory_rtx (dest, len);
  val_mode = TYPE_MODE (unsigned_char_type_node);

  if (TREE_CODE (val) != INTEGER_CST)
    {
      rtx val_rtx;

      val_rtx = expand_normal (val);
      val_rtx = convert_to_mode (val_mode, val_rtx, 0);

      /* Assume that we can memset by pieces if we can store
       * the coefficients by pieces (in the required modes).
       * We can't pass builtin_memset_gen_str as that emits RTL.  */
      c = 1;
      if (tree_fits_uhwi_p (len)
          && can_store_by_pieces (tree_to_uhwi (len),
                                  builtin_memset_read_str, &c, dest_align,
                                  true))
        {
          val_rtx = force_reg (val_mode, val_rtx);
          store_by_pieces (dest_mem, tree_to_uhwi (len),
                           builtin_memset_gen_str, val_rtx, dest_align,
                           true, 0);
        }
      else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
                                        dest_align, expected_align,
                                        expected_size, min_size, max_size,
                                        probable_max_size))
        goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  if (target_char_cast (val, &c))
    goto do_libcall;

  if (c)
    {
      if (tree_fits_uhwi_p (len)
          && can_store_by_pieces (tree_to_uhwi (len),
                                  builtin_memset_read_str, &c, dest_align,
                                  true))
        store_by_pieces (dest_mem, tree_to_uhwi (len),
                         builtin_memset_read_str, &c, dest_align, true, 0);
      else if (!set_storage_via_setmem (dest_mem, len_rtx,
                                        gen_int_mode (c, val_mode),
                                        dest_align, expected_align,
                                        expected_size, min_size, max_size,
                                        probable_max_size))
        goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  set_mem_align (dest_mem, dest_align);
  dest_addr = clear_storage_hints (dest_mem, len_rtx,
                                   CALL_EXPR_TAILCALL (orig_exp)
                                   ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
                                   expected_align, expected_size,
                                   min_size, max_size,
                                   probable_max_size);

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  return dest_addr;

 do_libcall:
  fndecl = get_callee_fndecl (orig_exp);
  fcode = DECL_FUNCTION_CODE (fndecl);
  if (fcode == BUILT_IN_MEMSET)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
                                dest, val, len);
  else if (fcode == BUILT_IN_BZERO)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
                                dest, len);
  else
    gcc_unreachable ();
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
  return expand_call (fn, target, target == const0_rtx);
}

/* Expand expression EXP, which is a call to the bzero builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  */

static rtx
expand_builtin_bzero (tree exp)
{
  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree size = CALL_EXPR_ARG (exp, 1);

  check_memop_access (exp, dest, NULL_TREE, size);

  /* New argument list transforming bzero(ptr x, int y) to
     memset(ptr x, int 0, size_t y).  This is done this way
     so that if it isn't expanded inline, we fall back to
     calling bzero instead of memset.  */

  location_t loc = EXPR_LOCATION (exp);

  return expand_builtin_memset_args (dest, integer_zero_node,
                                     fold_convert_loc (loc,
                                                       size_type_node, size),
                                     const0_rtx, VOIDmode, exp);
}
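
/* Illustration (editor's note, not from the GCC sources):

     bzero (p, n);   // expands like memset (p, 0, (size_t) n)

   but ORIG_EXP is kept as the bzero call, so a failed inline expansion
   falls back to calling bzero, not memset.  */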

/* Try to expand cmpstr operation ICODE with the given operands.
   Return the result rtx on success, otherwise return null.  */

static rtx
expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
               HOST_WIDE_INT align)
{
  machine_mode insn_mode = insn_data[icode].operand[0].mode;

  if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
    target = NULL_RTX;

  struct expand_operand ops[4];
  create_output_operand (&ops[0], target, insn_mode);
  create_fixed_operand (&ops[1], arg1_rtx);
  create_fixed_operand (&ops[2], arg2_rtx);
  create_integer_operand (&ops[3], align);
  if (maybe_expand_insn (icode, 4, ops))
    return ops[0].value;
  return NULL_RTX;
}

/* Expand expression EXP, which is a call to the memcmp built-in function.
   Return NULL_RTX if we failed and the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient.
   RESULT_EQ is true if we can relax the returned value to be either zero
   or nonzero, without caring about the sign.  */

static rtx
expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree arg2 = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);
  enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
  bool no_overflow = true;

  /* Diagnose calls where the specified length exceeds the size of either
     object.  */
  tree size = compute_objsize (arg1, 0);
  no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
                              len, /*maxread=*/NULL_TREE, size,
                              /*objsize=*/NULL_TREE);
  if (no_overflow)
    {
      size = compute_objsize (arg2, 0);
      no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
                                  len, /*maxread=*/NULL_TREE, size,
                                  /*objsize=*/NULL_TREE);
    }

  /* If the specified length exceeds the size of either object,
     call the function.  */
  if (!no_overflow)
    return NULL_RTX;

  /* Due to the performance benefit, always inline the calls first
     when result_eq is false.  */
  rtx result = NULL_RTX;

  if (!result_eq && fcode != BUILT_IN_BCMP)
    {
      result = inline_expand_builtin_string_cmp (exp, target);
      if (result)
        return result;
    }

  machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  location_t loc = EXPR_LOCATION (exp);

  unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
  unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

  /* If we don't have POINTER_TYPE, call the function.  */
  if (arg1_align == 0 || arg2_align == 0)
    return NULL_RTX;

  rtx arg1_rtx = get_memory_rtx (arg1, len);
  rtx arg2_rtx = get_memory_rtx (arg2, len);
  rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));

  /* Set MEM_SIZE as appropriate.  */
  if (CONST_INT_P (len_rtx))
    {
      set_mem_size (arg1_rtx, INTVAL (len_rtx));
      set_mem_size (arg2_rtx, INTVAL (len_rtx));
    }

  by_pieces_constfn constfn = NULL;

  const char *src_str = c_getstr (arg2);
  if (result_eq && src_str == NULL)
    {
      src_str = c_getstr (arg1);
      if (src_str != NULL)
        std::swap (arg1_rtx, arg2_rtx);
    }

  /* If SRC is a string constant and block move would be done
     by pieces, we can avoid loading the string from memory
     and store only the computed constants.  */
  if (src_str
      && CONST_INT_P (len_rtx)
      && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
    constfn = builtin_memcpy_read_str;

  result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
                                 TREE_TYPE (len), target,
                                 result_eq, constfn,
                                 CONST_CAST (char *, src_str));

  if (result)
    {
      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (result) == mode)
        return result;

      if (target != 0)
        {
          convert_move (target, result, 0);
          return target;
        }

      return convert_to_mode (mode, result, 0);
    }

  return NULL_RTX;
}
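
/* Illustration (editor's note, not from the GCC sources): RESULT_EQ holds
   for uses such as

     if (memcmp (a, b, n) == 0) ...

   where only zero/nonzero matters; the expander may then compare chunks
   in any convenient order instead of computing the ordered, sign-carrying
   result that a bare memcmp value requires.  */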

/* Expand expression EXP, which is a call to the strcmp builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Due to the performance benefit, always inline the calls first.  */
  rtx result = NULL_RTX;
  result = inline_expand_builtin_string_cmp (exp, target);
  if (result)
    return result;

  insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
  insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
  if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
    return NULL_RTX;

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree arg2 = CALL_EXPR_ARG (exp, 1);

  unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
  unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

  /* If we don't have POINTER_TYPE, call the function.  */
  if (arg1_align == 0 || arg2_align == 0)
    return NULL_RTX;

  /* Stabilize the arguments in case gen_cmpstr(n)si fail.  */
  arg1 = builtin_save_expr (arg1);
  arg2 = builtin_save_expr (arg2);

  rtx arg1_rtx = get_memory_rtx (arg1, NULL);
  rtx arg2_rtx = get_memory_rtx (arg2, NULL);

  /* Try to call cmpstrsi.  */
  if (cmpstr_icode != CODE_FOR_nothing)
    result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
                            MIN (arg1_align, arg2_align));

  /* Try to determine at least one length and call cmpstrnsi.  */
  if (!result && cmpstrn_icode != CODE_FOR_nothing)
    {
      tree len;
      rtx arg3_rtx;

      tree len1 = c_strlen (arg1, 1);
      tree len2 = c_strlen (arg2, 1);

      if (len1)
        len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
      if (len2)
        len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

      /* If we don't have a constant length for the first, use the length
         of the second, if we know it.  We don't require a constant for
         this case; some cost analysis could be done if both are available
         but neither is constant.  For now, assume they're equally cheap,
         unless one has side effects.  If both strings have constant lengths,
         use the smaller.  */

      if (!len1)
        len = len2;
      else if (!len2)
        len = len1;
      else if (TREE_SIDE_EFFECTS (len1))
        len = len2;
      else if (TREE_SIDE_EFFECTS (len2))
        len = len1;
      else if (TREE_CODE (len1) != INTEGER_CST)
        len = len2;
      else if (TREE_CODE (len2) != INTEGER_CST)
        len = len1;
      else if (tree_int_cst_lt (len1, len2))
        len = len1;
      else
        len = len2;

      /* If both arguments have side effects, we cannot optimize.  */
      if (len && !TREE_SIDE_EFFECTS (len))
        {
          arg3_rtx = expand_normal (len);
          result = expand_cmpstrn_or_cmpmem
            (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
             arg3_rtx, MIN (arg1_align, arg2_align));
        }
    }

  tree fndecl = get_callee_fndecl (exp);
  if (result)
    {
      /* Check to see if the argument was declared attribute nonstring
         and if so, issue a warning since at this point it's not known
         to be nul-terminated.  */
      maybe_warn_nonstring_arg (fndecl, exp);

      /* Return the value in the proper mode for this function.  */
      machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
      if (GET_MODE (result) == mode)
        return result;
      if (target == 0)
        return convert_to_mode (mode, result, 0);
      convert_move (target, result, 0);
      return target;
    }

  /* Expand the library call ourselves using a stabilized argument
     list to avoid re-evaluating the function's arguments twice.  */
  tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
  return expand_call (fn, target, target == const0_rtx);
}

/* Expand expression EXP, which is a call to the strncmp builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
                        ATTRIBUTE_UNUSED machine_mode mode)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Due to the performance benefit, always inline the calls first.  */
  rtx result = NULL_RTX;
  result = inline_expand_builtin_string_cmp (exp, target);
  if (result)
    return result;

  /* If c_strlen can determine an expression for one of the string
     lengths, and it doesn't have side effects, then emit cmpstrnsi
     using length MIN(strlen(string)+1, arg3).  */
  insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
  if (cmpstrn_icode == CODE_FOR_nothing)
    return NULL_RTX;

  tree len;

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree arg2 = CALL_EXPR_ARG (exp, 1);
  tree arg3 = CALL_EXPR_ARG (exp, 2);

  unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
  unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

  tree len1 = c_strlen (arg1, 1);
  tree len2 = c_strlen (arg2, 1);

  location_t loc = EXPR_LOCATION (exp);

  if (len1)
    len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
  if (len2)
    len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);

  tree len3 = fold_convert_loc (loc, sizetype, arg3);

  /* If we don't have a constant length for the first, use the length
     of the second, if we know it.  If neither string is constant length,
     use the given length argument.  We don't require a constant for
     this case; some cost analysis could be done if both are available
     but neither is constant.  For now, assume they're equally cheap,
     unless one has side effects.  If both strings have constant lengths,
     use the smaller.  */

  if (!len1 && !len2)
    len = len3;
  else if (!len1)
    len = len2;
  else if (!len2)
    len = len1;
  else if (TREE_SIDE_EFFECTS (len1))
    len = len2;
  else if (TREE_SIDE_EFFECTS (len2))
    len = len1;
  else if (TREE_CODE (len1) != INTEGER_CST)
    len = len2;
  else if (TREE_CODE (len2) != INTEGER_CST)
    len = len1;
  else if (tree_int_cst_lt (len1, len2))
    len = len1;
  else
    len = len2;

  /* If we are not using the given length, we must incorporate it here.
     The actual new length parameter will be MIN(len,arg3) in this case.  */
  if (len != len3)
    {
      len = fold_convert_loc (loc, sizetype, len);
      len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
    }
  rtx arg1_rtx = get_memory_rtx (arg1, len);
  rtx arg2_rtx = get_memory_rtx (arg2, len);
  rtx arg3_rtx = expand_normal (len);
  result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
                                     arg2_rtx, TREE_TYPE (len), arg3_rtx,
                                     MIN (arg1_align, arg2_align));

  tree fndecl = get_callee_fndecl (exp);
  if (result)
    {
      /* Check to see if the argument was declared attribute nonstring
         and if so, issue a warning since at this point it's not known
         to be nul-terminated.  */
      maybe_warn_nonstring_arg (fndecl, exp);

      /* Return the value in the proper mode for this function.  */
      mode = TYPE_MODE (TREE_TYPE (exp));
      if (GET_MODE (result) == mode)
        return result;
      if (target == 0)
        return convert_to_mode (mode, result, 0);
      convert_move (target, result, 0);
      return target;
    }

  /* Expand the library call ourselves using a stabilized argument
     list to avoid re-evaluating the function's arguments twice.  */
  tree fn = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
  return expand_call (fn, target, target == const0_rtx);
}

/* Expand a call to __builtin_saveregs, generating the result in TARGET,
   if that's convenient.  */

rtx
expand_builtin_saveregs (void)
{
  rtx val;
  rtx_insn *seq;

  /* Don't do __builtin_saveregs more than once in a function.
     Save the result of the first call and reuse it.  */
  if (saveregs_value != 0)
    return saveregs_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the call to the
     first insn of this function.  */

  start_sequence ();

  /* Do whatever the machine needs done in this case.  */
  val = targetm.calls.expand_builtin_saveregs ();

  seq = get_insns ();
  end_sequence ();

  saveregs_value = val;

  /* Put the insns after the NOTE that starts the function.  If this
     is inside a start_sequence, make the outer-level insn chain current, so
     the code is placed at the start of the function.  */
  push_topmost_sequence ();
  emit_insn_after (seq, entry_of_function ());
  pop_topmost_sequence ();

  return val;
}

/* Expand a call to __builtin_next_arg.  */

static rtx
expand_builtin_next_arg (void)
{
  /* Checking arguments is already done in fold_builtin_next_arg
     that must be called before this function.  */
  return expand_binop (ptr_mode, add_optab,
                       crtl->args.internal_arg_pointer,
                       crtl->args.arg_offset_rtx,
                       NULL_RTX, 0, OPTAB_LIB_WIDEN);
}

/* Make it easier for the backends by protecting the valist argument
   from multiple evaluations.  */

static tree
stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
{
  tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));

  /* The current way of determining the type of valist is completely
     bogus.  We should have the information on the va builtin instead.  */
  if (!vatype)
    vatype = targetm.fn_abi_va_list (cfun->decl);

  if (TREE_CODE (vatype) == ARRAY_TYPE)
    {
      if (TREE_SIDE_EFFECTS (valist))
        valist = save_expr (valist);

      /* For this case, the backends will be expecting a pointer to
         vatype, but it's possible we've actually been given an array
         (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
         So fix it.  */
      if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
        {
          tree p1 = build_pointer_type (TREE_TYPE (vatype));
          valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
        }
    }
  else
    {
      tree pt = build_pointer_type (vatype);

      if (! needs_lvalue)
        {
          if (! TREE_SIDE_EFFECTS (valist))
            return valist;

          valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
          TREE_SIDE_EFFECTS (valist) = 1;
        }

      if (TREE_SIDE_EFFECTS (valist))
        valist = save_expr (valist);
      valist = fold_build2_loc (loc, MEM_REF,
                                vatype, valist, build_int_cst (pt, 0));
    }

  return valist;
}
5085 /* The "standard" definition of va_list is void*. */
5088 std_build_builtin_va_list (void)
5090 return ptr_type_node
;
5093 /* The "standard" abi va_list is va_list_type_node. */
5096 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED
)
5098 return va_list_type_node
;
5101 /* The "standard" type of va_list is va_list_type_node. */
5104 std_canonical_va_list_type (tree type
)
5108 wtype
= va_list_type_node
;
5111 if (TREE_CODE (wtype
) == ARRAY_TYPE
)
5113 /* If va_list is an array type, the argument may have decayed
5114 to a pointer type, e.g. by being passed to another function.
5115 In that case, unwrap both types so that we can compare the
5116 underlying records. */
5117 if (TREE_CODE (htype
) == ARRAY_TYPE
5118 || POINTER_TYPE_P (htype
))
5120 wtype
= TREE_TYPE (wtype
);
5121 htype
= TREE_TYPE (htype
);
5124 if (TYPE_MAIN_VARIANT (wtype
) == TYPE_MAIN_VARIANT (htype
))
5125 return va_list_type_node
;
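
/* Illustration (editor's note, not from the GCC sources): on targets
   where va_list is an array type (e.g. an array of one record, as on
   x86-64), a parameter declared

     void f (va_list ap);

   arrives with AP decayed to a pointer to the record, so the unwrapping
   above compares the underlying record types rather than the array and
   pointer types themselves.  */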
5130 /* The "standard" implementation of va_start: just assign `nextarg' to
5134 std_expand_builtin_va_start (tree valist
, rtx nextarg
)
5136 rtx va_r
= expand_expr (valist
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
5137 convert_move (va_r
, nextarg
, 0);

/* Expand EXP, a call to __builtin_va_start.  */

static rtx
expand_builtin_va_start (tree exp)
{
  rtx nextarg;
  tree valist;
  location_t loc = EXPR_LOCATION (exp);

  if (call_expr_nargs (exp) < 2)
    {
      error_at (loc, "too few arguments to function %<va_start%>");
      return const0_rtx;
    }

  if (fold_builtin_next_arg (exp, true))
    return const0_rtx;

  nextarg = expand_builtin_next_arg ();
  valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);

  if (targetm.expand_builtin_va_start)
    targetm.expand_builtin_va_start (valist, nextarg);
  else
    std_expand_builtin_va_start (valist, nextarg);

  return const0_rtx;
}

/* Expand EXP, a call to __builtin_va_end.  */

static rtx
expand_builtin_va_end (tree exp)
{
  tree valist = CALL_EXPR_ARG (exp, 0);

  /* Evaluate for side effects, if needed.  I hate macros that don't
     do that.  */
  if (TREE_SIDE_EFFECTS (valist))
    expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);

  return const0_rtx;
}

/* Expand EXP, a call to __builtin_va_copy.  We do this as a
   builtin rather than just as an assignment in stdarg.h because of the
   nastiness of array-type va_list types.  */

static rtx
expand_builtin_va_copy (tree exp)
{
  tree dst, src, t;
  location_t loc = EXPR_LOCATION (exp);

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  dst = stabilize_va_list_loc (loc, dst, 1);
  src = stabilize_va_list_loc (loc, src, 0);

  gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);

  if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
    {
      t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
  else
    {
      rtx dstb, srcb, size;

      /* Evaluate to pointers.  */
      dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
      srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
      size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
                          NULL_RTX, VOIDmode, EXPAND_NORMAL);

      dstb = convert_memory_address (Pmode, dstb);
      srcb = convert_memory_address (Pmode, srcb);

      /* "Dereference" to BLKmode memories.  */
      dstb = gen_rtx_MEM (BLKmode, dstb);
      set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
      set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
      srcb = gen_rtx_MEM (BLKmode, srcb);
      set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
      set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));

      /* Copy.  */
      emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
    }

  return const0_rtx;
}

/* Expand a call to one of the builtin functions __builtin_frame_address or
   __builtin_return_address.  */

static rtx
expand_builtin_frame_address (tree fndecl, tree exp)
{
  /* The argument must be a nonnegative integer constant.
     It counts the number of frames to scan up the stack.
     The value is either the frame pointer value or the return
     address saved in that frame.  */
  if (call_expr_nargs (exp) == 0)
    /* Warning about missing arg was already issued.  */
    return const0_rtx;
  else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
    {
      error ("invalid argument to %qD", fndecl);
      return const0_rtx;
    }
  else
    {
      /* Number of frames to scan up the stack.  */
      unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));

      rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);

      /* Some ports cannot access arbitrary stack frames.  */
      if (tem == NULL)
        {
          warning (0, "unsupported argument to %qD", fndecl);
          return const0_rtx;
        }

      if (count)
        {
          /* Warn since no effort is made to ensure that any frame
             beyond the current one exists or can be safely reached.  */
          warning (OPT_Wframe_address, "calling %qD with "
                   "a nonzero argument is unsafe", fndecl);
        }

      /* For __builtin_frame_address, return what we've got.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
        return tem;

      if (!REG_P (tem)
          && ! CONSTANT_P (tem))
        tem = copy_addr_to_reg (tem);
      return tem;
    }
}

/* Expand EXP, a call to the alloca builtin.  Return NULL_RTX if we
   failed and the caller should emit a normal call.  */

static rtx
expand_builtin_alloca (tree exp)
{
  rtx op0;
  rtx result;
  unsigned int align;
  tree fndecl = get_callee_fndecl (exp);
  HOST_WIDE_INT max_size;
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
  bool valid_arglist
    = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
       ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
                           VOID_TYPE)
       : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
         ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
         : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));

  if (!valid_arglist)
    return NULL_RTX;

  if ((alloca_for_var
       && warn_vla_limit >= HOST_WIDE_INT_MAX
       && warn_alloc_size_limit < warn_vla_limit)
      || (!alloca_for_var
          && warn_alloca_limit >= HOST_WIDE_INT_MAX
          && warn_alloc_size_limit < warn_alloca_limit))
    {
      /* -Walloca-larger-than and -Wvla-larger-than settings of
         less than HOST_WIDE_INT_MAX override the more general
         -Walloc-size-larger-than so unless either of the former
         options is smaller than the last one (which would imply
         that the call was already checked), check the alloca
         arguments for overflow.  */
      tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
      int idx[] = { 0, -1 };
      maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
    }

  /* Compute the argument.  */
  op0 = expand_normal (CALL_EXPR_ARG (exp, 0));

  /* Compute the alignment.  */
  align = (fcode == BUILT_IN_ALLOCA
           ? BIGGEST_ALIGNMENT
           : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));

  /* Compute the maximum size.  */
  max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
              ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
              : -1);

  /* Allocate the desired space.  If the allocation stems from the declaration
     of a variable-sized object, it cannot accumulate.  */
  result
    = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
  result = convert_memory_address (ptr_mode, result);

  return result;
}
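
/* Illustration (editor's note, not from the GCC sources): the three
   entry points handled above correspond to

     __builtin_alloca (n)                           // align = BIGGEST_ALIGNMENT
     __builtin_alloca_with_align (n, bits)          // explicit alignment
     __builtin_alloca_with_align_and_max (n, bits, max)

   where the *_with_align forms take the requested alignment in bits as
   an integer constant.  */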

/* Emit a call to __asan_allocas_unpoison for EXP.  Add to the second
   argument of the call virtual_stack_dynamic_rtx - stack_pointer_rtx,
   which is the STACK_DYNAMIC_OFFSET value.  See the motivation for this
   in the comment to the handle_builtin_stack_restore function.  */

static rtx
expand_asan_emit_allocas_unpoison (tree exp)
{
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);
  rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
                                 stack_pointer_rtx, NULL_RTX, 0,
                                 OPTAB_LIB_WIDEN);
  off = convert_modes (ptr_mode, Pmode, off, 0);
  bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
                             OPTAB_LIB_WIDEN);
  rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
  ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
                                 top, ptr_mode, bot, ptr_mode);
  return ret;
}

/* Expand a call to bswap builtin in EXP.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
                      rtx subtarget)
{
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  op0 = expand_expr (arg,
                     subtarget && GET_MODE (subtarget) == target_mode
                     ? subtarget : NULL_RTX,
                     target_mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != target_mode)
    op0 = convert_to_mode (target_mode, op0, 1);

  target = expand_unop (target_mode, bswap_optab, op0, target, 1);

  gcc_assert (target);

  return convert_to_mode (target_mode, target, 1);
}

/* Expand a call to a unary builtin in EXP.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
                     rtx subtarget, optab op_optab)
{
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Compute the argument.  */
  op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
                     (subtarget
                      && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
                          == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
                     VOIDmode, EXPAND_NORMAL);
  /* Compute op, into TARGET if possible.
     Set TARGET to wherever the result comes back.  */
  target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
                        op_optab, op0, target, op_optab != clrsb_optab);
  gcc_assert (target);

  return convert_to_mode (target_mode, target, 0);
}

/* Expand a call to __builtin_expect.  We just return our argument
   as the builtin_expect semantic should've been already executed by
   tree branch prediction pass.  */

static rtx
expand_builtin_expect (tree exp, rtx target)
{
  tree arg;

  if (call_expr_nargs (exp) < 2)
    return const0_rtx;
  arg = CALL_EXPR_ARG (exp, 0);

  target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
  /* When guessing was done, the hints should be already stripped away.  */
  gcc_assert (!flag_guess_branch_prob
              || optimize == 0 || seen_error ());
  return target;
}

/* Expand a call to __builtin_expect_with_probability.  We just return our
   argument as the builtin_expect semantic should've been already executed by
   tree branch prediction pass.  */

static rtx
expand_builtin_expect_with_probability (tree exp, rtx target)
{
  tree arg;

  if (call_expr_nargs (exp) < 3)
    return const0_rtx;
  arg = CALL_EXPR_ARG (exp, 0);

  target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
  /* When guessing was done, the hints should be already stripped away.  */
  gcc_assert (!flag_guess_branch_prob
              || optimize == 0 || seen_error ());
  return target;
}

/* Expand a call to __builtin_assume_aligned.  We just return our first
   argument as the builtin_assume_aligned semantic should've been already
   executed by CCP.  */

static rtx
expand_builtin_assume_aligned (tree exp, rtx target)
{
  if (call_expr_nargs (exp) < 2)
    return const0_rtx;
  target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
                        EXPAND_NORMAL);
  gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
              && (call_expr_nargs (exp) < 3
                  || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
  return target;
}

static void
expand_builtin_trap (void)
{
  if (targetm.have_trap ())
    {
      rtx_insn *insn = emit_insn (targetm.gen_trap ());
      /* For trap insns when not accumulating outgoing args force
         REG_ARGS_SIZE note to prevent crossjumping of calls with
         different args sizes.  */
      if (!ACCUMULATE_OUTGOING_ARGS)
        add_args_size_note (insn, stack_pointer_delta);
    }
  else
    {
      tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
      tree call_expr = build_call_expr (fn, 0);
      expand_call (call_expr, NULL_RTX, false);
    }

  emit_barrier ();
}

/* Expand a call to __builtin_unreachable.  We do nothing except emit
   a barrier saying that control flow will not pass here.

   It is the responsibility of the program being compiled to ensure
   that control flow never reaches __builtin_unreachable.  */

static void
expand_builtin_unreachable (void)
{
  emit_barrier ();
}

/* Expand EXP, a call to fabs, fabsf or fabsl.
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function inline.  If convenient, the result should be placed
   in TARGET.  SUBTARGET may be used as the target for computing
   the operand.  */

static rtx
expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
{
  machine_mode mode;
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
  return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
}

/* Expand EXP, a call to copysign, copysignf, or copysignl.
   Return NULL if a normal call should be emitted rather than expanding the
   function inline.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing the operand.  */

static rtx
expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
{
  rtx op0, op1;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

  arg = CALL_EXPR_ARG (exp, 1);
  op1 = expand_normal (arg);

  return expand_copysign (op0, op1, target);
}

/* Expand a call to __builtin___clear_cache.  */

static rtx
expand_builtin___clear_cache (tree exp)
{
  if (!targetm.code_for_clear_cache)
    {
#ifdef CLEAR_INSN_CACHE
      /* There is no "clear_cache" insn, and __clear_cache() in libgcc
         does something.  Just do the default expansion to a call to
         __clear_cache().  */
      return NULL_RTX;
#else
      /* There is no "clear_cache" insn, and __clear_cache() in libgcc
         does nothing.  There is no need to call it.  Do nothing.  */
      return const0_rtx;
#endif /* CLEAR_INSN_CACHE */
    }

  /* We have a "clear_cache" insn, and it will handle everything.  */
  tree begin, end;
  rtx begin_rtx, end_rtx;

  /* We must not expand to a library call.  If we did, any
     fallback library function in libgcc that might contain a call to
     __builtin___clear_cache() would recurse infinitely.  */
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    {
      error ("both arguments to %<__builtin___clear_cache%> must be pointers");
      return const0_rtx;
    }

  if (targetm.have_clear_cache ())
    {
      struct expand_operand ops[2];

      begin = CALL_EXPR_ARG (exp, 0);
      begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);

      end = CALL_EXPR_ARG (exp, 1);
      end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);

      create_address_operand (&ops[0], begin_rtx);
      create_address_operand (&ops[1], end_rtx);
      if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
        return const0_rtx;
    }
  return const0_rtx;
}

/* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT.  */

static rtx
round_trampoline_addr (rtx tramp)
{
  rtx temp, addend, mask;

  /* If we don't need too much alignment, we'll have been guaranteed
     proper alignment by get_trampoline_type.  */
  if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
    return tramp;

  /* Round address up to desired boundary.  */
  temp = gen_reg_rtx (Pmode);
  addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
  mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);

  temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
                              temp, 0, OPTAB_LIB_WIDEN);
  tramp = expand_simple_binop (Pmode, AND, temp, mask,
                               temp, 0, OPTAB_LIB_WIDEN);

  return tramp;
}

static rtx
expand_builtin_init_trampoline (tree exp, bool onstack)
{
  tree t_tramp, t_func, t_chain;
  rtx m_tramp, r_tramp, r_chain, tmp;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
                         POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_tramp = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  r_tramp = expand_normal (t_tramp);
  m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
  MEM_NOTRAP_P (m_tramp) = 1;

  /* If ONSTACK, the TRAMP argument should be the address of a field
     within the local function's FRAME decl.  Either way, let's see if
     we can fill in the MEM_ATTRs for this memory.  */
  if (TREE_CODE (t_tramp) == ADDR_EXPR)
    set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);

  /* Creator of a heap trampoline is responsible for making sure the
     address is aligned to at least STACK_BOUNDARY.  Normally malloc
     will ensure this anyhow.  */
  tmp = round_trampoline_addr (r_tramp);
  if (tmp != r_tramp)
    {
      m_tramp = change_address (m_tramp, BLKmode, tmp);
      set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
      set_mem_size (m_tramp, TRAMPOLINE_SIZE);
    }

  /* The FUNC argument should be the address of the nested function.
     Extract the actual function decl to pass to the hook.  */
  gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
  t_func = TREE_OPERAND (t_func, 0);
  gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);

  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the trampoline.  */
  targetm.calls.trampoline_init (m_tramp, t_func, r_chain);

  if (onstack)
    {
      trampolines_created = 1;

      if (targetm.calls.custom_function_descriptors != 0)
        warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
                    "trampoline generated for nested function %qD", t_func);
    }

  return const0_rtx;
}

static rtx
expand_builtin_adjust_trampoline (tree exp)
{
  rtx tramp;

  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
  tramp = round_trampoline_addr (tramp);
  if (targetm.calls.trampoline_adjust_address)
    tramp = targetm.calls.trampoline_adjust_address (tramp);

  return tramp;
}

/* Expand a call to the builtin descriptor initialization routine.
   A descriptor is made up of a couple of pointers to the static
   chain and the code entry in this order.  */

static rtx
expand_builtin_init_descriptor (tree exp)
{
  tree t_descr, t_func, t_chain;
  rtx m_descr, r_descr, r_func, r_chain;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
                         VOID_TYPE))
    return NULL_RTX;

  t_descr = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  r_descr = expand_normal (t_descr);
  m_descr = gen_rtx_MEM (BLKmode, r_descr);
  MEM_NOTRAP_P (m_descr) = 1;

  r_func = expand_normal (t_func);
  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the descriptor.  */
  emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
  emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
                                     POINTER_SIZE / BITS_PER_UNIT), r_func);

  return const0_rtx;
}
5754 /* Expand a call to the builtin descriptor adjustment routine. */
5757 expand_builtin_adjust_descriptor (tree exp
)
5761 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
5764 tramp
= expand_normal (CALL_EXPR_ARG (exp
, 0));
5766 /* Unalign the descriptor to allow runtime identification. */
5767 tramp
= plus_constant (ptr_mode
, tramp
,
5768 targetm
.calls
.custom_function_descriptors
);
5770 return force_operand (tramp
, NULL_RTX
);
/* Expand the call EXP to the built-in signbit, signbitf or signbitl
   function.  The function first checks whether the back end provides
   an insn to implement signbit for the respective mode.  If not, it
   checks whether the floating point format of the value is such that
   the sign bit can be extracted.  If that is not the case, error out.
   EXP is the expression that is a call to the builtin function; if
   convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_signbit (tree exp, rtx target)
{
  const struct real_format *fmt;
  scalar_float_mode fmode;
  scalar_int_mode rmode, imode;
  tree arg;
  int word, bitpos;
  enum insn_code icode;
  rtx temp;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
  rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
  fmt = REAL_MODE_FORMAT (fmode);

  arg = builtin_save_expr (arg);

  /* Expand the argument yielding a RTX expression.  */
  temp = expand_normal (arg);

  /* Check if the back end provides an insn that handles signbit for the
     argument's mode.  */
  icode = optab_handler (signbit_optab, fmode);
  if (icode != CODE_FOR_nothing)
    {
      rtx_insn *last = get_last_insn ();
      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
	return target;
      delete_insns_since (last);
    }

  /* For floating point formats without a sign bit, implement signbit
     as "ARG < 0.0".  */
  bitpos = fmt->signbit_ro;
  if (bitpos < 0)
    {
      /* But we can't do this if the format supports signed zero.  */
      gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));

      arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
			     build_real (TREE_TYPE (arg), dconst0));
      return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
    }

  if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
    {
      imode = int_mode_for_mode (fmode).require ();
      temp = gen_lowpart (imode, temp);
    }
  else
    {
      imode = word_mode;
      /* Handle targets with different FP word orders.  */
      if (FLOAT_WORDS_BIG_ENDIAN)
	word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
      else
	word = bitpos / BITS_PER_WORD;
      temp = operand_subword_force (temp, word, fmode);
      bitpos = bitpos % BITS_PER_WORD;
    }

  /* Force the intermediate word_mode (or narrower) result into a
     register.  This avoids attempting to create paradoxical SUBREGs
     of floating point modes below.  */
  temp = force_reg (imode, temp);

  /* If the bitpos is within the "result mode" lowpart, the operation
     can be implemented with a single bitwise AND.  Otherwise, we need
     a right shift and an AND.  */

  if (bitpos < GET_MODE_BITSIZE (rmode))
    {
      wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));

      if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
	temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp,
			   immed_wide_int_const (mask, rmode),
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }
  else
    {
      /* Perform a logical right shift to place the signbit in the least
	 significant bit, then truncate the result to the desired mode
	 and mask just this bit.  */
      temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
      temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp, const1_rtx,
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }

  return temp;
}
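
/* Worked example (illustrative, not part of GCC): for IEEE double on a
   64-bit little-endian target, fmt->signbit_ro is 63 while the result
   mode rmode is typically 32 bits wide, so the shift-then-mask path is
   taken.  In C terms the emitted sequence behaves like:

     static int
     signbit_double_example (double x)
     {
       uint64_t bits;
       memcpy (&bits, &x, sizeof bits);
       return (int) ((bits >> 63) & 1);
     }

   For float, signbit_ro is 31, which fits inside a 32-bit rmode, so the
   single-AND path masks bit 31 directly instead of shifting.  */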
/* Expand fork or exec calls.  TARGET is the desired target of the
   call.  EXP is the call.  FN is the
   identifier of the actual function.  IGNORE is nonzero if the
   value is to be ignored.  */

static rtx
expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
{
  tree id, decl;
  tree call;

  /* If we are not profiling, just call the function.  */
  if (!profile_arc_flag)
    return NULL_RTX;

  /* Otherwise call the wrapper.  This should be equivalent for the rest of
     the compiler, so the code does not diverge, and the wrapper may run the
     code necessary for keeping the profiling sane.  */

  switch (DECL_FUNCTION_CODE (fn))
    {
    case BUILT_IN_FORK:
      id = get_identifier ("__gcov_fork");
      break;

    case BUILT_IN_EXECL:
      id = get_identifier ("__gcov_execl");
      break;

    case BUILT_IN_EXECV:
      id = get_identifier ("__gcov_execv");
      break;

    case BUILT_IN_EXECLP:
      id = get_identifier ("__gcov_execlp");
      break;

    case BUILT_IN_EXECLE:
      id = get_identifier ("__gcov_execle");
      break;

    case BUILT_IN_EXECVP:
      id = get_identifier ("__gcov_execvp");
      break;

    case BUILT_IN_EXECVE:
      id = get_identifier ("__gcov_execve");
      break;

    default:
      gcc_unreachable ();
    }

  decl = build_decl (DECL_SOURCE_LOCATION (fn),
		     FUNCTION_DECL, id, TREE_TYPE (fn));
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  TREE_NOTHROW (decl) = 1;
  DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
  DECL_VISIBILITY_SPECIFIED (decl) = 1;
  call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
  return expand_call (call, target, ignore);
}
/* Reconstitute a mode for a __sync intrinsic operation.  Since the type of
   the pointer in these functions is void*, the tree optimizers may remove
   casts.  The mode computed in expand_builtin isn't reliable either, due
   to __sync_bool_compare_and_swap.

   FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
   group of builtins.  This gives us log2 of the mode size.  */

static inline machine_mode
get_builtin_sync_mode (int fcode_diff)
{
  /* The size is not negotiable, so ask not to get BLKmode in return
     if the target indicates that a smaller size would be better.  */
  return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
}
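
/* Illustrative sketch (not part of GCC): the _1/_2/_4/_8/_16 variants of
   each __sync builtin have consecutive function codes, so FCODE_DIFF is
   exactly log2 of the access size in bytes and the expression above is
   equivalent to:

     static int
     sync_mode_bitsize_example (int fcode_diff)
     {
       return 8 << fcode_diff;
     }

   giving 8, 16, 32, 64 and 128 bits for fcode_diff 0 through 4.  */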
/* Expand the memory expression LOC and return the appropriate memory operand
   for the builtin_sync operations.  */

static rtx
get_builtin_sync_mem (tree loc, machine_mode mode)
{
  rtx addr, mem;
  int addr_space = TYPE_ADDR_SPACE (POINTER_TYPE_P (TREE_TYPE (loc))
				    ? TREE_TYPE (TREE_TYPE (loc))
				    : TREE_TYPE (loc));
  scalar_int_mode addr_mode = targetm.addr_space.address_mode (addr_space);

  addr = expand_expr (loc, NULL_RTX, addr_mode, EXPAND_SUM);
  addr = convert_memory_address (addr_mode, addr);

  /* Note that we explicitly do not want any alias information for this
     memory, so that we kill all other live memories.  Otherwise we don't
     satisfy the full barrier semantics of the intrinsic.  */
  mem = gen_rtx_MEM (mode, addr);

  set_mem_addr_space (mem, addr_space);

  mem = validize_mem (mem);

  /* The alignment needs to be at least according to that of the mode.  */
  set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
			   get_pointer_alignment (loc)));
  set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
  MEM_VOLATILE_P (mem) = 1;

  return mem;
}

/* Make sure an argument is in the right mode.
   EXP is the tree argument.
   MODE is the mode it should be in.  */

static rtx
expand_expr_force_mode (tree exp, machine_mode mode)
{
  rtx val;
  machine_mode old_mode;

  val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
  /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
     of CONST_INTs, where we know the old_mode only from the call argument.  */

  old_mode = GET_MODE (val);
  if (old_mode == VOIDmode)
    old_mode = TYPE_MODE (TREE_TYPE (exp));
  val = convert_modes (mode, old_mode, val, 1);
  return val;
}

/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
   EXP is the CALL_EXPR.  CODE is the rtx code
   that corresponds to the arithmetic or logical operation from the name;
   an exception here is that NOT actually means NAND.  TARGET is an optional
   place for us to store the results; AFTER is true if this is the
   xxx_and_fetch form.  */

static rtx
expand_builtin_sync_operation (machine_mode mode, tree exp,
			       enum rtx_code code, bool after,
			       rtx target)
{
  rtx val, mem;
  location_t loc = EXPR_LOCATION (exp);

  if (code == NOT && warn_sync_nand)
    {
      tree fndecl = get_callee_fndecl (exp);
      enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

      static bool warned_f_a_n, warned_n_a_f;

      switch (fcode)
	{
	case BUILT_IN_SYNC_FETCH_AND_NAND_1:
	case BUILT_IN_SYNC_FETCH_AND_NAND_2:
	case BUILT_IN_SYNC_FETCH_AND_NAND_4:
	case BUILT_IN_SYNC_FETCH_AND_NAND_8:
	case BUILT_IN_SYNC_FETCH_AND_NAND_16:
	  if (warned_f_a_n)
	    break;

	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_f_a_n = true;
	  break;

	case BUILT_IN_SYNC_NAND_AND_FETCH_1:
	case BUILT_IN_SYNC_NAND_AND_FETCH_2:
	case BUILT_IN_SYNC_NAND_AND_FETCH_4:
	case BUILT_IN_SYNC_NAND_AND_FETCH_8:
	case BUILT_IN_SYNC_NAND_AND_FETCH_16:
	  if (warned_n_a_f)
	    break;

	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_n_a_f = true;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
				 after);
}

/* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
   intrinsics.  EXP is the CALL_EXPR.  IS_BOOL is
   true if this is the boolean form.  TARGET is a place for us to store the
   results; this is NOT optional if IS_BOOL is true.  */

static rtx
expand_builtin_compare_and_swap (machine_mode mode, tree exp,
				 bool is_bool, rtx target)
{
  rtx old_val, new_val, mem;
  rtx *pbool, *poval;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
  new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);

  pbool = poval = NULL;
  if (target != const0_rtx)
    {
      if (is_bool)
	pbool = &target;
      else
	poval = &target;
    }
  if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
				       false, MEMMODEL_SYNC_SEQ_CST,
				       MEMMODEL_SYNC_SEQ_CST))
    return NULL_RTX;

  return target;
}

/* Expand the __sync_lock_test_and_set intrinsic.  Note that the most
   general form is actually an atomic exchange, and some targets only
   support a reduced form with the second argument being a constant 1.
   EXP is the CALL_EXPR; TARGET is an optional place for us to store
   the results.  */

static rtx
expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
				       rtx target)
{
  rtx val, mem;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_sync_lock_test_and_set (target, mem, val);
}

/* Expand the __sync_lock_release intrinsic.  EXP is the CALL_EXPR.  */

static void
expand_builtin_sync_lock_release (machine_mode mode, tree exp)
{
  rtx mem;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
}
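
/* Usage sketch (illustrative, not part of GCC): the two expanders above
   implement the classic spinlock pair.  User code such as:

     static volatile char lock_var;

     static void
     spinlock_example (void)
     {
       while (__sync_lock_test_and_set (&lock_var, 1))
	 ;
       critical_section ();
       __sync_lock_release (&lock_var);
     }

   acquires via an atomic exchange with 1 and releases via an atomic
   store of 0 with release semantics; critical_section is a hypothetical
   placeholder.  */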
/* Given an integer representing an ``enum memmodel'', verify its
   correctness and return the memory model enum.  */

static enum memmodel
get_memmodel (tree exp)
{
  rtx op;
  unsigned HOST_WIDE_INT val;
  location_t loc
    = expansion_point_location_if_in_system_header (input_location);

  /* If the parameter is not a constant, it's a run time value so we'll just
     convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking.  */
  if (TREE_CODE (exp) != INTEGER_CST)
    return MEMMODEL_SEQ_CST;

  op = expand_normal (exp);

  val = INTVAL (op);
  if (targetm.memmodel_check)
    val = targetm.memmodel_check (val);
  else if (val & ~MEMMODEL_MASK)
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "unknown architecture specifier in memory model to builtin");
      return MEMMODEL_SEQ_CST;
    }

  /* Should never see a user explicit SYNC memory model, so >= LAST works.  */
  if (memmodel_base (val) >= MEMMODEL_LAST)
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "invalid memory model argument to builtin");
      return MEMMODEL_SEQ_CST;
    }

  /* Workaround for Bugzilla 59448.  GCC doesn't track consume properly, so
     be conservative and promote consume to acquire.  */
  if (val == MEMMODEL_CONSUME)
    val = MEMMODEL_ACQUIRE;

  return (enum memmodel) val;
}
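
/* Worked example (illustrative, not part of GCC): in a call such as

     __atomic_load_n (&x, __ATOMIC_CONSUME);

   the model argument reaches get_memmodel as the INTEGER_CST 1
   (memory_order_consume).  It passes both range checks, and the
   Bugzilla 59448 workaround then promotes it, so the function returns
   MEMMODEL_ACQUIRE.  A model that is not a compile-time constant is
   conservatively treated as MEMMODEL_SEQ_CST instead.  */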
/* Expand the __atomic_exchange intrinsic:
	TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
{
  rtx val, mem;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_exchange (target, mem, val, model);
}
/* Expand the __atomic_compare_exchange intrinsic:
	bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
					TYPE desired, BOOL weak,
					enum memmodel success,
					enum memmodel failure)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
					rtx target)
{
  rtx expect, desired, mem, oldval;
  rtx_code_label *label;
  enum memmodel success, failure;
  tree weak;
  bool is_weak;
  location_t loc
    = expansion_point_location_if_in_system_header (input_location);

  success = get_memmodel (CALL_EXPR_ARG (exp, 4));
  failure = get_memmodel (CALL_EXPR_ARG (exp, 5));

  if (failure > success)
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "failure memory model cannot be stronger than success "
		  "memory model for %<__atomic_compare_exchange%>");
      success = MEMMODEL_SEQ_CST;
    }

  if (is_mm_release (failure) || is_mm_acq_rel (failure))
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "invalid failure memory model for "
		  "%<__atomic_compare_exchange%>");
      failure = MEMMODEL_SEQ_CST;
      success = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  expect = expand_normal (CALL_EXPR_ARG (exp, 1));
  expect = convert_memory_address (Pmode, expect);
  expect = gen_rtx_MEM (mode, expect);
  desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);

  weak = CALL_EXPR_ARG (exp, 3);
  is_weak = false;
  if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
    is_weak = true;

  if (target == const0_rtx)
    target = NULL;

  /* Lest the rtl backend create a race condition with an improper store
     to memory, always create a new pseudo for OLDVAL.  */
  oldval = NULL;

  if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
				       is_weak, success, failure))
    return NULL_RTX;

  /* Conditionally store back to EXPECT, lest we create a race condition
     with an improper store to memory.  */
  /* ??? With a rearrangement of atomics at the gimple level, we can handle
     the normal case where EXPECT is totally private, i.e. a register.  At
     which point the store can be unconditional.  */
  label = gen_label_rtx ();
  emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
			   GET_MODE (target), 1, label);
  emit_move_insn (expect, oldval);
  emit_label (label);

  return target;
}
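
/* Usage sketch (illustrative, not part of GCC): a typical source-level
   compare-exchange loop that reaches this expander:

     static int flag_var;

     static void
     set_flag_example (void)
     {
       int expected = 0;
       while (!__atomic_compare_exchange_n (&flag_var, &expected, 1, 0,
					    __ATOMIC_ACQ_REL,
					    __ATOMIC_ACQUIRE))
	 expected = 0;
     }

   On failure, the conditional store emitted above writes the value
   actually observed in memory back through EXPECTED, which is what lets
   the loop reset and retry.  */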
/* Helper function for expand_ifn_atomic_compare_exchange - expand
   internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
   call.  The weak parameter must be dropped to match the expected parameter
   list and the expected argument changed from value to pointer to memory
   location.  */

static void
expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
{
  unsigned int z;
  vec<tree, va_gc> *vec;

  vec_alloc (vec, 5);
  vec->quick_push (gimple_call_arg (call, 0));
  tree expected = gimple_call_arg (call, 1);
  rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
				      TREE_TYPE (expected));
  rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
  if (expd != x)
    emit_move_insn (x, expd);
  tree v = make_tree (TREE_TYPE (expected), x);
  vec->quick_push (build1 (ADDR_EXPR,
			   build_pointer_type (TREE_TYPE (expected)), v));
  vec->quick_push (gimple_call_arg (call, 2));
  /* Skip the boolean weak parameter.  */
  for (z = 4; z < 6; z++)
    vec->quick_push (gimple_call_arg (call, z));
  /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}.  */
  unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
  gcc_assert (bytes_log2 < 5);
  built_in_function fncode
    = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
			   + bytes_log2);
  tree fndecl = builtin_decl_explicit (fncode);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
		    fndecl);
  tree exp = build_call_vec (boolean_type_node, fn, vec);
  tree lhs = gimple_call_lhs (call);
  rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
  if (lhs)
    {
      rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
      if (GET_MODE (boolret) != mode)
	boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
      x = force_reg (mode, x);
      write_complex_part (target, boolret, true);
      write_complex_part (target, x, false);
    }
}

/* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function.  */

void
expand_ifn_atomic_compare_exchange (gcall *call)
{
  int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
  gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
  machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
  rtx expect, desired, mem, oldval, boolret;
  enum memmodel success, failure;
  tree lhs;
  bool is_weak;
  location_t loc
    = expansion_point_location_if_in_system_header (gimple_location (call));

  success = get_memmodel (gimple_call_arg (call, 4));
  failure = get_memmodel (gimple_call_arg (call, 5));

  if (failure > success)
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "failure memory model cannot be stronger than success "
		  "memory model for %<__atomic_compare_exchange%>");
      success = MEMMODEL_SEQ_CST;
    }

  if (is_mm_release (failure) || is_mm_acq_rel (failure))
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "invalid failure memory model for "
		  "%<__atomic_compare_exchange%>");
      failure = MEMMODEL_SEQ_CST;
      success = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    {
      expand_ifn_atomic_compare_exchange_into_call (call, mode);
      return;
    }

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);

  expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
  desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);

  is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;

  boolret = NULL;
  oldval = NULL;

  if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
				       is_weak, success, failure))
    {
      expand_ifn_atomic_compare_exchange_into_call (call, mode);
      return;
    }

  lhs = gimple_call_lhs (call);
  if (lhs)
    {
      rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
      if (GET_MODE (boolret) != mode)
	boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
      write_complex_part (target, boolret, true);
      write_complex_part (target, oldval, false);
    }
}
/* Expand the __atomic_load intrinsic:
	TYPE __atomic_load (TYPE *object, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
{
  rtx mem;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 1));
  if (is_mm_release (model) || is_mm_acq_rel (model))
    {
      location_t loc
	= expansion_point_location_if_in_system_header (input_location);
      warning_at (loc, OPT_Winvalid_memory_model,
		  "invalid memory model for %<__atomic_load%>");
      model = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operand.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  return expand_atomic_load (target, mem, model);
}

/* Expand the __atomic_store intrinsic:
	void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_store (machine_mode mode, tree exp)
{
  rtx mem, val;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));
  if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
	|| is_mm_release (model)))
    {
      location_t loc
	= expansion_point_location_if_in_system_header (input_location);
      warning_at (loc, OPT_Winvalid_memory_model,
		  "invalid memory model for %<__atomic_store%>");
      model = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_store (mem, val, model, false);
}
/* Expand the __atomic_fetch_XXX intrinsic:
	TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.
   CODE is the operation, PLUS, MINUS, AND, XOR, or IOR.
   FETCH_AFTER is true if returning the result of the operation.
   FETCH_AFTER is false if returning the value before the operation.
   IGNORE is true if the result is not used.
   EXT_CALL is the correct builtin for an external call if this cannot be
   resolved to an instruction sequence.  */

static rtx
expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
				enum rtx_code code, bool fetch_after,
				bool ignore, enum built_in_function ext_call)
{
  rtx val, mem, ret;
  enum memmodel model;
  tree fndecl;
  tree addr;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  /* Only try generating instructions if inlining is turned on.  */
  if (flag_inline_atomics)
    {
      ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
      if (ret)
	return ret;
    }

  /* Return if a different routine isn't needed for the library call.  */
  if (ext_call == BUILT_IN_NONE)
    return NULL_RTX;

  /* Change the call to the specified function.  */
  fndecl = get_callee_fndecl (exp);
  addr = CALL_EXPR_FN (exp);
  STRIP_NOPS (addr);

  gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
  TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);

  /* If we will emit code after the call, the call can not be a tail call.
     If it is emitted as a tail call, a barrier is emitted after it, and
     then all trailing code is removed.  */
  if (!ignore)
    CALL_EXPR_TAILCALL (exp) = 0;

  /* Expand the call here so we can emit trailing code.  */
  ret = expand_call (exp, target, ignore);

  /* Replace the original function just in case it matters.  */
  TREE_OPERAND (addr, 0) = fndecl;

  /* Then issue the arithmetic correction to return the right result.  */
  if (!ignore)
    {
      if (code == NOT)
	{
	  ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
				     OPTAB_LIB_WIDEN);
	  ret = expand_simple_unop (mode, NOT, ret, target, true);
	}
      else
	ret = expand_simple_binop (mode, code, ret, val, target, true,
				   OPTAB_LIB_WIDEN);
    }
  return ret;
}
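
/* Worked example (illustrative, not part of GCC): when the fallback
   library call only returns the fetch-before value, the correction above
   recomputes the op-fetch result locally.  For __atomic_nand_fetch the
   identity used by the NOT branch is new = ~(old & val):

     static unsigned int
     nand_fetch_fixup_example (unsigned int old_value, unsigned int val)
     {
       return ~(old_value & val);
     }

   For every other operation a single binop, old-value OP val, suffices.  */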
/* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function.  */

void
expand_ifn_atomic_bit_test_and (gcall *call)
{
  tree ptr = gimple_call_arg (call, 0);
  tree bit = gimple_call_arg (call, 1);
  tree flag = gimple_call_arg (call, 2);
  tree lhs = gimple_call_lhs (call);
  enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
  machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
  enum rtx_code code;
  optab optab;
  struct expand_operand ops[5];

  gcc_assert (flag_inline_atomics);

  if (gimple_call_num_args (call) == 4)
    model = get_memmodel (gimple_call_arg (call, 3));

  rtx mem = get_builtin_sync_mem (ptr, mode);
  rtx val = expand_expr_force_mode (bit, mode);

  switch (gimple_call_internal_fn (call))
    {
    case IFN_ATOMIC_BIT_TEST_AND_SET:
      code = IOR;
      optab = atomic_bit_test_and_set_optab;
      break;
    case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
      code = XOR;
      optab = atomic_bit_test_and_complement_optab;
      break;
    case IFN_ATOMIC_BIT_TEST_AND_RESET:
      code = AND;
      optab = atomic_bit_test_and_reset_optab;
      break;
    default:
      gcc_unreachable ();
    }

  if (lhs == NULL_TREE)
    {
      val = expand_simple_binop (mode, ASHIFT, const1_rtx,
				 val, NULL_RTX, true, OPTAB_DIRECT);
      if (code == AND)
	val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
      expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
      return;
    }

  rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  enum insn_code icode = direct_optab_handler (optab, mode);
  gcc_assert (icode != CODE_FOR_nothing);
  create_output_operand (&ops[0], target, mode);
  create_fixed_operand (&ops[1], mem);
  create_convert_operand_to (&ops[2], val, mode, true);
  create_integer_operand (&ops[3], model);
  create_integer_operand (&ops[4], integer_onep (flag));
  if (maybe_expand_insn (icode, 5, ops))
    return;

  rtx bitval = val;
  val = expand_simple_binop (mode, ASHIFT, const1_rtx,
			     val, NULL_RTX, true, OPTAB_DIRECT);
  rtx maskval = val;
  if (code == AND)
    val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
  rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
				       code, model, false);
  if (integer_onep (flag))
    {
      result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
				    NULL_RTX, true, OPTAB_DIRECT);
      result = expand_simple_binop (mode, AND, result, const1_rtx, target,
				    true, OPTAB_DIRECT);
    }
  else
    result = expand_simple_binop (mode, AND, result, maskval, target, true,
				  OPTAB_DIRECT);
  if (result != target)
    emit_move_insn (target, result);
}
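
/* Worked example (illustrative, not part of GCC): the fallback path above
   re-expresses the internal function through a plain atomic fetch-op.
   For IFN_ATOMIC_BIT_TEST_AND_SET with the FLAG operand set, it emits
   the equivalent of:

     static int
     bit_test_and_set_example (unsigned int *ptr, unsigned int bit)
     {
       unsigned int old
	 = __atomic_fetch_or (ptr, 1u << bit, __ATOMIC_SEQ_CST);
       return (old >> bit) & 1;
     }

   while the direct optab path can collapse the whole sequence into a
   single instruction on targets that have one (e.g. lock bts on x86).  */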
/* Expand an atomic clear operation.
	void _atomic_clear (BOOL *obj, enum memmodel)
   EXP is the call expression.  */

static rtx
expand_builtin_atomic_clear (tree exp)
{
  machine_mode mode;
  rtx mem, ret;
  enum memmodel model;

  mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  model = get_memmodel (CALL_EXPR_ARG (exp, 1));

  if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
    {
      location_t loc
	= expansion_point_location_if_in_system_header (input_location);
      warning_at (loc, OPT_Winvalid_memory_model,
		  "invalid memory model for %<__atomic_store%>");
      model = MEMMODEL_SEQ_CST;
    }

  /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
     Failing that, a store is issued by __atomic_store.  The only way this can
     fail is if the bool type is larger than a word size.  Unlikely, but
     handle it anyway for completeness.  Assume a single threaded model since
     there is no atomic support in this case, and no barriers are required.  */
  ret = expand_atomic_store (mem, const0_rtx, model, true);
  if (!ret)
    emit_move_insn (mem, const0_rtx);
  return const0_rtx;
}

/* Expand an atomic test_and_set operation.
	bool _atomic_test_and_set (BOOL *obj, enum memmodel)
   EXP is the call expression.  */

static rtx
expand_builtin_atomic_test_and_set (tree exp, rtx target)
{
  rtx mem;
  enum memmodel model;
  machine_mode mode;

  mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  model = get_memmodel (CALL_EXPR_ARG (exp, 1));

  return expand_atomic_test_and_set (target, mem, model);
}

/* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
   this architecture.  If ARG1 is NULL, use typical alignment for size ARG0.  */

static tree
fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
{
  int size;
  machine_mode mode;
  unsigned int mode_align, type_align;

  if (TREE_CODE (arg0) != INTEGER_CST)
    return NULL_TREE;

  /* We need a corresponding integer mode for the access to be lock-free.  */
  size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
  if (!int_mode_for_size (size, 0).exists (&mode))
    return boolean_false_node;

  mode_align = GET_MODE_ALIGNMENT (mode);

  if (TREE_CODE (arg1) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));

      /* Either this argument is null, or it's a fake pointer encoding
	 the alignment of the object.  */
      val = least_bit_hwi (val);
      val *= BITS_PER_UNIT;

      if (val == 0 || mode_align < val)
	type_align = mode_align;
      else
	type_align = val;
    }
  else
    {
      tree ttype = TREE_TYPE (arg1);

      /* This function is usually invoked and folded immediately by the front
	 end before anything else has a chance to look at it.  The pointer
	 parameter at this point is usually cast to a void *, so check for that
	 and look past the cast.  */
      if (CONVERT_EXPR_P (arg1)
	  && POINTER_TYPE_P (ttype)
	  && VOID_TYPE_P (TREE_TYPE (ttype))
	  && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
	arg1 = TREE_OPERAND (arg1, 0);

      ttype = TREE_TYPE (arg1);
      gcc_assert (POINTER_TYPE_P (ttype));

      /* Get the underlying type of the object.  */
      ttype = TREE_TYPE (ttype);
      type_align = TYPE_ALIGN (ttype);
    }

  /* If the object has smaller alignment, the lock free routines cannot
     be used.  */
  if (type_align < mode_align)
    return boolean_false_node;

  /* Check if a compare_and_swap pattern exists for the mode which represents
     the required size.  The pattern is not allowed to fail, so the existence
     of the pattern indicates support is present.  Also require that an
     atomic load exists for the required size.  */
  if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
    return boolean_true_node;
  else
    return boolean_false_node;
}
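
/* Usage sketch (illustrative, not part of GCC): calls such as

     static int counter_var;
     ... __atomic_always_lock_free (sizeof (int), &counter_var) ...

   are typically folded through this routine by the front end.  With a
   4-byte size an integer mode exists, the object's alignment matches the
   mode alignment, and on targets with a 32-bit compare-and-swap pattern
   plus an atomic load the call folds to true at compile time.  Passing a
   null pointer bases the answer on the typical alignment for the size
   alone.  */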
/* Return true if the parameters to call EXP represent an object which will
   always generate lock free instructions.  The first argument represents the
   size of the object, and the second parameter is a pointer to the object
   itself.  If NULL is passed for the object, then the result is based on
   typical alignment for an object of the specified size.  Otherwise return
   false.  */

static rtx
expand_builtin_atomic_always_lock_free (tree exp)
{
  tree size;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);

  if (TREE_CODE (arg0) != INTEGER_CST)
    {
      error ("non-constant argument 1 to __atomic_always_lock_free");
      return const0_rtx;
    }

  size = fold_builtin_atomic_always_lock_free (arg0, arg1);
  if (size == boolean_true_node)
    return const1_rtx;
  return const0_rtx;
}

/* Return a one or zero if it can be determined that object ARG1 of size ARG0
   is lock free on this architecture.  */

static tree
fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
{
  if (!flag_inline_atomics)
    return NULL_TREE;

  /* If it isn't always lock free, don't generate a result.  */
  if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
    return boolean_true_node;

  return NULL_TREE;
}

/* Return true if the parameters to call EXP represent an object which will
   always generate lock free instructions.  The first argument represents the
   size of the object, and the second parameter is a pointer to the object
   itself.  If NULL is passed for the object, then the result is based on
   typical alignment for an object of the specified size.  Otherwise return
   NULL.  */

static rtx
expand_builtin_atomic_is_lock_free (tree exp)
{
  tree size;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);

  if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
    {
      error ("non-integer argument 1 to __atomic_is_lock_free");
      return NULL_RTX;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* If the value is known at compile time, return the RTX for it.  */
  size = fold_builtin_atomic_is_lock_free (arg0, arg1);
  if (size == boolean_true_node)
    return const1_rtx;

  return NULL_RTX;
}

/* Expand the __atomic_thread_fence intrinsic:
	void __atomic_thread_fence (enum memmodel)
   EXP is the CALL_EXPR.  */

static void
expand_builtin_atomic_thread_fence (tree exp)
{
  enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
  expand_mem_thread_fence (model);
}

/* Expand the __atomic_signal_fence intrinsic:
	void __atomic_signal_fence (enum memmodel)
   EXP is the CALL_EXPR.  */

static void
expand_builtin_atomic_signal_fence (tree exp)
{
  enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
  expand_mem_signal_fence (model);
}

/* Expand the __sync_synchronize intrinsic.  */

static void
expand_builtin_sync_synchronize (void)
{
  expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
}

static rtx
expand_builtin_thread_pointer (tree exp, rtx target)
{
  enum insn_code icode;
  if (!validate_arglist (exp, VOID_TYPE))
    return const0_rtx;
  icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand op;
      /* If the target is not suitable then create a new target.  */
      if (target == NULL_RTX
	  || !REG_P (target)
	  || GET_MODE (target) != Pmode)
	target = gen_reg_rtx (Pmode);
      create_output_operand (&op, target, Pmode);
      expand_insn (icode, 1, &op);
      return target;
    }
  error ("__builtin_thread_pointer is not supported on this target");
  return const0_rtx;
}

static void
expand_builtin_set_thread_pointer (tree exp)
{
  enum insn_code icode;
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return;
  icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand op;
      rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
			     Pmode, EXPAND_NORMAL);
      create_input_operand (&op, val, Pmode);
      expand_insn (icode, 1, &op);
      return;
    }
  error ("__builtin_set_thread_pointer is not supported on this target");
}

/* Emit code to restore the current value of stack.  */

static void
expand_stack_restore (tree var)
{
  rtx_insn *prev;
  rtx sa = expand_normal (var);

  sa = convert_memory_address (Pmode, sa);

  prev = get_last_insn ();
  emit_stack_restore (SAVE_BLOCK, sa);

  record_new_stack_level ();

  fixup_args_size_notes (prev, get_last_insn (), 0);
}

/* Emit code to save the current value of stack.  */

static rtx
expand_stack_save (void)
{
  rtx ret = NULL_RTX;

  emit_stack_save (SAVE_BLOCK, &ret);
  return ret;
}

/* Emit code to get the openacc gang, worker or vector id or size.  */

static rtx
expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
{
  const char *name;
  rtx fallback_retval;
  rtx_insn *(*gen_fn) (rtx, rtx);
  switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
    {
    case BUILT_IN_GOACC_PARLEVEL_ID:
      name = "__builtin_goacc_parlevel_id";
      fallback_retval = const0_rtx;
      gen_fn = targetm.gen_oacc_dim_pos;
      break;
    case BUILT_IN_GOACC_PARLEVEL_SIZE:
      name = "__builtin_goacc_parlevel_size";
      fallback_retval = const1_rtx;
      gen_fn = targetm.gen_oacc_dim_size;
      break;
    default:
      gcc_unreachable ();
    }

  if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
    {
      error ("%qs only supported in OpenACC code", name);
      return const0_rtx;
    }

  tree arg = CALL_EXPR_ARG (exp, 0);
  if (TREE_CODE (arg) != INTEGER_CST)
    {
      error ("non-constant argument 0 to %qs", name);
      return const0_rtx;
    }

  int dim = TREE_INT_CST_LOW (arg);
  switch (dim)
    {
    case GOMP_DIM_GANG:
    case GOMP_DIM_WORKER:
    case GOMP_DIM_VECTOR:
      break;
    default:
      error ("illegal argument 0 to %qs", name);
      return const0_rtx;
    }

  if (ignore)
    return target;

  if (target == NULL_RTX)
    target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

  if (!targetm.have_oacc_dim_size ())
    {
      emit_move_insn (target, fallback_retval);
      return target;
    }

  rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
  emit_insn (gen_fn (reg, GEN_INT (dim)));
  if (reg != target)
    emit_move_insn (target, reg);

  return target;
}
/* Expand a string compare operation using a sequence of char comparisons
   to get rid of the calling overhead, with result going to TARGET if
   that's convenient.

   VAR_STR is the variable string source;
   CONST_STR is the constant string source;
   LENGTH is the number of chars to compare;
   CONST_STR_N indicates which source string is the constant string;
   MODE is the mode of the result.

   The comparison expands to the following sequence (assuming const_str_n
   is 2, i.e. arg2 is a constant string):

   target = (int) (unsigned char) var_str[0]
	    - (int) (unsigned char) const_str[0];
   if (target != 0)
     goto ne_label;
     ...
   target = (int) (unsigned char) var_str[length - 2]
	    - (int) (unsigned char) const_str[length - 2];
   if (target != 0)
     goto ne_label;
   target = (int) (unsigned char) var_str[length - 1]
	    - (int) (unsigned char) const_str[length - 1];
   ne_label:
  */

static rtx
inline_string_cmp (rtx target, tree var_str, const char *const_str,
		   unsigned HOST_WIDE_INT length,
		   int const_str_n, machine_mode mode)
{
  HOST_WIDE_INT offset = 0;
  rtx var_rtx_array
    = get_memory_rtx (var_str, build_int_cst (unsigned_type_node, length));
  rtx var_rtx = NULL_RTX;
  rtx const_rtx = NULL_RTX;
  rtx result = target ? target : gen_reg_rtx (mode);
  rtx_code_label *ne_label = gen_label_rtx ();
  tree unit_type_node = unsigned_char_type_node;
  scalar_int_mode unit_mode
    = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);

  start_sequence ();

  for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
    {
      var_rtx
	= adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
      const_rtx = c_readstr (const_str + offset, unit_mode);
      rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
      rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;

      op0 = convert_modes (mode, unit_mode, op0, 1);
      op1 = convert_modes (mode, unit_mode, op1, 1);
      result = expand_simple_binop (mode, MINUS, op0, op1,
				    result, 1, OPTAB_WIDEN);
      if (i < length - 1)
	emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
				 mode, true, ne_label);
      offset += GET_MODE_SIZE (unit_mode);
    }

  emit_label (ne_label);
  rtx_insn *insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return result;
}
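
/* Worked example (illustrative, not part of GCC): assuming c_getstr
   reports length 3 for the constant "hi" (two chars plus the NUL), a
   call strcmp (s, "hi") is expanded into the equivalent of:

     static int
     strcmp_hi_example (const unsigned char *s)
     {
       int r = (int) s[0] - (int) 'h';
       if (r != 0)
	 goto ne;
       r = (int) s[1] - (int) 'i';
       if (r != 0)
	 goto ne;
       r = (int) s[2] - (int) '\0';
     ne:
       return r;
     }

   so the common case runs straight-line, with no call overhead.  */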
/* Inline expansion of a call to str(n)cmp, with result going to
   TARGET if that's convenient.
   If the call is not inlined, return NULL_RTX.  */

static rtx
inline_expand_builtin_string_cmp (tree exp, rtx target)
{
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  unsigned HOST_WIDE_INT length = 0;
  bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);

  /* Do NOT apply this inlining expansion when optimizing for size or
     optimization level below 2.  */
  if (optimize < 2 || optimize_insn_for_size_p ())
    return NULL_RTX;

  gcc_checking_assert (fcode == BUILT_IN_STRCMP
		       || fcode == BUILT_IN_STRNCMP
		       || fcode == BUILT_IN_MEMCMP);

  /* On a target where the type of the call (int) has the same or narrower
     precision than unsigned char, give up the inlining expansion.  */
  if (TYPE_PRECISION (unsigned_char_type_node)
      >= TYPE_PRECISION (TREE_TYPE (exp)))
    return NULL_RTX;

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree arg2 = CALL_EXPR_ARG (exp, 1);
  tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;

  unsigned HOST_WIDE_INT len1 = 0;
  unsigned HOST_WIDE_INT len2 = 0;
  unsigned HOST_WIDE_INT len3 = 0;

  const char *src_str1 = c_getstr (arg1, &len1);
  const char *src_str2 = c_getstr (arg2, &len2);

  /* If neither string is a constant string, the call does not qualify.  */
  if (!src_str1 && !src_str2)
    return NULL_RTX;

  /* For strncmp, if the length is not a constant, it does not qualify.  */
  if (is_ncmp && !tree_fits_uhwi_p (len3_tree))
    return NULL_RTX;

  int const_str_n = 0;
  if (!len1)
    const_str_n = 2;
  else if (!len2)
    const_str_n = 1;
  else if (len2 > len1)
    const_str_n = 1;
  else
    const_str_n = 2;

  gcc_checking_assert (const_str_n > 0);
  length = (const_str_n == 1) ? len1 : len2;

  if (is_ncmp && (len3 = tree_to_uhwi (len3_tree)) < length)
    length = len3;

  /* If the length of the comparison is larger than the threshold,
     we should not inline it.  */
  if (length > (unsigned HOST_WIDE_INT)
	       PARAM_VALUE (BUILTIN_STRING_CMP_INLINE_LENGTH))
    return NULL_RTX;

  machine_mode mode = TYPE_MODE (TREE_TYPE (exp));

  /* Now expand the call inline.  */
  return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
			    (const_str_n == 1) ? src_str1 : src_str2, length,
			    const_str_n, mode);
}
/* Expand a call to __builtin_speculation_safe_value_<N>.  MODE
   represents the size of the first argument to that call, or VOIDmode
   if the argument is a pointer.  IGNORE will be true if the result
   isn't used.  */

static rtx
expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
			       bool ignore)
{
  rtx val, failsafe;
  unsigned nargs = call_expr_nargs (exp);

  tree arg0 = CALL_EXPR_ARG (exp, 0);

  if (mode == VOIDmode)
    {
      mode = TYPE_MODE (TREE_TYPE (arg0));
      gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
    }

  val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);

  /* An optional second argument can be used as a failsafe value on
     some machines.  If it isn't present, then the failsafe value is
     assumed to be 0.  */
  if (nargs > 1)
    {
      tree arg1 = CALL_EXPR_ARG (exp, 1);
      failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
    }
  else
    failsafe = const0_rtx;

  /* If the result isn't used, the behavior is undefined.  It would be
     nice to emit a warning here, but path splitting means this might
     happen with legitimate code.  So simply drop the builtin
     expansion in that case; we've handled any side-effects above.  */
  if (ignore)
    return const0_rtx;

  /* If we don't have a suitable target, create one to hold the result.  */
  if (target == NULL || GET_MODE (target) != mode)
    target = gen_reg_rtx (mode);

  if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
    val = convert_modes (mode, VOIDmode, val, false);

  return targetm.speculation_safe_value (mode, target, val, failsafe);
}
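
/* Usage sketch (illustrative, not part of GCC): a bounds check hardened
   against speculative (Spectre variant 1) misuse might be written as:

     extern char table[256];

     static char
     load_clamped_example (unsigned int idx, unsigned int limit)
     {
       if (idx < limit)
	 return table[__builtin_speculation_safe_value (idx)];
       return 0;
     }

   On targets implementing the speculation_safe_value hook, the index is
   forced to the failsafe value (0 by default) on mis-speculated paths;
   the default hook degrades to a plain copy and warns that no mitigation
   is available.  */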
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

rtx
expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
		int ignore)
{
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
  int flags;

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return targetm.expand_builtin (exp, target, subtarget, mode, ignore);

  /* When ASan is enabled, we don't want to expand some memory/string
     builtins and rely on libsanitizer's hooks.  This allows us to avoid
     redundant checks and be sure that possible overflow will be detected
     by ASan.  */

  if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
    return expand_call (exp, target, ignore);

  /* When not optimizing, generate calls to library functions for a certain
     set of builtins.  */
  if (!optimize
      && !called_as_built_in (fndecl)
      && fcode != BUILT_IN_FORK
      && fcode != BUILT_IN_EXECL
      && fcode != BUILT_IN_EXECV
      && fcode != BUILT_IN_EXECLP
      && fcode != BUILT_IN_EXECLE
      && fcode != BUILT_IN_EXECVP
      && fcode != BUILT_IN_EXECVE
      && !ALLOCA_FUNCTION_CODE_P (fcode)
      && fcode != BUILT_IN_FREE)
    return expand_call (exp, target, ignore);

  /* The built-in function expanders test for target == const0_rtx
     to determine whether the function's result will be ignored.  */
  if (ignore)
    target = const0_rtx;

  /* If the result of a pure or const built-in function is ignored, and
     none of its arguments are volatile, we can avoid expanding the
     built-in call and just evaluate the arguments for side-effects.  */
  if (target == const0_rtx
      && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
      && !(flags & ECF_LOOPING_CONST_OR_PURE))
    {
      bool volatilep = false;
      tree arg;
      call_expr_arg_iterator iter;

      FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
	if (TREE_THIS_VOLATILE (arg))
	  {
	    volatilep = true;
	    break;
	  }

      if (!volatilep)
	{
	  FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
	    expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return const0_rtx;
	}
    }

  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_FABS):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
    case BUILT_IN_FABSD32:
    case BUILT_IN_FABSD64:
    case BUILT_IN_FABSD128:
      target = expand_builtin_fabs (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_COPYSIGN):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
      target = expand_builtin_copysign (exp, target, subtarget);
      if (target)
	return target;
      break;

      /* Just do a normal library call if we were unable to fold
	 the values.  */
    CASE_FLT_FN (BUILT_IN_CABS):
      break;

    CASE_FLT_FN (BUILT_IN_FMA):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
      target = expand_builtin_mathfn_ternary (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_ILOGB):
      if (! flag_unsafe_math_optimizations)
	break;
      gcc_fallthrough ();
    CASE_FLT_FN (BUILT_IN_ISINF):
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_ISFINITE:
    case BUILT_IN_ISNORMAL:
      target = expand_builtin_interclass_mathfn (exp, target);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      target = expand_builtin_int_roundingfn (exp, target);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_IRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_IROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      target = expand_builtin_int_roundingfn_2 (exp, target);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_POWI):
      target = expand_builtin_powi (exp, target);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_CEXPI):
      target = expand_builtin_cexpi (exp, target);
      gcc_assert (target);
      return target;

    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      if (! flag_unsafe_math_optimizations)
	break;
      target = expand_builtin_mathfn_3 (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_SINCOS):
      if (! flag_unsafe_math_optimizations)
	break;
      target = expand_builtin_sincos (exp);
      if (target)
	return target;
      break;
    case BUILT_IN_APPLY_ARGS:
      return expand_builtin_apply_args ();

      /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
	 FUNCTION with a copy of the parameters described by
	 ARGUMENTS, and ARGSIZE.  It returns a block of memory
	 allocated on the stack into which is stored all the registers
	 that might possibly be used for returning the result of a
	 function.  ARGUMENTS is the value returned by
	 __builtin_apply_args.  ARGSIZE is the number of bytes of
	 arguments that must be copied.  ??? How should this value be
	 computed?  We'll also need a safe worst case value for varargs
	 functions.  */
    case BUILT_IN_APPLY:
      if (!validate_arglist (exp, POINTER_TYPE,
			     POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
	  && !validate_arglist (exp, REFERENCE_TYPE,
				POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
	return const0_rtx;
      else
	{
	  rtx ops[3];

	  ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
	  ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
	  ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));

	  return expand_builtin_apply (ops[0], ops[1], ops[2]);
	}

      /* __builtin_return (RESULT) causes the function to return the
	 value described by RESULT.  RESULT is address of the block of
	 memory returned by __builtin_apply.  */
    case BUILT_IN_RETURN:
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
	expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
      return const0_rtx;

    case BUILT_IN_SAVEREGS:
      return expand_builtin_saveregs ();

    case BUILT_IN_VA_ARG_PACK:
      /* All valid uses of __builtin_va_arg_pack () are removed during
	 inlining.  */
      error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
      return const0_rtx;

    case BUILT_IN_VA_ARG_PACK_LEN:
      /* All valid uses of __builtin_va_arg_pack_len () are removed during
	 inlining.  */
      error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
      return const0_rtx;

      /* Return the address of the first anonymous stack arg.  */
    case BUILT_IN_NEXT_ARG:
      if (fold_builtin_next_arg (exp, false))
	return const0_rtx;
      return expand_builtin_next_arg ();

    case BUILT_IN_CLEAR_CACHE:
      target = expand_builtin___clear_cache (exp);
      if (target)
	return target;
      break;

    case BUILT_IN_CLASSIFY_TYPE:
      return expand_builtin_classify_type (exp);

    case BUILT_IN_CONSTANT_P:
      return const0_rtx;

    case BUILT_IN_FRAME_ADDRESS:
    case BUILT_IN_RETURN_ADDRESS:
      return expand_builtin_frame_address (fndecl, exp);

      /* Returns the address of the area where the structure is returned.
	 0 otherwise.  */
    case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      if (call_expr_nargs (exp) != 0
	  || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
	  || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
	return const0_rtx;
      else
	return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);

    CASE_BUILT_IN_ALLOCA:
      target = expand_builtin_alloca (exp);
      if (target)
	return target;
      break;

    case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
      return expand_asan_emit_allocas_unpoison (exp);

    case BUILT_IN_STACK_SAVE:
      return expand_stack_save ();

    case BUILT_IN_STACK_RESTORE:
      expand_stack_restore (CALL_EXPR_ARG (exp, 0));
      return const0_rtx;
    case BUILT_IN_BSWAP16:
    case BUILT_IN_BSWAP32:
    case BUILT_IN_BSWAP64:
      target = expand_builtin_bswap (target_mode, exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_FFS):
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, ffs_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_CLZ):
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, clz_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_CTZ):
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, ctz_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_CLRSB):
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, clrsb_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_POPCOUNT):
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, popcount_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_PARITY):
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, parity_optab);
      if (target)
	return target;
      break;

    case BUILT_IN_STRLEN:
      target = expand_builtin_strlen (exp, target, target_mode);
      if (target)
	return target;
      break;

    case BUILT_IN_STRNLEN:
      target = expand_builtin_strnlen (exp, target, target_mode);
      if (target)
	return target;
      break;

    case BUILT_IN_STRCAT:
      target = expand_builtin_strcat (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_STRCPY:
      target = expand_builtin_strcpy (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_STRNCAT:
      target = expand_builtin_strncat (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_STRNCPY:
      target = expand_builtin_strncpy (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_STPCPY:
      target = expand_builtin_stpcpy (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_STPNCPY:
      target = expand_builtin_stpncpy (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMCHR:
      target = expand_builtin_memchr (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMCPY:
      target = expand_builtin_memcpy (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMMOVE:
      target = expand_builtin_memmove (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMPCPY:
      target = expand_builtin_mempcpy (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMSET:
      target = expand_builtin_memset (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_BZERO:
      target = expand_builtin_bzero (exp);
      if (target)
	return target;
      break;
      /* Expand it as BUILT_IN_MEMCMP_EQ first.  If not successful, change it
	 back to a BUILT_IN_STRCMP.  Remember to delete the 3rd parameter
	 when changing it to a strcmp call.  */
    case BUILT_IN_STRCMP_EQ:
      target = expand_builtin_memcmp (exp, target, true);
      if (target)
	break;

      /* Change this call back to a BUILT_IN_STRCMP.  */
      TREE_OPERAND (exp, 1)
	= build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));

      /* Delete the last parameter.  */
      unsigned int i;
      vec<tree, va_gc> *arg_vec;
      vec_alloc (arg_vec, 2);
      for (i = 0; i < 2; i++)
	arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
      exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
      /* FALLTHROUGH */

    case BUILT_IN_STRCMP:
      target = expand_builtin_strcmp (exp, target);
      if (target)
	return target;
      break;

      /* Expand it as BUILT_IN_MEMCMP_EQ first.  If not successful, change it
	 back to a BUILT_IN_STRNCMP.  */
    case BUILT_IN_STRNCMP_EQ:
      target = expand_builtin_memcmp (exp, target, true);
      if (target)
	break;

      /* Change it back to a BUILT_IN_STRNCMP.  */
      TREE_OPERAND (exp, 1)
	= build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
      /* FALLTHROUGH */

    case BUILT_IN_STRNCMP:
      target = expand_builtin_strncmp (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_BCMP:
    case BUILT_IN_MEMCMP:
    case BUILT_IN_MEMCMP_EQ:
      target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
      if (target)
	return target;
      if (fcode == BUILT_IN_MEMCMP_EQ)
	{
	  tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
	  TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
	}
      break;
    case BUILT_IN_SETJMP:
      /* This should have been lowered to the builtins below.  */
      gcc_unreachable ();

    case BUILT_IN_SETJMP_SETUP:
      /* __builtin_setjmp_setup is passed a pointer to an array of five words
	 and the receiver label.  */
      if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
	{
	  rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
				      VOIDmode, EXPAND_NORMAL);
	  tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
	  rtx_insn *label_r = label_rtx (label);

	  /* This is copied from the handling of non-local gotos.  */
	  expand_builtin_setjmp_setup (buf_addr, label_r);
	  nonlocal_goto_handler_labels
	    = gen_rtx_INSN_LIST (VOIDmode, label_r,
				 nonlocal_goto_handler_labels);
	  /* ??? Do not let expand_label treat us as such since we would
	     not want to be both on the list of non-local labels and on
	     the list of forced labels.  */
	  FORCED_LABEL (label) = 0;
	  return const0_rtx;
	}
      break;

    case BUILT_IN_SETJMP_RECEIVER:
      /* __builtin_setjmp_receiver is passed the receiver label.  */
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
	{
	  tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
	  rtx_insn *label_r = label_rtx (label);

	  expand_builtin_setjmp_receiver (label_r);
	  return const0_rtx;
	}
      break;

      /* __builtin_longjmp is passed a pointer to an array of five words.
	 It's similar to the C library longjmp function but works with
	 __builtin_setjmp above.  */
    case BUILT_IN_LONGJMP:
      if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
	{
	  rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
				      VOIDmode, EXPAND_NORMAL);
	  rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));

	  if (value != const1_rtx)
	    {
	      error ("%<__builtin_longjmp%> second argument must be 1");
	      return const0_rtx;
	    }

	  expand_builtin_longjmp (buf_addr, value);
	  return const0_rtx;
	}
      break;

    case BUILT_IN_NONLOCAL_GOTO:
      target = expand_builtin_nonlocal_goto (exp);
      if (target)
	return target;
      break;

      /* This updates the setjmp buffer that is its argument with the value
	 of the current stack pointer.  */
    case BUILT_IN_UPDATE_SETJMP_BUF:
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
	{
	  rtx buf_addr
	    = expand_normal (CALL_EXPR_ARG (exp, 0));

	  expand_builtin_update_setjmp_buf (buf_addr);
	  return const0_rtx;
	}
      break;

    case BUILT_IN_TRAP:
      expand_builtin_trap ();
      return const0_rtx;

    case BUILT_IN_UNREACHABLE:
      expand_builtin_unreachable ();
      return const0_rtx;

    CASE_FLT_FN (BUILT_IN_SIGNBIT):
    case BUILT_IN_SIGNBITD32:
    case BUILT_IN_SIGNBITD64:
    case BUILT_IN_SIGNBITD128:
      target = expand_builtin_signbit (exp, target);
      if (target)
	return target;
      break;
7755 /* Various hooks for the DWARF 2 __throw routine. */
7756 case BUILT_IN_UNWIND_INIT
:
7757 expand_builtin_unwind_init ();
7759 case BUILT_IN_DWARF_CFA
:
7760 return virtual_cfa_rtx
;
7761 #ifdef DWARF2_UNWIND_INFO
7762 case BUILT_IN_DWARF_SP_COLUMN
:
7763 return expand_builtin_dwarf_sp_column ();
7764 case BUILT_IN_INIT_DWARF_REG_SIZES
:
7765 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp
, 0));
7768 case BUILT_IN_FROB_RETURN_ADDR
:
7769 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp
, 0));
7770 case BUILT_IN_EXTRACT_RETURN_ADDR
:
7771 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp
, 0));
7772 case BUILT_IN_EH_RETURN
:
7773 expand_builtin_eh_return (CALL_EXPR_ARG (exp
, 0),
7774 CALL_EXPR_ARG (exp
, 1));
7776 case BUILT_IN_EH_RETURN_DATA_REGNO
:
7777 return expand_builtin_eh_return_data_regno (exp
);
7778 case BUILT_IN_EXTEND_POINTER
:
7779 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp
, 0));
7780 case BUILT_IN_EH_POINTER
:
7781 return expand_builtin_eh_pointer (exp
);
7782 case BUILT_IN_EH_FILTER
:
7783 return expand_builtin_eh_filter (exp
);
7784 case BUILT_IN_EH_COPY_VALUES
:
7785 return expand_builtin_eh_copy_values (exp
);
    case BUILT_IN_VA_START:
      return expand_builtin_va_start (exp);
    case BUILT_IN_VA_END:
      return expand_builtin_va_end (exp);
    case BUILT_IN_VA_COPY:
      return expand_builtin_va_copy (exp);
    case BUILT_IN_EXPECT:
      return expand_builtin_expect (exp, target);
    case BUILT_IN_EXPECT_WITH_PROBABILITY:
      return expand_builtin_expect_with_probability (exp, target);
    case BUILT_IN_ASSUME_ALIGNED:
      return expand_builtin_assume_aligned (exp, target);
    case BUILT_IN_PREFETCH:
      expand_builtin_prefetch (exp);
      return const0_rtx;
    case BUILT_IN_INIT_TRAMPOLINE:
      return expand_builtin_init_trampoline (exp, true);
    case BUILT_IN_INIT_HEAP_TRAMPOLINE:
      return expand_builtin_init_trampoline (exp, false);
    case BUILT_IN_ADJUST_TRAMPOLINE:
      return expand_builtin_adjust_trampoline (exp);

    case BUILT_IN_INIT_DESCRIPTOR:
      return expand_builtin_init_descriptor (exp);
    case BUILT_IN_ADJUST_DESCRIPTOR:
      return expand_builtin_adjust_descriptor (exp);
    case BUILT_IN_EXECL:
    case BUILT_IN_EXECV:
    case BUILT_IN_EXECLP:
    case BUILT_IN_EXECLE:
    case BUILT_IN_EXECVP:
    case BUILT_IN_EXECVE:
      target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
      if (target)
        return target;
      break;
    case BUILT_IN_SYNC_FETCH_AND_ADD_1:
    case BUILT_IN_SYNC_FETCH_AND_ADD_2:
    case BUILT_IN_SYNC_FETCH_AND_ADD_4:
    case BUILT_IN_SYNC_FETCH_AND_ADD_8:
    case BUILT_IN_SYNC_FETCH_AND_ADD_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
      target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
      if (target)
        return target;
      break;
    case BUILT_IN_SYNC_FETCH_AND_SUB_1:
    case BUILT_IN_SYNC_FETCH_AND_SUB_2:
    case BUILT_IN_SYNC_FETCH_AND_SUB_4:
    case BUILT_IN_SYNC_FETCH_AND_SUB_8:
    case BUILT_IN_SYNC_FETCH_AND_SUB_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
      target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_OR_1:
    case BUILT_IN_SYNC_FETCH_AND_OR_2:
    case BUILT_IN_SYNC_FETCH_AND_OR_4:
    case BUILT_IN_SYNC_FETCH_AND_OR_8:
    case BUILT_IN_SYNC_FETCH_AND_OR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
      target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_AND_1:
    case BUILT_IN_SYNC_FETCH_AND_AND_2:
    case BUILT_IN_SYNC_FETCH_AND_AND_4:
    case BUILT_IN_SYNC_FETCH_AND_AND_8:
    case BUILT_IN_SYNC_FETCH_AND_AND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
      target = expand_builtin_sync_operation (mode, exp, AND, false, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_XOR_1:
    case BUILT_IN_SYNC_FETCH_AND_XOR_2:
    case BUILT_IN_SYNC_FETCH_AND_XOR_4:
    case BUILT_IN_SYNC_FETCH_AND_XOR_8:
    case BUILT_IN_SYNC_FETCH_AND_XOR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
      target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_NAND_1:
    case BUILT_IN_SYNC_FETCH_AND_NAND_2:
    case BUILT_IN_SYNC_FETCH_AND_NAND_4:
    case BUILT_IN_SYNC_FETCH_AND_NAND_8:
    case BUILT_IN_SYNC_FETCH_AND_NAND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
      target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_ADD_AND_FETCH_1:
    case BUILT_IN_SYNC_ADD_AND_FETCH_2:
    case BUILT_IN_SYNC_ADD_AND_FETCH_4:
    case BUILT_IN_SYNC_ADD_AND_FETCH_8:
    case BUILT_IN_SYNC_ADD_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_SUB_AND_FETCH_1:
    case BUILT_IN_SYNC_SUB_AND_FETCH_2:
    case BUILT_IN_SYNC_SUB_AND_FETCH_4:
    case BUILT_IN_SYNC_SUB_AND_FETCH_8:
    case BUILT_IN_SYNC_SUB_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_OR_AND_FETCH_1:
    case BUILT_IN_SYNC_OR_AND_FETCH_2:
    case BUILT_IN_SYNC_OR_AND_FETCH_4:
    case BUILT_IN_SYNC_OR_AND_FETCH_8:
    case BUILT_IN_SYNC_OR_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_AND_AND_FETCH_1:
    case BUILT_IN_SYNC_AND_AND_FETCH_2:
    case BUILT_IN_SYNC_AND_AND_FETCH_4:
    case BUILT_IN_SYNC_AND_AND_FETCH_8:
    case BUILT_IN_SYNC_AND_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, AND, true, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_XOR_AND_FETCH_1:
    case BUILT_IN_SYNC_XOR_AND_FETCH_2:
    case BUILT_IN_SYNC_XOR_AND_FETCH_4:
    case BUILT_IN_SYNC_XOR_AND_FETCH_8:
    case BUILT_IN_SYNC_XOR_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_NAND_AND_FETCH_1:
    case BUILT_IN_SYNC_NAND_AND_FETCH_2:
    case BUILT_IN_SYNC_NAND_AND_FETCH_4:
    case BUILT_IN_SYNC_NAND_AND_FETCH_8:
    case BUILT_IN_SYNC_NAND_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
      if (target)
        return target;
      break;
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
      if (mode == VOIDmode)
        mode = TYPE_MODE (boolean_type_node);
      if (!target || !register_operand (target, mode))
        target = gen_reg_rtx (mode);

      mode = get_builtin_sync_mode
               (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
      target = expand_builtin_compare_and_swap (mode, exp, true, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
      mode = get_builtin_sync_mode
               (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
      target = expand_builtin_compare_and_swap (mode, exp, false, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
      target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_LOCK_RELEASE_1:
    case BUILT_IN_SYNC_LOCK_RELEASE_2:
    case BUILT_IN_SYNC_LOCK_RELEASE_4:
    case BUILT_IN_SYNC_LOCK_RELEASE_8:
    case BUILT_IN_SYNC_LOCK_RELEASE_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
      expand_builtin_sync_lock_release (mode, exp);
      return const0_rtx;

    case BUILT_IN_SYNC_SYNCHRONIZE:
      expand_builtin_sync_synchronize ();
      return const0_rtx;
    case BUILT_IN_ATOMIC_EXCHANGE_1:
    case BUILT_IN_ATOMIC_EXCHANGE_2:
    case BUILT_IN_ATOMIC_EXCHANGE_4:
    case BUILT_IN_ATOMIC_EXCHANGE_8:
    case BUILT_IN_ATOMIC_EXCHANGE_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
      target = expand_builtin_atomic_exchange (mode, exp, target);
      if (target)
        return target;
      break;

    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
      {
        unsigned int nargs, z;
        vec<tree, va_gc> *vec;

        mode
          = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
        target = expand_builtin_atomic_compare_exchange (mode, exp, target);
        if (target)
          return target;

        /* If this is turned into an external library call, the weak parameter
           must be dropped to match the expected parameter list.  */
        nargs = call_expr_nargs (exp);
        vec_alloc (vec, nargs - 1);
        for (z = 0; z < 3; z++)
          vec->quick_push (CALL_EXPR_ARG (exp, z));
        /* Skip the boolean weak parameter.  */
        for (z = 4; z < 6; z++)
          vec->quick_push (CALL_EXPR_ARG (exp, z));
        exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
        break;
      }
    case BUILT_IN_ATOMIC_LOAD_1:
    case BUILT_IN_ATOMIC_LOAD_2:
    case BUILT_IN_ATOMIC_LOAD_4:
    case BUILT_IN_ATOMIC_LOAD_8:
    case BUILT_IN_ATOMIC_LOAD_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
      target = expand_builtin_atomic_load (mode, exp, target);
      if (target)
        return target;
      break;

    case BUILT_IN_ATOMIC_STORE_1:
    case BUILT_IN_ATOMIC_STORE_2:
    case BUILT_IN_ATOMIC_STORE_4:
    case BUILT_IN_ATOMIC_STORE_8:
    case BUILT_IN_ATOMIC_STORE_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
      target = expand_builtin_atomic_store (mode, exp);
      if (target)
        return const0_rtx;
      break;
    case BUILT_IN_ATOMIC_ADD_FETCH_1:
    case BUILT_IN_ATOMIC_ADD_FETCH_2:
    case BUILT_IN_ATOMIC_ADD_FETCH_4:
    case BUILT_IN_ATOMIC_ADD_FETCH_8:
    case BUILT_IN_ATOMIC_ADD_FETCH_16:
      {
        enum built_in_function lib;
        mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
        lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
                                       (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
        target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
                                                 ignore, lib);
        if (target)
          return target;
        break;
      }
    case BUILT_IN_ATOMIC_SUB_FETCH_1:
    case BUILT_IN_ATOMIC_SUB_FETCH_2:
    case BUILT_IN_ATOMIC_SUB_FETCH_4:
    case BUILT_IN_ATOMIC_SUB_FETCH_8:
    case BUILT_IN_ATOMIC_SUB_FETCH_16:
      {
        enum built_in_function lib;
        mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
        lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
                                       (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
        target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
                                                 ignore, lib);
        if (target)
          return target;
        break;
      }
    case BUILT_IN_ATOMIC_AND_FETCH_1:
    case BUILT_IN_ATOMIC_AND_FETCH_2:
    case BUILT_IN_ATOMIC_AND_FETCH_4:
    case BUILT_IN_ATOMIC_AND_FETCH_8:
    case BUILT_IN_ATOMIC_AND_FETCH_16:
      {
        enum built_in_function lib;
        mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
        lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
                                       (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
        target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
                                                 ignore, lib);
        if (target)
          return target;
        break;
      }
    case BUILT_IN_ATOMIC_NAND_FETCH_1:
    case BUILT_IN_ATOMIC_NAND_FETCH_2:
    case BUILT_IN_ATOMIC_NAND_FETCH_4:
    case BUILT_IN_ATOMIC_NAND_FETCH_8:
    case BUILT_IN_ATOMIC_NAND_FETCH_16:
      {
        enum built_in_function lib;
        mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
        lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
                                       (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
        target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
                                                 ignore, lib);
        if (target)
          return target;
        break;
      }
    case BUILT_IN_ATOMIC_XOR_FETCH_1:
    case BUILT_IN_ATOMIC_XOR_FETCH_2:
    case BUILT_IN_ATOMIC_XOR_FETCH_4:
    case BUILT_IN_ATOMIC_XOR_FETCH_8:
    case BUILT_IN_ATOMIC_XOR_FETCH_16:
      {
        enum built_in_function lib;
        mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
        lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
                                       (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
        target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
                                                 ignore, lib);
        if (target)
          return target;
        break;
      }
    case BUILT_IN_ATOMIC_OR_FETCH_1:
    case BUILT_IN_ATOMIC_OR_FETCH_2:
    case BUILT_IN_ATOMIC_OR_FETCH_4:
    case BUILT_IN_ATOMIC_OR_FETCH_8:
    case BUILT_IN_ATOMIC_OR_FETCH_16:
      {
        enum built_in_function lib;
        mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
        lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
                                       (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
        target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
                                                 ignore, lib);
        if (target)
          return target;
        break;
      }
    case BUILT_IN_ATOMIC_FETCH_ADD_1:
    case BUILT_IN_ATOMIC_FETCH_ADD_2:
    case BUILT_IN_ATOMIC_FETCH_ADD_4:
    case BUILT_IN_ATOMIC_FETCH_ADD_8:
    case BUILT_IN_ATOMIC_FETCH_ADD_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
                                               ignore, BUILT_IN_NONE);
      if (target)
        return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_SUB_1:
    case BUILT_IN_ATOMIC_FETCH_SUB_2:
    case BUILT_IN_ATOMIC_FETCH_SUB_4:
    case BUILT_IN_ATOMIC_FETCH_SUB_8:
    case BUILT_IN_ATOMIC_FETCH_SUB_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
                                               ignore, BUILT_IN_NONE);
      if (target)
        return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_AND_1:
    case BUILT_IN_ATOMIC_FETCH_AND_2:
    case BUILT_IN_ATOMIC_FETCH_AND_4:
    case BUILT_IN_ATOMIC_FETCH_AND_8:
    case BUILT_IN_ATOMIC_FETCH_AND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
                                               ignore, BUILT_IN_NONE);
      if (target)
        return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_NAND_1:
    case BUILT_IN_ATOMIC_FETCH_NAND_2:
    case BUILT_IN_ATOMIC_FETCH_NAND_4:
    case BUILT_IN_ATOMIC_FETCH_NAND_8:
    case BUILT_IN_ATOMIC_FETCH_NAND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
                                               ignore, BUILT_IN_NONE);
      if (target)
        return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_XOR_1:
    case BUILT_IN_ATOMIC_FETCH_XOR_2:
    case BUILT_IN_ATOMIC_FETCH_XOR_4:
    case BUILT_IN_ATOMIC_FETCH_XOR_8:
    case BUILT_IN_ATOMIC_FETCH_XOR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
                                               ignore, BUILT_IN_NONE);
      if (target)
        return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_OR_1:
    case BUILT_IN_ATOMIC_FETCH_OR_2:
    case BUILT_IN_ATOMIC_FETCH_OR_4:
    case BUILT_IN_ATOMIC_FETCH_OR_8:
    case BUILT_IN_ATOMIC_FETCH_OR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
                                               ignore, BUILT_IN_NONE);
      if (target)
        return target;
      break;
    case BUILT_IN_ATOMIC_TEST_AND_SET:
      return expand_builtin_atomic_test_and_set (exp, target);

    case BUILT_IN_ATOMIC_CLEAR:
      return expand_builtin_atomic_clear (exp);

    case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
      return expand_builtin_atomic_always_lock_free (exp);

    case BUILT_IN_ATOMIC_IS_LOCK_FREE:
      target = expand_builtin_atomic_is_lock_free (exp);
      if (target)
        return target;
      break;

    case BUILT_IN_ATOMIC_THREAD_FENCE:
      expand_builtin_atomic_thread_fence (exp);
      return const0_rtx;

    case BUILT_IN_ATOMIC_SIGNAL_FENCE:
      expand_builtin_atomic_signal_fence (exp);
      return const0_rtx;

    case BUILT_IN_OBJECT_SIZE:
      return expand_builtin_object_size (exp);

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      target = expand_builtin_memory_chk (exp, target, mode, fcode);
      if (target)
        return target;
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
    case BUILT_IN_STRCAT_CHK:
    case BUILT_IN_STRNCAT_CHK:
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      maybe_emit_chk_warning (exp, fcode);
      break;

    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      maybe_emit_sprintf_chk_warning (exp, fcode);
      break;

    case BUILT_IN_FREE:
      if (warn_free_nonheap_object)
        maybe_emit_free_warning (exp);
      break;

    case BUILT_IN_THREAD_POINTER:
      return expand_builtin_thread_pointer (exp, target);

    case BUILT_IN_SET_THREAD_POINTER:
      expand_builtin_set_thread_pointer (exp);
      return const0_rtx;

    case BUILT_IN_ACC_ON_DEVICE:
      /* Do library call, if we failed to expand the builtin when
         folding.  */
      break;

    case BUILT_IN_GOACC_PARLEVEL_ID:
    case BUILT_IN_GOACC_PARLEVEL_SIZE:
      return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);

    case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
      return expand_speculation_safe_value (VOIDmode, exp, target, ignore);

    case BUILT_IN_SPECULATION_SAFE_VALUE_1:
    case BUILT_IN_SPECULATION_SAFE_VALUE_2:
    case BUILT_IN_SPECULATION_SAFE_VALUE_4:
    case BUILT_IN_SPECULATION_SAFE_VALUE_8:
    case BUILT_IN_SPECULATION_SAFE_VALUE_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
      return expand_speculation_safe_value (mode, exp, target, ignore);

    default:	/* just do library call, if unknown builtin */
      break;
    }

  /* The switch statement above can drop through to cause the function
     to be called normally.  */
  return expand_call (exp, target, ignore);
}
/* Determine whether a tree node represents a call to a built-in
   function.  If the tree T is a call to a built-in function with
   the right number of arguments of the appropriate types, return
   the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
   Otherwise the return value is END_BUILTINS.  */

enum built_in_function
builtin_mathfn_code (const_tree t)
{
  const_tree fndecl, arg, parmlist;
  const_tree argtype, parmtype;
  const_call_expr_arg_iterator iter;

  if (TREE_CODE (t) != CALL_EXPR)
    return END_BUILTINS;

  fndecl = get_callee_fndecl (t);
  if (fndecl == NULL_TREE || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
    return END_BUILTINS;

  parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  init_const_call_expr_arg_iterator (t, &iter);
  for (; parmlist; parmlist = TREE_CHAIN (parmlist))
    {
      /* If a function doesn't take a variable number of arguments,
         the last element in the list will have type `void'.  */
      parmtype = TREE_VALUE (parmlist);
      if (VOID_TYPE_P (parmtype))
        {
          if (more_const_call_expr_args_p (&iter))
            return END_BUILTINS;
          return DECL_FUNCTION_CODE (fndecl);
        }

      if (! more_const_call_expr_args_p (&iter))
        return END_BUILTINS;

      arg = next_const_call_expr_arg (&iter);
      argtype = TREE_TYPE (arg);

      if (SCALAR_FLOAT_TYPE_P (parmtype))
        {
          if (! SCALAR_FLOAT_TYPE_P (argtype))
            return END_BUILTINS;
        }
      else if (COMPLEX_FLOAT_TYPE_P (parmtype))
        {
          if (! COMPLEX_FLOAT_TYPE_P (argtype))
            return END_BUILTINS;
        }
      else if (POINTER_TYPE_P (parmtype))
        {
          if (! POINTER_TYPE_P (argtype))
            return END_BUILTINS;
        }
      else if (INTEGRAL_TYPE_P (parmtype))
        {
          if (! INTEGRAL_TYPE_P (argtype))
            return END_BUILTINS;
        }
      else
        return END_BUILTINS;
    }

  /* Variable-length argument list.  */
  return DECL_FUNCTION_CODE (fndecl);
}
/* Fold a call to __builtin_constant_p, if we know its argument ARG will
   evaluate to a constant.  */

static tree
fold_builtin_constant_p (tree arg)
{
  /* We return 1 for a numeric type that's known to be a constant
     value at compile-time or for an aggregate type that's a
     literal constant.  */
  STRIP_NOPS (arg);

  /* If we know this is a constant, emit the constant of one.  */
  if (CONSTANT_CLASS_P (arg)
      || (TREE_CODE (arg) == CONSTRUCTOR
          && TREE_CONSTANT (arg)))
    return integer_one_node;
  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (arg, 0);
      if (TREE_CODE (op) == STRING_CST
          || (TREE_CODE (op) == ARRAY_REF
              && integer_zerop (TREE_OPERAND (op, 1))
              && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
        return integer_one_node;
    }

  /* If this expression has side effects, show we don't know it to be a
     constant.  Likewise if it's a pointer or aggregate type since in
     those case we only want literals, since those are only optimized
     when generating RTL, not later.
     And finally, if we are compiling an initializer, not code, we
     need to return a definite result now; there's not going to be any
     more optimization done.  */
  if (TREE_SIDE_EFFECTS (arg)
      || AGGREGATE_TYPE_P (TREE_TYPE (arg))
      || POINTER_TYPE_P (TREE_TYPE (arg))
      || cfun == 0
      || folding_initializer
      || force_folding_builtin_constant_p)
    return integer_zero_node;

  return NULL_TREE;
}
/* Create builtin_expect or builtin_expect_with_probability
   with PRED and EXPECTED as its arguments and return it as a truthvalue.
   Fortran FE can also produce builtin_expect with PREDICTOR as third argument.
   builtin_expect_with_probability instead uses third argument as PROBABILITY
   value.  */

static tree
build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
                                tree predictor, tree probability)
{
  tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;

  fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
                              : BUILT_IN_EXPECT_WITH_PROBABILITY);
  arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
  ret_type = TREE_TYPE (TREE_TYPE (fn));
  pred_type = TREE_VALUE (arg_types);
  expected_type = TREE_VALUE (TREE_CHAIN (arg_types));

  pred = fold_convert_loc (loc, pred_type, pred);
  expected = fold_convert_loc (loc, expected_type, expected);

  if (probability)
    call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
  else
    call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
                                     predictor);

  return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
                 build_int_cst (ret_type, 0));
}
/* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3.  Return
   NULL_TREE if no simplification is possible.  */

tree
fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
                     tree arg3)
{
  tree inner, fndecl, inner_arg0;
  enum tree_code code;

  /* Distribute the expected value over short-circuiting operators.
     See through the cast from truthvalue_type_node to long.  */
  inner_arg0 = arg0;
  while (CONVERT_EXPR_P (inner_arg0)
         && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
         && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
    inner_arg0 = TREE_OPERAND (inner_arg0, 0);

  /* If this is a builtin_expect within a builtin_expect keep the
     inner one.  See through a comparison against a constant.  It
     might have been added to create a thruthvalue.  */
  inner = inner_arg0;

  if (COMPARISON_CLASS_P (inner)
      && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
    inner = TREE_OPERAND (inner, 0);

  if (TREE_CODE (inner) == CALL_EXPR
      && (fndecl = get_callee_fndecl (inner))
      && (fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
          || fndecl_built_in_p (fndecl, BUILT_IN_EXPECT_WITH_PROBABILITY)))
    return arg0;

  inner = inner_arg0;
  code = TREE_CODE (inner);
  if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
    {
      tree op0 = TREE_OPERAND (inner, 0);
      tree op1 = TREE_OPERAND (inner, 1);
      arg1 = save_expr (arg1);

      op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
      op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
      inner = build2 (code, TREE_TYPE (inner), op0, op1);

      return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
    }

  /* If the argument isn't invariant then there's nothing else we can do.  */
  if (!TREE_CONSTANT (inner_arg0))
    return NULL_TREE;

  /* If we expect that a comparison against the argument will fold to
     a constant return the constant.  In practice, this means a true
     constant or the address of a non-weak symbol.  */
  inner = inner_arg0;
  STRIP_NOPS (inner);
  if (TREE_CODE (inner) == ADDR_EXPR)
    {
      do
        {
          inner = TREE_OPERAND (inner, 0);
        }
      while (TREE_CODE (inner) == COMPONENT_REF
             || TREE_CODE (inner) == ARRAY_REF);
      if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
        return NULL_TREE;
    }

  /* Otherwise, ARG0 already has the proper type for the return value.  */
  return arg0;
}
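/* Illustrative sketch (editor's note, not part of the GCC sources): the
   distribution step above rewrites

     __builtin_expect (a && b, 1)

   into roughly

     (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   so each half of the short-circuit carries its own prediction; the
   save_expr on ARG1 keeps the expected value from being evaluated twice.  */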
/* Fold a call to __builtin_classify_type with argument ARG.  */

static tree
fold_builtin_classify_type (tree arg)
{
  if (arg == 0)
    return build_int_cst (integer_type_node, no_type_class);

  return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
}
/* Fold a call to __builtin_strlen with argument ARG.  */

static tree
fold_builtin_strlen (location_t loc, tree type, tree arg)
{
  if (!validate_arg (arg, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      c_strlen_data data;
      memset (&data, 0, sizeof (c_strlen_data));
      tree len = c_strlen (arg, 0, &data);

      if (len)
        return fold_convert_loc (loc, type, len);

      if (!data.decl)
        c_strlen (arg, 1, &data);

      if (data.decl)
        {
          /* Use the location of the unterminated array if available.  */
          if (EXPR_HAS_LOCATION (arg))
            loc = EXPR_LOCATION (arg);
          else if (loc == UNKNOWN_LOCATION)
            loc = input_location;
          warn_string_no_nul (loc, "strlen", arg, data.decl);
        }

      return NULL_TREE;
    }
}
/* Fold a call to __builtin_inf or __builtin_huge_val.  */

static tree
fold_builtin_inf (location_t loc, tree type, int warn)
{
  REAL_VALUE_TYPE real;

  /* __builtin_inff is intended to be usable to define INFINITY on all
     targets.  If an infinity is not available, INFINITY expands "to a
     positive constant of type float that overflows at translation
     time", footnote "In this case, using INFINITY will violate the
     constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
     Thus we pedwarn to ensure this constraint violation is
     diagnosed.  */
  if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
    pedwarn (loc, 0, "target format does not support infinity");

  real_inf (&real);
  return build_real (type, real);
}
/* Fold function call to builtin sincos, sincosf, or sincosl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_sincos (location_t loc,
                     tree arg0, tree arg1, tree arg2)
{
  tree type;
  tree fndecl, call = NULL_TREE;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  type = TREE_TYPE (arg0);

  /* Calculate the result when the argument is a constant.  */
  built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
  if (fn == END_BUILTINS)
    return NULL_TREE;

  /* Canonicalize sincos to cexpi.  */
  if (TREE_CODE (arg0) == REAL_CST)
    {
      tree complex_type = build_complex_type (type);
      call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
    }
  if (!call)
    {
      if (!targetm.libc_has_function (function_c99_math_complex)
          || !builtin_decl_implicit_p (fn))
        return NULL_TREE;
      fndecl = builtin_decl_explicit (fn);
      call = build_call_expr_loc (loc, fndecl, 1, arg0);
      call = builtin_save_expr (call);
    }

  tree ptype = build_pointer_type (type);
  arg1 = fold_convert (ptype, arg1);
  arg2 = fold_convert (ptype, arg2);
  return build2 (COMPOUND_EXPR, void_type_node,
                 build2 (MODIFY_EXPR, void_type_node,
                         build_fold_indirect_ref_loc (loc, arg1),
                         fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
                 build2 (MODIFY_EXPR, void_type_node,
                         build_fold_indirect_ref_loc (loc, arg2),
                         fold_build1_loc (loc, REALPART_EXPR, type, call)));
}
/* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
{
  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
                                  arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
        = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1
        = fold_convert_loc (loc, integer_type_node,
                            build1 (INDIRECT_REF, cst_uchar_node,
                                    fold_convert_loc (loc,
                                                      cst_uchar_ptr_node,
                                                      arg1)));
      tree ind2
        = fold_convert_loc (loc, integer_type_node,
                            build1 (INDIRECT_REF, cst_uchar_node,
                                    fold_convert_loc (loc,
                                                      cst_uchar_ptr_node,
                                                      arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
/* Fold a call to builtin isascii with argument ARG.  */

static tree
fold_builtin_isascii (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      /* Transform isascii(c) -> ((c & ~0x7f) == 0).  */
      arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
                         build_int_cst (integer_type_node,
                                        ~ (unsigned HOST_WIDE_INT) 0x7f));
      return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
                              arg, integer_zero_node);
    }
}
/* Fold a call to builtin toascii with argument ARG.  */

static tree
fold_builtin_toascii (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Transform toascii(c) -> (c & 0x7f).  */
  return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
                          build_int_cst (integer_type_node, 0x7f));
}
/* Fold a call to builtin isdigit with argument ARG.  */

static tree
fold_builtin_isdigit (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9.  */
      /* According to the C standard, isdigit is unaffected by locale.
         However, it definitely is affected by the target character set.  */
      unsigned HOST_WIDE_INT target_digit0
        = lang_hooks.to_target_charset ('0');

      if (target_digit0 == 0)
        return NULL_TREE;

      arg = fold_convert_loc (loc, unsigned_type_node, arg);
      arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
                         build_int_cst (unsigned_type_node, target_digit0));
      return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
                              build_int_cst (unsigned_type_node, 9));
    }
}
/* Fold a call to fabs, fabsf or fabsl with argument ARG.  */

static tree
fold_builtin_fabs (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  arg = fold_convert_loc (loc, type, arg);
  return fold_build1_loc (loc, ABS_EXPR, type, arg);
}
/* Fold a call to abs, labs, llabs or imaxabs with argument ARG.  */

static tree
fold_builtin_abs (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  arg = fold_convert_loc (loc, type, arg);
  return fold_build1_loc (loc, ABS_EXPR, type, arg);
}
/* Fold a call to builtin carg(a+bi) -> atan2(b,a).  */

static tree
fold_builtin_carg (location_t loc, tree arg, tree type)
{
  if (validate_arg (arg, COMPLEX_TYPE)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
    {
      tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);

      if (atan2_fn)
        {
          tree new_arg = builtin_save_expr (arg);
          tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
          tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
          return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
        }
    }

  return NULL_TREE;
}
/* Fold a call to builtin frexp, we can assume the base is 2.  */

static tree
fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      tree frac, exp;

      switch (value->cl)
        {
        case rvc_zero:
          /* For +-0, return (*exp = 0, +-0).  */
          exp = integer_zero_node;
          frac = arg0;
          break;
        case rvc_nan:
        case rvc_inf:
          /* For +-NaN or +-Inf, *exp is unspecified, return arg0.  */
          return omit_one_operand_loc (loc, rettype, arg0, arg1);
        case rvc_normal:
          {
            /* Since the frexp function always expects base 2, and in
               GCC normalized significands are already in the range
               [0.5, 1.0), we have exactly what frexp wants.  */
            REAL_VALUE_TYPE frac_rvt = *value;
            SET_REAL_EXP (&frac_rvt, 0);
            frac = build_real (rettype, frac_rvt);
            exp = build_int_cst (integer_type_node, REAL_EXP (value));
          }
          break;
        default:
          gcc_unreachable ();
        }

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
    }

  return NULL_TREE;
}
/* Fold a call to builtin modf.  */

static tree
fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      REAL_VALUE_TYPE trunc, frac;

      switch (value->cl)
        {
        case rvc_nan:
        case rvc_zero:
          /* For +-NaN or +-0, return (*arg1 = arg0, arg0).  */
          trunc = frac = *value;
          break;
        case rvc_inf:
          /* For +-Inf, return (*arg1 = arg0, +-0).  */
          frac = dconst0;
          frac.sign = value->sign;
          trunc = *value;
          break;
        case rvc_normal:
          /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)).  */
          real_trunc (&trunc, VOIDmode, value);
          real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
          /* If the original number was negative and already
             integral, then the fractional part is -0.0.  */
          if (value->sign && frac.cl == rvc_zero)
            frac.sign = value->sign;
          break;
        }

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
                              build_real (rettype, trunc));
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
                              build_real (rettype, frac));
    }

  return NULL_TREE;
}
/* Given a location LOC, an interclass builtin function decl FNDECL
   and its single argument ARG, return an folded expression computing
   the same, or NULL_TREE if we either couldn't or didn't want to fold
   (the latter happen if there's an RTL instruction available).  */

static tree
fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
{
  machine_mode mode;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
    return NULL_TREE;

  mode = TYPE_MODE (TREE_TYPE (arg));

  bool is_ibm_extended = MODE_COMPOSITE_P (mode);

  /* If there is no optab, try generic code.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
      tree result;

    CASE_FLT_FN (BUILT_IN_ISINF):
      {
        /* isinf(x) -> isgreater(fabs(x),DBL_MAX).  */
        tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
        tree type = TREE_TYPE (arg);
        REAL_VALUE_TYPE r;
        char buf[128];

        if (is_ibm_extended)
          {
            /* NaN and Inf are encoded in the high-order double value
               only.  The low-order value is not significant.  */
            type = double_type_node;
            mode = DFmode;
            arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
          }
        get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
        real_from_string (&r, buf);
        result = build_call_expr (isgr_fn, 2,
                                  fold_build1_loc (loc, ABS_EXPR, type, arg),
                                  build_real (type, r));
        return result;
      }
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_ISFINITE:
      {
        /* isfinite(x) -> islessequal(fabs(x),DBL_MAX).  */
        tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
        tree type = TREE_TYPE (arg);
        REAL_VALUE_TYPE r;
        char buf[128];

        if (is_ibm_extended)
          {
            /* NaN and Inf are encoded in the high-order double value
               only.  The low-order value is not significant.  */
            type = double_type_node;
            mode = DFmode;
            arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
          }
        get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
        real_from_string (&r, buf);
        result = build_call_expr (isle_fn, 2,
                                  fold_build1_loc (loc, ABS_EXPR, type, arg),
                                  build_real (type, r));
        /*result = fold_build2_loc (loc, UNGT_EXPR,
                                  TREE_TYPE (TREE_TYPE (fndecl)),
                                  fold_build1_loc (loc, ABS_EXPR, type, arg),
                                  build_real (type, r));
        result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
                                  TREE_TYPE (TREE_TYPE (fndecl)),
                                  result);*/
        return result;
      }
    case BUILT_IN_ISNORMAL:
      {
        /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
           islessequal(fabs(x),DBL_MAX).  */
        tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
        tree type = TREE_TYPE (arg);
        tree orig_arg, max_exp, min_exp;
        machine_mode orig_mode = mode;
        REAL_VALUE_TYPE rmax, rmin;
        char buf[128];

        orig_arg = arg = builtin_save_expr (arg);
        if (is_ibm_extended)
          {
            /* Use double to test the normal range of IBM extended
               precision.  Emin for IBM extended precision is
               different to emin for IEEE double, being 53 higher
               since the low double exponent is at least 53 lower
               than the high double exponent.  */
            type = double_type_node;
            mode = DFmode;
            arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
          }
        arg = fold_build1_loc (loc, ABS_EXPR, type, arg);

        get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
        real_from_string (&rmax, buf);
        sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
        real_from_string (&rmin, buf);
        max_exp = build_real (type, rmax);
        min_exp = build_real (type, rmin);

        max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
        if (is_ibm_extended)
          {
            /* Testing the high end of the range is done just using
               the high double, using the same test as isfinite().
               For the subnormal end of the range we first test the
               high double, then if its magnitude is equal to the
               limit of 0x1p-969, we test whether the low double is
               non-zero and opposite sign to the high double.  */
            tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
            tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
            tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
            tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
                                       arg, min_exp);
            tree as_complex = build1 (VIEW_CONVERT_EXPR,
                                      complex_double_type_node, orig_arg);
            tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
            tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
            tree zero = build_real (type, dconst0);
            tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
            tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
            tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
            tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
                                      fold_build3 (COND_EXPR,
                                                   integer_type_node,
                                                   hilt, logt, lolt));
            eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
                                  eq_min, ok_lo);
            min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
                                   gt_min, eq_min);
          }
        else
          {
            tree const isge_fn
              = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
            min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
          }
        result = fold_build2 (BIT_AND_EXPR, integer_type_node,
                              max_exp, min_exp);
        return result;
      }
    default:
      break;
    }

  return NULL_TREE;
}
/* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
   ARG is the argument for the call.  */

static tree
fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  switch (builtin_index)
    {
    case BUILT_IN_ISINF:
      if (!HONOR_INFINITIES (arg))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      return NULL_TREE;

    case BUILT_IN_ISINF_SIGN:
      {
        /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
        /* In a boolean context, GCC will fold the inner COND_EXPR to
           1.  So e.g. "if (isinf_sign(x))" would be folded to just
           "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
        tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
        tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
        tree tmp = NULL_TREE;

        arg = builtin_save_expr (arg);

        if (signbit_fn && isinf_fn)
          {
            tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
            tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);

            signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
                                            signbit_call, integer_zero_node);
            isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
                                          isinf_call, integer_zero_node);

            tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
                                   integer_minus_one_node, integer_one_node);
            tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
                                   isinf_call, tmp,
                                   integer_zero_node);
          }

        return tmp;
      }

    case BUILT_IN_ISFINITE:
      if (!HONOR_NANS (arg)
          && !HONOR_INFINITIES (arg))
        return omit_one_operand_loc (loc, type, integer_one_node, arg);

      return NULL_TREE;

    case BUILT_IN_ISNAN:
      if (!HONOR_NANS (arg))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      {
        bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
        if (is_ibm_extended)
          {
            /* NaN and Inf are encoded in the high-order double value
               only.  The low-order value is not significant.  */
            arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
          }
      }
      arg = builtin_save_expr (arg);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);

    default:
      gcc_unreachable ();
    }
}
/* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
   This builtin will generate code to return the appropriate floating
   point classification depending on the value of the floating point
   number passed in.  The possible return values must be supplied as
   int arguments to the call in the following order: FP_NAN, FP_INFINITE,
   FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipses is for exactly
   one floating point argument which is "type generic".  */

static tree
fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
{
  tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
    arg, type, res, tmp;
  machine_mode mode;
  REAL_VALUE_TYPE r;
  char buf[128];

  /* Verify the required arguments in the original call.  */
  if (nargs != 6
      || !validate_arg (args[0], INTEGER_TYPE)
      || !validate_arg (args[1], INTEGER_TYPE)
      || !validate_arg (args[2], INTEGER_TYPE)
      || !validate_arg (args[3], INTEGER_TYPE)
      || !validate_arg (args[4], INTEGER_TYPE)
      || !validate_arg (args[5], REAL_TYPE))
    return NULL_TREE;

  fp_nan = args[0];
  fp_infinite = args[1];
  fp_normal = args[2];
  fp_subnormal = args[3];
  fp_zero = args[4];
  arg = args[5];
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (type);
  arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));

  /* fpclassify(x) ->
       isnan(x) ? FP_NAN :
         (fabs(x) == Inf ? FP_INFINITE :
           (fabs(x) >= DBL_MIN ? FP_NORMAL :
             (x == 0 ? FP_ZERO : FP_SUBNORMAL))).  */

  tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
                         build_real (type, dconst0));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
                         tmp, fp_zero, fp_subnormal);

  sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
  real_from_string (&r, buf);
  tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
                         arg, build_real (type, r));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);

  if (HONOR_INFINITIES (mode))
    {
      real_inf (&r);
      tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
                             build_real (type, r));
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
                             fp_infinite, res);
    }

  if (HONOR_NANS (mode))
    {
      tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
    }

  return res;
}
/* Fold a call to an unordered comparison function such as
   __builtin_isgreater().  FNDECL is the FUNCTION_DECL for the function
   being called and ARG0 and ARG1 are the arguments for the call.
   UNORDERED_CODE and ORDERED_CODE are comparison codes that give
   the opposite of the desired result.  UNORDERED_CODE is used
   for modes that can hold NaNs and ORDERED_CODE is used for
   the rest.  */

static tree
fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
                            enum tree_code unordered_code,
                            enum tree_code ordered_code)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum tree_code code;
  tree type0, type1;
  enum tree_code code0, code1;
  tree cmp_type = NULL_TREE;

  type0 = TREE_TYPE (arg0);
  type1 = TREE_TYPE (arg1);

  code0 = TREE_CODE (type0);
  code1 = TREE_CODE (type1);

  if (code0 == REAL_TYPE && code1 == REAL_TYPE)
    /* Choose the wider of two real types.  */
    cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
      ? type0 : type1;
  else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
    cmp_type = type0;
  else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
    cmp_type = type1;

  arg0 = fold_convert_loc (loc, cmp_type, arg0);
  arg1 = fold_convert_loc (loc, cmp_type, arg1);

  if (unordered_code == UNORDERED_EXPR)
    {
      if (!HONOR_NANS (arg0))
        return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
    }

  code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
  return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
                          fold_build2_loc (loc, code, type, arg0, arg1));
}
/* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
   arithmetics if it can never overflow, or into internal functions that
   return both result of arithmetics and overflowed boolean flag in
   a complex integer result, or some other check for overflow.
   Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
   checking part of that.  */

static tree
fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
                             tree arg0, tree arg1, tree arg2)
{
  enum internal_fn ifn = IFN_LAST;
  /* The code of the expression corresponding to the type-generic
     built-in, or ERROR_MARK for the type-specific ones.  */
  enum tree_code opcode = ERROR_MARK;
  bool ovf_only = false;

  switch (fcode)
    {
    case BUILT_IN_ADD_OVERFLOW_P:
      ovf_only = true;
      /* FALLTHRU */
    case BUILT_IN_ADD_OVERFLOW:
      opcode = PLUS_EXPR;
      /* FALLTHRU */
    case BUILT_IN_SADD_OVERFLOW:
    case BUILT_IN_SADDL_OVERFLOW:
    case BUILT_IN_SADDLL_OVERFLOW:
    case BUILT_IN_UADD_OVERFLOW:
    case BUILT_IN_UADDL_OVERFLOW:
    case BUILT_IN_UADDLL_OVERFLOW:
      ifn = IFN_ADD_OVERFLOW;
      break;
    case BUILT_IN_SUB_OVERFLOW_P:
      ovf_only = true;
      /* FALLTHRU */
    case BUILT_IN_SUB_OVERFLOW:
      opcode = MINUS_EXPR;
      /* FALLTHRU */
    case BUILT_IN_SSUB_OVERFLOW:
    case BUILT_IN_SSUBL_OVERFLOW:
    case BUILT_IN_SSUBLL_OVERFLOW:
    case BUILT_IN_USUB_OVERFLOW:
    case BUILT_IN_USUBL_OVERFLOW:
    case BUILT_IN_USUBLL_OVERFLOW:
      ifn = IFN_SUB_OVERFLOW;
      break;
    case BUILT_IN_MUL_OVERFLOW_P:
      ovf_only = true;
      /* FALLTHRU */
    case BUILT_IN_MUL_OVERFLOW:
      opcode = MULT_EXPR;
      /* FALLTHRU */
    case BUILT_IN_SMUL_OVERFLOW:
    case BUILT_IN_SMULL_OVERFLOW:
    case BUILT_IN_SMULLL_OVERFLOW:
    case BUILT_IN_UMUL_OVERFLOW:
    case BUILT_IN_UMULL_OVERFLOW:
    case BUILT_IN_UMULLL_OVERFLOW:
      ifn = IFN_MUL_OVERFLOW;
      break;
    default:
      gcc_unreachable ();
    }

  /* For the "generic" overloads, the first two arguments can have different
     types and the last argument determines the target type to use to check
     for overflow.  The arguments of the other overloads all have the same
     type.  */
  tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));

  /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
     arguments are constant, attempt to fold the built-in call into a constant
     expression indicating whether or not it detected an overflow.  */
  if (ovf_only
      && TREE_CODE (arg0) == INTEGER_CST
      && TREE_CODE (arg1) == INTEGER_CST)
    /* Perform the computation in the target type and check for overflow.  */
    return omit_one_operand_loc (loc, boolean_type_node,
                                 arith_overflowed_p (opcode, type, arg0, arg1)
                                 ? boolean_true_node : boolean_false_node,
                                 arg2);

  tree ctype = build_complex_type (type);
  tree call = build_call_expr_internal_loc (loc, ifn, ctype,
                                            2, arg0, arg1);
  tree tgt = save_expr (call);
  tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
  tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
  ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);

  if (ovf_only)
    return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);

  tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
  tree store
    = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
  return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
}
/* Fold a call to __builtin_FILE to a constant string.  */

static inline tree
fold_builtin_FILE (location_t loc)
{
  if (const char *fname = LOCATION_FILE (loc))
    {
      /* The documentation says this builtin is equivalent to the preprocessor
         __FILE__ macro so it appears appropriate to use the same file prefix
         mappings.  */
      fname = remap_macro_filename (fname);
      return build_string_literal (strlen (fname) + 1, fname);
    }

  return build_string_literal (1, "");
}

/* Fold a call to __builtin_FUNCTION to a constant string.  */

static inline tree
fold_builtin_FUNCTION ()
{
  const char *name = "";

  if (current_function_decl)
    name = lang_hooks.decl_printable_name (current_function_decl, 0);

  return build_string_literal (strlen (name) + 1, name);
}

/* Fold a call to __builtin_LINE to an integer constant.  */

static inline tree
fold_builtin_LINE (location_t loc, tree type)
{
  return build_int_cst (type, LOCATION_LINE (loc));
}
/* Fold a call to built-in function FNDECL with 0 arguments.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_0 (location_t loc, tree fndecl)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  switch (fcode)
    {
    case BUILT_IN_FILE:
      return fold_builtin_FILE (loc);

    case BUILT_IN_FUNCTION:
      return fold_builtin_FUNCTION ();

    case BUILT_IN_LINE:
      return fold_builtin_LINE (loc, type);

    CASE_FLT_FN (BUILT_IN_INF):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
    case BUILT_IN_INFD32:
    case BUILT_IN_INFD64:
    case BUILT_IN_INFD128:
      return fold_builtin_inf (loc, type, true);

    CASE_FLT_FN (BUILT_IN_HUGE_VAL):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
      return fold_builtin_inf (loc, type, false);

    case BUILT_IN_CLASSIFY_TYPE:
      return fold_builtin_classify_type (NULL_TREE);

    default:
      break;
    }
  return NULL_TREE;
}
/* Fold a call to built-in function FNDECL with 1 argument, ARG0.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (TREE_CODE (arg0) == ERROR_MARK)
    return NULL_TREE;

  if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
    return ret;

  switch (fcode)
    {
    case BUILT_IN_CONSTANT_P:
      {
        tree val = fold_builtin_constant_p (arg0);

        /* Gimplification will pull the CALL_EXPR for the builtin out of
           an if condition.  When not optimizing, we'll not CSE it back.
           To avoid link error types of regressions, return false now.  */
        if (!val && !optimize)
          val = integer_zero_node;

        return val;
      }

    case BUILT_IN_CLASSIFY_TYPE:
      return fold_builtin_classify_type (arg0);

    case BUILT_IN_STRLEN:
      return fold_builtin_strlen (loc, type, arg0);

    CASE_FLT_FN (BUILT_IN_FABS):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
    case BUILT_IN_FABSD32:
    case BUILT_IN_FABSD64:
    case BUILT_IN_FABSD128:
      return fold_builtin_fabs (loc, arg0, type);

    case BUILT_IN_ABS:
    case BUILT_IN_LABS:
    case BUILT_IN_LLABS:
    case BUILT_IN_IMAXABS:
      return fold_builtin_abs (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_CONJ):
      if (validate_arg (arg0, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
        return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
      break;

    CASE_FLT_FN (BUILT_IN_CREAL):
      if (validate_arg (arg0, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
        return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
      break;

    CASE_FLT_FN (BUILT_IN_CIMAG):
      if (validate_arg (arg0, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
        return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
      break;

    CASE_FLT_FN (BUILT_IN_CARG):
      return fold_builtin_carg (loc, arg0, type);

    case BUILT_IN_ISASCII:
      return fold_builtin_isascii (loc, arg0);

    case BUILT_IN_TOASCII:
      return fold_builtin_toascii (loc, arg0);

    case BUILT_IN_ISDIGIT:
      return fold_builtin_isdigit (loc, arg0);

    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISFINITE:
      {
        tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
        if (ret)
          return ret;
        return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
      }

    CASE_FLT_FN (BUILT_IN_ISINF):
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      {
        tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
        if (ret)
          return ret;
        return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
      }

    case BUILT_IN_ISNORMAL:
      return fold_builtin_interclass_mathfn (loc, fndecl, arg0);

    case BUILT_IN_ISINF_SIGN:
      return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);

    CASE_FLT_FN (BUILT_IN_ISNAN):
    case BUILT_IN_ISNAND32:
    case BUILT_IN_ISNAND64:
    case BUILT_IN_ISNAND128:
      return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);

    case BUILT_IN_FREE:
      if (integer_zerop (arg0))
        return build_empty_stmt (loc);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (TREE_CODE (arg0) == ERROR_MARK
      || TREE_CODE (arg1) == ERROR_MARK)
    return NULL_TREE;

  if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
    return ret;

  switch (fcode)
    {
    CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
    CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
      if (validate_arg (arg0, REAL_TYPE)
          && validate_arg (arg1, POINTER_TYPE))
        return do_mpfr_lgamma_r (arg0, arg1, type);
      break;

    CASE_FLT_FN (BUILT_IN_FREXP):
      return fold_builtin_frexp (loc, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_MODF):
      return fold_builtin_modf (loc, arg0, arg1, type);

    case BUILT_IN_STRSPN:
      return fold_builtin_strspn (loc, arg0, arg1);

    case BUILT_IN_STRCSPN:
      return fold_builtin_strcspn (loc, arg0, arg1);

    case BUILT_IN_STRPBRK:
      return fold_builtin_strpbrk (loc, arg0, arg1, type);

    case BUILT_IN_EXPECT:
      return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);

    case BUILT_IN_ISGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNLE_EXPR, LE_EXPR);
    case BUILT_IN_ISGREATEREQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNLT_EXPR, LT_EXPR);
    case BUILT_IN_ISLESS:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNGE_EXPR, GE_EXPR);
    case BUILT_IN_ISLESSEQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNGT_EXPR, GT_EXPR);
    case BUILT_IN_ISLESSGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNEQ_EXPR, EQ_EXPR);
    case BUILT_IN_ISUNORDERED:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNORDERED_EXPR,
                                         NOP_EXPR);

      /* We do the folding for va_start in the expander.  */
    case BUILT_IN_VA_START:
      break;

    case BUILT_IN_OBJECT_SIZE:
      return fold_builtin_object_size (arg0, arg1);

    case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
      return fold_builtin_atomic_always_lock_free (arg0, arg1);

    case BUILT_IN_ATOMIC_IS_LOCK_FREE:
      return fold_builtin_atomic_is_lock_free (arg0, arg1);

    default:
      break;
    }
  return NULL_TREE;
}
/* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
   and ARG2.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_3 (location_t loc, tree fndecl,
                tree arg0, tree arg1, tree arg2)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (TREE_CODE (arg0) == ERROR_MARK
      || TREE_CODE (arg1) == ERROR_MARK
      || TREE_CODE (arg2) == ERROR_MARK)
    return NULL_TREE;

  if (tree ret = fold_const_call (as_combined_fn (fcode), type,
                                  arg0, arg1, arg2))
    return ret;

  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_SINCOS):
      return fold_builtin_sincos (loc, arg0, arg1, arg2);

    CASE_FLT_FN (BUILT_IN_REMQUO):
      if (validate_arg (arg0, REAL_TYPE)
          && validate_arg (arg1, REAL_TYPE)
          && validate_arg (arg2, POINTER_TYPE))
        return do_mpfr_remquo (arg0, arg1, arg2);
      break;

    case BUILT_IN_MEMCMP:
      return fold_builtin_memcmp (loc, arg0, arg1, arg2);

    case BUILT_IN_EXPECT:
      return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);

    case BUILT_IN_EXPECT_WITH_PROBABILITY:
      return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg2);

    case BUILT_IN_ADD_OVERFLOW:
    case BUILT_IN_SUB_OVERFLOW:
    case BUILT_IN_MUL_OVERFLOW:
    case BUILT_IN_ADD_OVERFLOW_P:
    case BUILT_IN_SUB_OVERFLOW_P:
    case BUILT_IN_MUL_OVERFLOW_P:
    case BUILT_IN_SADD_OVERFLOW:
    case BUILT_IN_SADDL_OVERFLOW:
    case BUILT_IN_SADDLL_OVERFLOW:
    case BUILT_IN_SSUB_OVERFLOW:
    case BUILT_IN_SSUBL_OVERFLOW:
    case BUILT_IN_SSUBLL_OVERFLOW:
    case BUILT_IN_SMUL_OVERFLOW:
    case BUILT_IN_SMULL_OVERFLOW:
    case BUILT_IN_SMULLL_OVERFLOW:
    case BUILT_IN_UADD_OVERFLOW:
    case BUILT_IN_UADDL_OVERFLOW:
    case BUILT_IN_UADDLL_OVERFLOW:
    case BUILT_IN_USUB_OVERFLOW:
    case BUILT_IN_USUBL_OVERFLOW:
    case BUILT_IN_USUBLL_OVERFLOW:
    case BUILT_IN_UMUL_OVERFLOW:
    case BUILT_IN_UMULL_OVERFLOW:
    case BUILT_IN_UMULLL_OVERFLOW:
      return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);

    default:
      break;
    }
  return NULL_TREE;
}
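/* Illustrative sketch (ours): with constant operands the overflow
   builtins dispatched above can fold entirely at compile time, e.g.

     int r;
     bool ovf = __builtin_add_overflow (INT_MAX, 1, &r);

   fold_builtin_arith_overflow can reduce the _P variants to a constant
   true/false, so a branch guarding on the overflow flag may disappear
   altogether.  */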
/* Fold a call to built-in function FNDECL.  ARGS is an array of NARGS
   arguments.  IGNORE is true if the result of the
   function call is ignored.  This function returns NULL_TREE if no
   simplification was possible.  */

tree
fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
{
  tree ret = NULL_TREE;

  switch (nargs)
    {
    case 0:
      ret = fold_builtin_0 (loc, fndecl);
      break;
    case 1:
      ret = fold_builtin_1 (loc, fndecl, args[0]);
      break;
    case 2:
      ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
      break;
    case 3:
      ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
      break;
    default:
      ret = fold_builtin_varargs (loc, fndecl, args, nargs);
      break;
    }
  if (ret)
    {
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      return ret;
    }
  return NULL_TREE;
}
/* Construct a new CALL_EXPR to FNDECL using the tail of the argument
   list ARGS along with N new arguments in NEWARGS.  SKIP is the number
   of arguments in ARGS to be omitted.  OLDNARGS is the number of
   elements in ARGS.  */

static tree
rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
                          int skip, tree fndecl, int n, va_list newargs)
{
  int nargs = oldnargs - skip + n;
  tree *buffer;

  if (n > 0)
    {
      int i, j;

      buffer = XALLOCAVEC (tree, nargs);
      for (i = 0; i < n; i++)
        buffer[i] = va_arg (newargs, tree);
      for (j = skip; j < oldnargs; j++, i++)
        buffer[i] = args[j];
    }
  else
    buffer = args + skip;

  return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
}
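/* Hedged usage sketch (hypothetical decls, not a call site quoted from
   this file): a varargs-to-varargs transformation such as turning
   sprintf (dest, "%s", src) into strcpy (dest, src) could be written

     rewrite_call_expr (loc, exp, 3, strcpy_decl, 2, dest, src);

   skipping all three original arguments and supplying two new ones;
   strcpy_decl, dest and src are placeholders here.  */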
/* Return true if FNDECL shouldn't be folded right now.
   If a built-in function has an inline attribute always_inline
   wrapper, defer folding it after always_inline functions have
   been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
   might not be performed.  */

static bool
avoid_folding_inline_builtin (tree fndecl)
{
  return (DECL_DECLARED_INLINE_P (fndecl)
          && DECL_DISREGARD_INLINE_LIMITS (fndecl)
          && cfun
          && !cfun->always_inline_functions_inlined
          && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
}
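/* A minimal sketch of what this guards against (user-level code in the
   style of glibc's fortified headers, not from GCC):

     extern __inline __attribute__ ((__always_inline__)) char *
     strcpy (char *d, const char *s)
     { return __builtin___strcpy_chk (d, s, __builtin_object_size (d, 1)); }

   Folding a strcpy call as a builtin before this wrapper is inlined
   would bypass the _FORTIFY_SOURCE check, hence the deferral.  */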
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_expr (location_t loc, tree exp, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = get_callee_fndecl (exp);
  if (fndecl && fndecl_built_in_p (fndecl)
      /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
         yet.  Defer folding until we see all the arguments
         (after inlining).  */
      && !CALL_EXPR_VA_ARG_PACK (exp))
    {
      int nargs = call_expr_nargs (exp);

      /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
         instead last argument is __builtin_va_arg_pack ().  Defer folding
         even in that case, until arguments are finalized.  */
      if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
        {
          tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
          if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
            return NULL_TREE;
        }

      if (avoid_folding_inline_builtin (fndecl))
        return NULL_TREE;

      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
        return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
                                     CALL_EXPR_ARGP (exp), ignore);
      else
        {
          tree *args = CALL_EXPR_ARGP (exp);
          ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
          if (ret)
            return ret;
        }
    }
  return NULL_TREE;
}
/* Fold a CALL_EXPR with type TYPE with FN as the function expression.
   N arguments are passed in the array ARGARRAY.  Return a folded
   expression or NULL_TREE if no simplification was possible.  */

tree
fold_builtin_call_array (location_t loc, tree,
                         tree fn,
                         int n,
                         tree *argarray)
{
  if (TREE_CODE (fn) != ADDR_EXPR)
    return NULL_TREE;

  tree fndecl = TREE_OPERAND (fn, 0);
  if (TREE_CODE (fndecl) == FUNCTION_DECL
      && fndecl_built_in_p (fndecl))
    {
      /* If last argument is __builtin_va_arg_pack (), arguments to this
         function are not finalized yet.  Defer folding until they are.  */
      if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
        {
          tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
          if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
            return NULL_TREE;
        }
      if (avoid_folding_inline_builtin (fndecl))
        return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
        return targetm.fold_builtin (fndecl, n, argarray, false);
      else
        return fold_builtin_n (loc, fndecl, argarray, n, false);
    }

  return NULL_TREE;
}
/* Construct a new CALL_EXPR using the tail of the argument list of EXP
   along with N new arguments specified as the "..." parameters.  SKIP
   is the number of arguments in EXP to be omitted.  This function is used
   to do varargs-to-varargs transformations.  */

static tree
rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
{
  va_list ap;
  tree t;

  va_start (ap, n);
  t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
                                CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
  va_end (ap);

  return t;
}
/* Validate a single argument ARG against a tree code CODE representing
   a type.  Return true when the argument is valid.  */

static bool
validate_arg (const_tree arg, enum tree_code code)
{
  if (!arg)
    return false;
  else if (code == POINTER_TYPE)
    return POINTER_TYPE_P (TREE_TYPE (arg));
  else if (code == INTEGER_TYPE)
    return INTEGRAL_TYPE_P (TREE_TYPE (arg));
  return code == TREE_CODE (TREE_TYPE (arg));
}
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipses, otherwise the last specifier must be a
   VOID_TYPE.

   This is the GIMPLE version of validate_arglist.  Eventually we want to
   completely convert builtins.c to work from GIMPLEs and the tree based
   validate_arglist will then be removed.  */

bool
validate_gimple_arglist (const gcall *call, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_tree arg;
  size_t i;

  va_start (ap, call);
  i = 0;

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
        {
        case 0:
          /* This signifies an ellipses, any further arguments are all ok.  */
          res = true;
          goto end;
        case VOID_TYPE:
          /* This signifies an endlink, if no arguments remain, return
             true, otherwise return false.  */
          res = (i == gimple_call_num_args (call));
          goto end;
        default:
          /* If no parameters remain or the parameter's code does not
             match the specified code, return false.  Otherwise continue
             checking any remaining arguments.  */
          arg = gimple_call_arg (call, i++);
          if (!validate_arg (arg, code))
            goto end;
          break;
        }
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}
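/* Hedged usage sketch mirroring call sites elsewhere in the compiler:
   checking that a call looks like memcpy (void *, const void *, size_t)
   is spelled

     if (!validate_gimple_arglist (call, POINTER_TYPE, POINTER_TYPE,
                                   INTEGER_TYPE, VOID_TYPE))
       return false;

   with the trailing VOID_TYPE playing the role of the endlink marker
   described above.  */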
/* Default target-specific builtin expander that does nothing.  */

rtx
default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
                        rtx target ATTRIBUTE_UNUSED,
                        rtx subtarget ATTRIBUTE_UNUSED,
                        machine_mode mode ATTRIBUTE_UNUSED,
                        int ignore ATTRIBUTE_UNUSED)
{
  return NULL_RTX;
}
/* Returns true if EXP represents data that would potentially reside
   in a readonly section.  */

static bool
readonly_data_expr (tree exp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) != ADDR_EXPR)
    return false;

  exp = get_base_address (TREE_OPERAND (exp, 0));
  if (!exp)
    return false;

  /* Make sure we call decl_readonly_section only for trees it
     can handle (since it returns true for everything it doesn't
     handle).  */
  if (TREE_CODE (exp) == STRING_CST
      || TREE_CODE (exp) == CONSTRUCTOR
      || (VAR_P (exp) && TREE_STATIC (exp)))
    return decl_readonly_section (exp, 0);
  else
    return false;
}
/* Simplify a call to the strpbrk builtin.  S1 and S2 are the arguments
   to the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1, *p2;

      p2 = c_getstr (s2);
      if (p2 == NULL)
        return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
        {
          const char *r = strpbrk (p1, p2);
          tree tem;

          if (r == NULL)
            return build_int_cst (TREE_TYPE (s1), 0);

          /* Return an offset into the constant string argument.  */
          tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
          return fold_convert_loc (loc, type, tem);
        }

      if (p2[0] == '\0')
        /* strpbrk(x, "") == NULL.
           Evaluate and ignore s1 in case it had side-effects.  */
        return omit_one_operand_loc (loc, type, integer_zero_node, s1);

      if (p2[1] != '\0')
        return NULL_TREE;  /* Really call strpbrk.  */

      fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (!fn)
        return NULL_TREE;

      /* New argument list transforming strpbrk(s1, s2) to
         strchr(s1, s2[0]).  */
      return build_call_expr_loc (loc, fn, 2, s1,
                                  build_int_cst (integer_type_node, p2[0]));
    }
}
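/* A sketch of the folds performed above (illustrative, not exhaustive):

     strpbrk (s, "")     -> (char *) 0, with s still evaluated
     strpbrk ("ab", "b") -> "ab" + 1, an offset into the constant
     strpbrk (s, "x")    -> strchr (s, 'x')

   Any other form falls through and really calls strpbrk.  */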
/* Simplify a call to the strspn builtin.  S1 and S2 are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strspn (location_t loc, tree s1, tree s2)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;

  const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);

  /* If either argument is "", the result is zero.  */
  if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
    /* Evaluate and ignore both arguments in case either one has
       side-effects.  */
    return omit_two_operands_loc (loc, size_type_node, size_zero_node,
                                  s1, s2);
  return NULL_TREE;
}
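/* E.g. (a sketch of the single fold above): both strspn ("", s2) and
   strspn (s1, "") become (size_t) 0, with the other argument kept
   around for its side effects via omit_two_operands_loc.  */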
/* Simplify a call to the strcspn builtin.  S1 and S2 are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strcspn (location_t loc, tree s1, tree s2)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;

  /* If the first argument is "", the result is zero.  */
  const char *p1 = c_getstr (s1);
  if (p1 && *p1 == '\0')
    {
      /* Evaluate and ignore argument s2 in case it has
         side-effects.  */
      return omit_one_operand_loc (loc, size_type_node,
                                   size_zero_node, s2);
    }

  /* If the second argument is "", return __builtin_strlen(s1).  */
  const char *p2 = c_getstr (s2);
  if (p2 && *p2 == '\0')
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);

      /* If the replacement _DECL isn't initialized, don't do the
         transformation.  */
      if (!fn)
        return NULL_TREE;

      return build_call_expr_loc (loc, fn, 1, s1);
    }
  return NULL_TREE;
}
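/* A sketch of the two folds above:

     strcspn ("", s2) -> (size_t) 0, with s2 still evaluated
     strcspn (s1, "") -> strlen (s1)

   Everything else is left for the library call.  */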
/* Fold the next_arg or va_start call EXP.  Returns true if an error
   was produced, false otherwise.  This is done so that we don't output
   the error or warning more than once.  */

static bool
fold_builtin_next_arg (tree exp, bool va_start_p)
{
  tree fntype = TREE_TYPE (current_function_decl);
  int nargs = call_expr_nargs (exp);
  tree arg;
  /* There is a good chance the current input_location points inside the
     definition of the va_start macro (perhaps on the token for
     builtin) in a system header, so warnings will not be emitted.
     Use the location in real source code.  */
  location_t current_location =
    linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
                                              NULL);

  if (!stdarg_p (fntype))
    {
      error ("%<va_start%> used in function with fixed args");
      return true;
    }

  if (va_start_p)
    {
      if (va_start_p && (nargs != 2))
        {
          error ("wrong number of arguments to function %<va_start%>");
          return true;
        }
      arg = CALL_EXPR_ARG (exp, 1);
    }
  /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
     when we checked the arguments and if needed issued a warning.  */
  else
    {
      if (nargs == 0)
        {
          /* Evidently an out of date version of <stdarg.h>; can't validate
             va_start's second argument, but can still work as intended.  */
          warning_at (current_location,
                      OPT_Wvarargs,
                      "%<__builtin_next_arg%> called without an argument");
          return true;
        }
      else if (nargs > 1)
        {
          error ("wrong number of arguments to function %<__builtin_next_arg%>");
          return true;
        }
      arg = CALL_EXPR_ARG (exp, 0);
    }

  if (TREE_CODE (arg) == SSA_NAME)
    arg = SSA_NAME_VAR (arg);

  /* We destructively modify the call to be __builtin_va_start (ap, 0)
     or __builtin_next_arg (0) the first time we see it, after checking
     the arguments and if needed issuing a warning.  */
  if (!integer_zerop (arg))
    {
      tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));

      /* Strip off all nops for the sake of the comparison.  This
         is not quite the same as STRIP_NOPS.  It does more.
         We must also strip off INDIRECT_EXPR for C++ reference
         parameters.  */
      while (CONVERT_EXPR_P (arg)
             || TREE_CODE (arg) == INDIRECT_REF)
        arg = TREE_OPERAND (arg, 0);
      if (arg != last_parm)
        {
          /* FIXME: Sometimes with the tree optimizers we can get
             something that is not the last argument even though the
             user used the last argument.  We just warn and set the arg
             to be the last argument so that we will get wrong-code
             because of it.  */
          warning_at (current_location,
                      OPT_Wvarargs,
                      "second parameter of %<va_start%> not last named argument");
        }

      /* Undefined by C99 7.15.1.4p4 (va_start):
         "If the parameter parmN is declared with the register storage
         class, with a function or array type, or with a type that is
         not compatible with the type that results after application of
         the default argument promotions, the behavior is undefined."
      */
      else if (DECL_REGISTER (arg))
        {
          warning_at (current_location,
                      OPT_Wvarargs,
                      "undefined behavior when second parameter of "
                      "%<va_start%> is declared with %<register%> storage");
        }

      /* We want to verify the second parameter just once before the tree
         optimizers are run and then avoid keeping it in the tree,
         as otherwise we could warn even for correct code like:
         void foo (int i, ...)
         { va_list ap; i++; va_start (ap, i); va_end (ap); }  */
      if (va_start_p)
        CALL_EXPR_ARG (exp, 1) = integer_zero_node;
      else
        CALL_EXPR_ARG (exp, 0) = integer_zero_node;
    }
  return false;
}
/* Expand a call EXP to __builtin_object_size.  */

static rtx
expand_builtin_object_size (tree exp)
{
  tree ost;
  int object_size_type;
  tree fndecl = get_callee_fndecl (exp);

  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      error ("%Kfirst argument of %qD must be a pointer, second integer constant",
             exp, fndecl);
      expand_builtin_trap ();
      return const0_rtx;
    }

  ost = CALL_EXPR_ARG (exp, 1);
  STRIP_NOPS (ost);

  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    {
      error ("%Klast argument of %qD is not integer constant between 0 and 3",
             exp, fndecl);
      expand_builtin_trap ();
      return const0_rtx;
    }

  object_size_type = tree_to_shwi (ost);

  return object_size_type < 2 ? constm1_rtx : const0_rtx;
}
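/* Illustrative user-level view of the expansion above (ours):

     char buf[64];
     size_t n = __builtin_object_size (&buf, 0);

   When no earlier pass managed to fold the call, this expander returns
   the documented "unknown" answers: (size_t) -1 for types 0 and 1 and
   (size_t) 0 for types 2 and 3.  */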
/* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   FCODE is the BUILT_IN_* to use.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
                           enum built_in_function fcode)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE,
                         fcode == BUILT_IN_MEMSET_CHK
                         ? INTEGER_TYPE : POINTER_TYPE,
                         INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);
  tree size = CALL_EXPR_ARG (exp, 3);

  bool sizes_ok = check_access (exp, dest, src, len, /*maxread=*/NULL_TREE,
                                /*str=*/NULL_TREE, size);

  if (!tree_fits_uhwi_p (size))
    return NULL_RTX;

  if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
    {
      /* Avoid transforming the checking call to an ordinary one when
         an overflow has been detected or when the call couldn't be
         validated because the size is not constant.  */
      if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
        return NULL_RTX;

      tree fn = NULL_TREE;
      /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
         mem{cpy,pcpy,move,set} is available.  */
      switch (fcode)
        {
        case BUILT_IN_MEMCPY_CHK:
          fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
          break;
        case BUILT_IN_MEMPCPY_CHK:
          fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
          break;
        case BUILT_IN_MEMMOVE_CHK:
          fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
          break;
        case BUILT_IN_MEMSET_CHK:
          fn = builtin_decl_explicit (BUILT_IN_MEMSET);
          break;
        default:
          break;
        }

      if (!fn)
        return NULL_RTX;

      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_expr (fn, target, mode, EXPAND_NORMAL);
    }
  else if (fcode == BUILT_IN_MEMSET_CHK)
    return NULL_RTX;
  else
    {
      unsigned int dest_align = get_pointer_alignment (dest);

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
        return NULL_RTX;

      /* If SRC and DEST are the same (and not volatile), do nothing.  */
      if (operand_equal_p (src, dest, 0))
        {
          tree expr;

          if (fcode != BUILT_IN_MEMPCPY_CHK)
            {
              /* Evaluate and ignore LEN in case it has side-effects.  */
              expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
              return expand_expr (dest, target, mode, EXPAND_NORMAL);
            }

          expr = fold_build_pointer_plus (dest, len);
          return expand_expr (expr, target, mode, EXPAND_NORMAL);
        }

      /* __memmove_chk special case.  */
      if (fcode == BUILT_IN_MEMMOVE_CHK)
        {
          unsigned int src_align = get_pointer_alignment (src);

          if (src_align == 0)
            return NULL_RTX;

          /* If src is categorized for a readonly section we can use
             normal __memcpy_chk.  */
          if (readonly_data_expr (src))
            {
              tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
              if (!fn)
                return NULL_RTX;
              fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
                                          dest, src, len, size);
              gcc_assert (TREE_CODE (fn) == CALL_EXPR);
              CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
              return expand_expr (fn, target, mode, EXPAND_NORMAL);
            }
        }
      return NULL_RTX;
    }
}
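/* Hedged sketch of the transformation above: with a known, fitting
   length, a checked call such as

     __builtin___memcpy_chk (d, s, 32, 64)

   is emitted as a plain memcpy (d, s, 32); if the object size is the
   unknown marker (size_t) -1, the check cannot fail and the unchecked
   call is likewise used directly.  */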
/* Emit warning if a buffer overflow is detected at compile time.  */

static void
maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
{
  /* The source string.  */
  tree srcstr = NULL_TREE;
  /* The size of the destination object.  */
  tree objsize = NULL_TREE;
  /* The string that is being concatenated with (as in __strcat_chk)
     or null if it isn't.  */
  tree catstr = NULL_TREE;
  /* The maximum length of the source sequence in a bounded operation
     (such as __strncat_chk) or null if the operation isn't bounded
     (such as __strcat_chk).  */
  tree maxread = NULL_TREE;
  /* The exact size of the access (such as in __strncpy_chk).  */
  tree size = NULL_TREE;

  switch (fcode)
    {
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      srcstr = CALL_EXPR_ARG (exp, 1);
      objsize = CALL_EXPR_ARG (exp, 2);
      break;

    case BUILT_IN_STRCAT_CHK:
      /* For __strcat_chk the warning will be emitted only if overflowing
         by at least strlen (dest) + 1 bytes.  */
      catstr = CALL_EXPR_ARG (exp, 0);
      srcstr = CALL_EXPR_ARG (exp, 1);
      objsize = CALL_EXPR_ARG (exp, 2);
      break;

    case BUILT_IN_STRNCAT_CHK:
      catstr = CALL_EXPR_ARG (exp, 0);
      srcstr = CALL_EXPR_ARG (exp, 1);
      maxread = CALL_EXPR_ARG (exp, 2);
      objsize = CALL_EXPR_ARG (exp, 3);
      break;

    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      srcstr = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 2);
      objsize = CALL_EXPR_ARG (exp, 3);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      maxread = CALL_EXPR_ARG (exp, 1);
      objsize = CALL_EXPR_ARG (exp, 3);
      break;
    default:
      gcc_unreachable ();
    }

  if (catstr && maxread)
    {
      /* Check __strncat_chk.  There is no way to determine the length
         of the string to which the source string is being appended so
         just warn when the length of the source string is not known.  */
      check_strncat_sizes (exp, objsize);
      return;
    }

  /* The destination argument is the first one for all built-ins above.  */
  tree dst = CALL_EXPR_ARG (exp, 0);

  check_access (exp, dst, srcstr, size, maxread, srcstr, objsize);
}
/* Emit warning if a buffer overflow is detected at compile time
   in __sprintf_chk/__vsprintf_chk calls.  */

static void
maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
{
  tree size, len, fmt;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call.  */

  if (nargs < 4)
    return;
  size = CALL_EXPR_ARG (exp, 2);
  fmt = CALL_EXPR_ARG (exp, 3);

  if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
    return;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return;

  if (!init_target_chars ())
    return;

  /* If the format doesn't contain % args or %%, we know its size.  */
  if (strchr (fmt_str, target_percent) == 0)
    len = build_int_cstu (size_type_node, strlen (fmt_str));
  /* If the format is "%s" and first ... argument is a string literal,
     we know it too.  */
  else if (fcode == BUILT_IN_SPRINTF_CHK
           && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree arg;

      if (nargs < 5)
        return;
      arg = CALL_EXPR_ARG (exp, 4);
      if (! POINTER_TYPE_P (TREE_TYPE (arg)))
        return;

      len = c_strlen (arg, 1);
      if (!len || ! tree_fits_uhwi_p (len))
        return;
    }
  else
    return;

  /* Add one for the terminating nul.  */
  len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);

  check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, /*size=*/NULL_TREE,
                /*maxread=*/NULL_TREE, len, size);
}
/* Emit warning if a free is called with address of a variable.  */

static void
maybe_emit_free_warning (tree exp)
{
  tree arg = CALL_EXPR_ARG (exp, 0);

  STRIP_NOPS (arg);
  if (TREE_CODE (arg) != ADDR_EXPR)
    return;

  arg = get_base_address (TREE_OPERAND (arg, 0));
  if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
    return;

  if (SSA_VAR_P (arg))
    warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
                "%Kattempt to free a non-heap object %qD", exp, arg);
  else
    warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
                "%Kattempt to free a non-heap object", exp);
}
/* Fold a call to __builtin_object_size with arguments PTR and OST,
   if possible.  */

static tree
fold_builtin_object_size (tree ptr, tree ost)
{
  unsigned HOST_WIDE_INT bytes;
  int object_size_type;

  if (!validate_arg (ptr, POINTER_TYPE)
      || !validate_arg (ost, INTEGER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (ost);

  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    return NULL_TREE;

  object_size_type = tree_to_shwi (ost);

  /* __builtin_object_size doesn't evaluate side-effects in its arguments;
     if there are any side-effects, it returns (size_t) -1 for types 0 and 1
     and (size_t) 0 for types 2 and 3.  */
  if (TREE_SIDE_EFFECTS (ptr))
    return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      compute_builtin_object_size (ptr, object_size_type, &bytes);
      if (wi::fits_to_tree_p (bytes, size_type_node))
        return build_int_cstu (size_type_node, bytes);
    }
  else if (TREE_CODE (ptr) == SSA_NAME)
    {
      /* If object size is not known yet, delay folding until
         later.  Maybe subsequent passes will help determining
         it.  */
      if (compute_builtin_object_size (ptr, object_size_type, &bytes)
          && wi::fits_to_tree_p (bytes, size_type_node))
        return build_int_cstu (size_type_node, bytes);
    }

  return NULL_TREE;
}
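/* A sketch of the constant folding above (assuming
   compute_builtin_object_size succeeds):

     char buf[64];
     __builtin_object_size (buf + 8, 0)  -> 56
     __builtin_object_size (buf + 8, 2)  -> 56

   An SSA_NAME pointer whose size is not yet known is deliberately left
   unfolded so later passes get another chance.  */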
/* Builtins with folding operations that operate on "..." arguments
   need special handling; we need to store the arguments in a convenient
   data structure before attempting any folding.  Fortunately there are
   only a few builtins that fall into this category.  FNDECL is the
   function, EXP is the CALL_EXPR for the call.  */

static tree
fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  tree ret = NULL_TREE;

  switch (fcode)
    {
    case BUILT_IN_FPCLASSIFY:
      ret = fold_builtin_fpclassify (loc, args, nargs);
      break;

    default:
      break;
    }
  if (ret)
    {
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}
/* Initialize format string characters in the target charset.  */

bool
init_target_chars (void)
{
  static bool init;
  if (!init)
    {
      target_newline = lang_hooks.to_target_charset ('\n');
      target_percent = lang_hooks.to_target_charset ('%');
      target_c = lang_hooks.to_target_charset ('c');
      target_s = lang_hooks.to_target_charset ('s');
      if (target_newline == 0 || target_percent == 0 || target_c == 0
          || target_s == 0)
        return false;

      target_percent_c[0] = target_percent;
      target_percent_c[1] = target_c;
      target_percent_c[2] = '\0';

      target_percent_s[0] = target_percent;
      target_percent_s[1] = target_s;
      target_percent_s[2] = '\0';

      target_percent_s_newline[0] = target_percent;
      target_percent_s_newline[1] = target_s;
      target_percent_s_newline[2] = target_newline;
      target_percent_s_newline[3] = '\0';

      init = true;
    }
  return true;
}
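/* Usage sketch: callers compare against these cached characters rather
   than host literals, e.g.

     if (!init_target_chars ())
       return NULL_TREE;
     if (strcmp (fmt_str, target_percent_s) == 0)
       ...  the format is "%s" in the target character set ...

   so a cross compiler whose target charset differs from the host's
   (e.g. EBCDIC) still recognizes formats correctly.  */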
/* Helper function for do_mpfr_arg*().  Ensure M is a normal number
   and no overflow/underflow occurred.  INEXACT is true if M was not
   exactly calculated.  TYPE is the tree type for the result.  This
   function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail.  */

static tree
do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
      && (!flag_rounding_math || !inexact))
    {
      REAL_VALUE_TYPE rr;

      real_from_mpfr (&rr, m, type, GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
         check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
         but the mpfr_t is not, then we underflowed in the
         conversion.  */
      if (real_isfinite (&rr)
          && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
        {
          REAL_VALUE_TYPE rmode;

          real_convert (&rmode, TYPE_MODE (type), &rr);
          /* Proceed iff the specified mode can hold the value.  */
          if (real_identical (&rmode, &rr))
            return build_real (type, rmode);
        }
    }
  return NULL_TREE;
}
/* Helper function for do_mpc_arg*().  Ensure M is a normal complex
   number and no overflow/underflow occurred.  INEXACT is true if M
   was not exactly calculated.  TYPE is the tree type for the result.
   This function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail, if
   FORCE_CONVERT is true, then bypass the checks.  */

static tree
do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (force_convert
      || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
          && !mpfr_overflow_p () && !mpfr_underflow_p ()
          && (!flag_rounding_math || !inexact)))
    {
      REAL_VALUE_TYPE re, im;

      real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
      real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
         check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
         but the mpfr_t is not, then we underflowed in the
         conversion.  */
      if (force_convert
          || (real_isfinite (&re) && real_isfinite (&im)
              && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
              && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
        {
          REAL_VALUE_TYPE re_mode, im_mode;

          real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
          real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
          /* Proceed iff the specified mode can hold the value.  */
          if (force_convert
              || (real_identical (&re_mode, &re)
                  && real_identical (&im_mode, &im)))
            return build_complex (type, build_real (TREE_TYPE (type), re_mode),
                                  build_real (TREE_TYPE (type), im_mode));
        }
    }
  return NULL_TREE;
}
/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          tree result_rem;
          long integer_quo;
          mpfr_t m0, m1;

          mpfr_inits2 (prec, m0, m1, NULL);
          mpfr_from_real (m0, ra0, GMP_RNDN);
          mpfr_from_real (m1, ra1, GMP_RNDN);
          mpfr_clear_flags ();
          mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
          /* Remquo is independent of the rounding mode, so pass
             inexact=0 to do_mpfr_ckconv().  */
          result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
          mpfr_clears (m0, m1, NULL);
          if (result_rem)
            {
              /* MPFR calculates quo in the host's long so it may
                 return more bits in quo than the target int can hold
                 if sizeof(host long) > sizeof(target int).  This can
                 happen even for native compilers in LP64 mode.  In
                 these cases, modulo the quo value with the largest
                 number that the target int can hold while leaving one
                 bit for the sign.  */
              if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
                integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

              /* Dereference the quo pointer argument.  */
              arg_quo = build_fold_indirect_ref (arg_quo);
              /* Proceed iff a valid pointer type was passed in.  */
              if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
                {
                  /* Set the value.  */
                  tree result_quo
                    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
                                   build_int_cst (TREE_TYPE (arg_quo),
                                                  integer_quo));
                  TREE_SIDE_EFFECTS (result_quo) = 1;
                  /* Combine the quo assignment with the rem.  */
                  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                    result_quo, result_rem));
                }
            }
        }
    }
  return result;
}
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
         negative integer.  */
      if (real_isfinite (ra)
          && ra->cl != rvc_zero
          && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          int inexact, sg;
          mpfr_t m;
          tree result_lg;

          mpfr_init2 (m, prec);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = mpfr_lgamma (m, &sg, m, rnd);
          result_lg = do_mpfr_ckconv (m, type, inexact);
          mpfr_clear (m);
          if (result_lg)
            {
              tree result_sg;

              /* Dereference the arg_sg pointer argument.  */
              arg_sg = build_fold_indirect_ref (arg_sg);
              /* Assign the signgam value into *arg_sg.  */
              result_sg = fold_build2 (MODIFY_EXPR,
                                       TREE_TYPE (arg_sg), arg_sg,
                                       build_int_cst (TREE_TYPE (arg_sg), sg));
              TREE_SIDE_EFFECTS (result_sg) = 1;
              /* Combine the signgam assignment with the lgamma result.  */
              result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                result_sg, result_lg));
            }
        }
    }
  return result;
}
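/* Sketch of the resulting fold (ours): given a constant argument,

     int sg;
     double r = lgamma_r (3.0, &sg);

   becomes, roughly, a COMPOUND_EXPR that stores the sign of gamma (3.0)
   (here 1) into sg and yields the MPFR-computed constant for
   lgamma (3.0), subject to the exactness checks above.  */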
/* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
   mpc function FUNC on it and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
             int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
          || (real_isfinite (re0) && real_isfinite (im0)
              && real_isfinite (re1) && real_isfinite (im1)))
        {
          const struct real_format *const fmt =
            REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
          int inexact;
          mpc_t m0, m1;

          mpc_init2 (m0, prec);
          mpc_init2 (m1, prec);
          mpfr_from_real (mpc_realref (m0), re0, rnd);
          mpfr_from_real (mpc_imagref (m0), im0, rnd);
          mpfr_from_real (mpc_realref (m1), re1, rnd);
          mpfr_from_real (mpc_imagref (m1), im1, rnd);
          mpfr_clear_flags ();
          inexact = func (m0, m0, m1, crnd);
          result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
          mpc_clear (m0);
          mpc_clear (m1);
        }
    }

  return result;
}
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl && fndecl_built_in_p (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
                    ? gimple_call_arg_ptr (stmt, 0)
                    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
        return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
        return targetm.fold_builtin (fndecl, nargs, args, ignore);
      else
        {
          ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
          if (ret)
            {
              /* Propagate location information from original call to
                 expansion of builtin.  Otherwise things like
                 maybe_emit_chk_warning, that operate on the expansion
                 of a builtin, will use the wrong location information.  */
              if (gimple_has_location (stmt))
                {
                  tree realret = ret;
                  if (TREE_CODE (ret) == NOP_EXPR)
                    realret = TREE_OPERAND (ret, 0);
                  if (CAN_HAVE_LOCATION_P (realret)
                      && !EXPR_HAS_LOCATION (realret))
                    SET_EXPR_LOCATION (realret, loc);
                  return realret;
                }
              return ret;
            }
        }
    }
  return NULL_TREE;
}
/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  gcc_assert (fndecl_built_in_p (decl, BUILT_IN_NORMAL)
              && asmspec != 0);

  tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);

  if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
      && INT_TYPE_SIZE < BITS_PER_WORD)
    {
      scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
      set_user_assembler_libfunc ("ffs", asmspec);
      set_optab_libfunc (ffs_optab, mode, "ffs");
    }
}
/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
    switch (DECL_FUNCTION_CODE (decl))
      {
        /* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
        /* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
        /* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
        return true;

      default:
        return false;
      }

  return false;
}
/* Return true if DECL is a builtin that is not expensive, i.e., one that
   is most probably expanded inline into reasonably simple code.  This is a
   superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      CASE_BUILT_IN_ALLOCA:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_ACC_ON_DEVICE:
        return true;

      default:
        return is_simple_builtin (decl);
      }

  return false;
}
/* Return true if T is a constant and the value cast to a target char
   can be represented by a host char.
   Store the casted char constant in *P if so.  */

bool
target_char_cst_p (tree t, char *p)
{
  if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
    return false;

  *p = (char)tree_to_uhwi (t);
  return true;
}
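/* Usage sketch in the style of the gimple folders: before folding
   something like strchr (s, c) with constant c, the constant must fit
   a host char:

     char c;
     if (!target_char_cst_p (arg1, &c))
       return false;

   after which c can be compared directly against the bytes of a
   host-side string.  */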
/* Return the maximum object size.  */

tree
max_object_size (void)
{
  /* To do: Make this a configurable parameter.  */
  return TYPE_MAX_VALUE (ptrdiff_type_node);
}