/* Expand builtin functions.
   Copyright (C) 1988-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */
#include "coretypes.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "gimple-ssa-warn-restrict.h"
#include "stor-layout.h"
#include "tree-object-size.h"
#include "typeclass.h"
#include "langhooks.h"
#include "value-prof.h"
#include "stringpool.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimple-fold.h"
#include "file-prefix-map.h" /* remap_macro_filename()  */
#include "gomp-constants.h"
#include "omp-general.h"
struct target_builtins default_target_builtins;
struct target_builtins *this_target_builtins = &default_target_builtins;
/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN
/* Setup an array of builtin_info_type, make sure each element decl is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int) END_BUILTINS];
/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;
static rtx c_readstr (const char *, scalar_int_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx inline_expand_builtin_string_cmp (tree, rtx);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memchr (tree, rtx);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
					    rtx target, tree exp,
					    memop_ret retmode);
static rtx expand_builtin_memmove (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, memop_ret);
static rtx expand_builtin_strcat (tree, rtx);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_stpncpy (tree, rtx);
static rtx expand_builtin_strncat (tree, rtx);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static rtx expand_builtin_expect_with_probability (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree,
					enum tree_code, enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);
/* Return true if NAME starts with __builtin_, __sync_ or __atomic_.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  return false;
}
/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

static bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}
/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and N
   in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  poly_int64 bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = least_bit_hwi (ptr_bitmask);
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      unsigned int talign;
      if (!addr_p && !known_alignment
	  && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
	  && talign > align)
	align = talign;
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  /* Account for the alignment of runtime coefficients, so that the constant
     bitpos is guaranteed to be accurate.  */
  unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
  if (alt_align != 0 && alt_align < align)
    {
      align = alt_align;
      known_alignment = false;
    }

  *alignp = align;
  *bitposp = bitpos.coeffs[0] & (align - 1);
  return known_alignment;
}
/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Otherwise return
   false and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}
399 get_object_alignment (tree exp
)
401 unsigned HOST_WIDE_INT bitpos
= 0;
404 get_object_alignment_1 (exp
, &align
, &bitpos
);
406 /* align and bitpos now specify known low bits of the pointer.
407 ptr & (align - 1) == bitpos. */
410 align
= least_bit_hwi (bitpos
);
/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	  if (*alignp == 0)
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}
/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);

  return align;
}
/* Return the number of leading non-zero elements in the sequence
   [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
   ELTSIZE must be a power of 2 less than 8.  Used by c_strlen.  */

static unsigned
string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
{
  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  unsigned n;

  if (eltsize == 1)
    {
      /* Optimize the common case of plain char.  */
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n;
	  if (!*elt)
	    break;
	}
    }
  else
    {
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n * eltsize;
	  if (!memcmp (elt, "\0\0\0\0", eltsize))
	    break;
	}
    }
  return n;
}
/* For a call at LOC to a function FN that expects a string in the argument
   ARG, issue a diagnostic because it is being called with an argument
   declared at DECL that is a character array with no terminating NUL.  */

void
warn_string_no_nul (location_t loc, const char *fn, tree arg, tree decl)
{
  if (TREE_NO_WARNING (arg))
    return;

  loc = expansion_point_location_if_in_system_header (loc);

  if (warning_at (loc, OPT_Wstringop_overflow_,
		  "%qs argument missing terminating nul", fn))
    {
      inform (DECL_SOURCE_LOCATION (decl),
	      "referenced argument declared here");
      TREE_NO_WARNING (arg) = 1;
    }
}
/* If EXP refers to an unterminated constant character array return
   the declaration of the object of which the array is a member or
   element and if SIZE is not null, set *SIZE to the size of
   the unterminated array and set *EXACT if the size is exact or
   clear it otherwise.  Otherwise return null.  */

tree
unterminated_array (tree exp, tree *size /* = NULL */, bool *exact /* = NULL */)
{
  /* C_STRLEN will return NULL and set DECL in the info
     structure if EXP references an unterminated array.  */
  c_strlen_data data;
  memset (&data, 0, sizeof (c_strlen_data));
  tree len = c_strlen (exp, 1, &data);
  if (len == NULL_TREE && data.len && data.decl)
    {
      if (size)
	{
	  len = data.len;
	  if (data.off)
	    {
	      /* Constant offsets are already accounted for in data.len, but
		 not in a SSA_NAME + CST expression.  */
	      if (TREE_CODE (data.off) == INTEGER_CST)
		*exact = true;
	      else if (TREE_CODE (data.off) == PLUS_EXPR
		       && TREE_CODE (TREE_OPERAND (data.off, 1)) == INTEGER_CST)
		{
		  /* Subtract the offset from the size of the array.  */
		  *exact = false;
		  tree temp = TREE_OPERAND (data.off, 1);
		  temp = fold_convert (ssizetype, temp);
		  len = fold_build2 (MINUS_EXPR, ssizetype, len, temp);
		}
	      else
		*exact = false;
	    }
	  else
	    *exact = true;

	  *size = len;
	}
      return data.decl;
    }

  return NULL_TREE;
}
/* Compute the length of a null-terminated character string or wide
   character string handling character sizes of 1, 2, and 4 bytes.
   TREE_STRING_LENGTH is not the right way because it evaluates to
   the size of the character array in bytes (as opposed to characters)
   and because it can contain a zero byte in the middle.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   Additional information about the string accessed may be recorded
   in DATA.  For example, if SRC references an unterminated string,
   then the declaration will be stored in the DECL field.  If the
   length of the unterminated string can be determined, it'll be
   stored in the LEN field.  Note this length could well be different
   than what a C strlen call would return.

   ELTSIZE is 1 for normal single byte character strings, and 2 or
   4 for wide character strings.  ELTSIZE is by default 1.

   The value returned is of type `ssizetype'.  */
tree
c_strlen (tree src, int only_value, c_strlen_data *data, unsigned eltsize)
{
  /* If we were not passed a DATA pointer, then get one to a local
     structure.  That avoids having to check DATA for NULL before
     each time we want to use it.  */
  c_strlen_data local_strlen_data;
  memset (&local_strlen_data, 0, sizeof (c_strlen_data));
  if (!data)
    data = &local_strlen_data;

  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value, data, eltsize);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);

  location_t loc = EXPR_LOC_OR_LOC (src, input_location);

  /* Offset from the beginning of the string in bytes.  */
  tree byteoff;
  tree memsize;
  tree decl;
  src = string_constant (src, &byteoff, &memsize, &decl);
  if (src == 0)
    return NULL_TREE;

  /* Determine the size of the string element.  */
  if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))
    return NULL_TREE;

  /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
     length of SRC.  Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
     in case the latter is less than the size of the array, such as when
     SRC refers to a short string literal used to initialize a large array.
     In that case, the elements of the array after the terminating NUL are
     all NUL.  */
  HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
  strelts = strelts / eltsize;

  if (!tree_fits_uhwi_p (memsize))
    return NULL_TREE;

  HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize;

  /* PTR can point to the byte representation of any string type, including
     char* and wchar_t*.  */
  const char *ptr = TREE_STRING_POINTER (src);

  if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
    {
      /* The code below works only for single byte character types.  */
      if (eltsize != 1)
	return NULL_TREE;

      /* If the string has an internal NUL character followed by any
	 non-NUL characters (e.g., "foo\0bar"), we can't compute
	 the offset to the following NUL if we don't know where to
	 start searching for it.  */
      unsigned len = string_length (ptr, eltsize, strelts);

      /* Return when an embedded null character is found or none at all.
	 In the latter case, set the DECL/LEN field in the DATA structure
	 so that callers may examine them.  */
      if (len + 1 < strelts)
	return NULL_TREE;
      else if (len >= maxelts)
	{
	  data->decl = decl;
	  data->off = byteoff;
	  data->len = ssize_int (len);
	  return NULL_TREE;
	}

      /* For empty strings the result should be zero.  */
      if (len == 0)
	return ssize_int (0);

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  If the offset falls within the bounds
	 of the string subtract the offset from the length of the string,
	 and return that.  Otherwise the length is zero.  Take care to
	 use SAVE_EXPR in case the OFFSET has side-effects.  */
      tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff)
						 : byteoff;
      offsave = fold_convert_loc (loc, sizetype, offsave);
      tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
				      size_int (len));
      tree lenexp = fold_build2_loc (loc, MINUS_EXPR, sizetype, size_int (len),
				     offsave);
      lenexp = fold_convert_loc (loc, ssizetype, lenexp);
      return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
			      build_zero_cst (ssizetype));
    }

  /* Offset from the beginning of the string in elements.  */
  HOST_WIDE_INT eltoff;

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (byteoff == 0)
    eltoff = 0;
  else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize)
    eltoff = -1;
  else
    eltoff = tree_to_uhwi (byteoff) / eltsize;

  /* If the offset is known to be out of bounds, warn, and call strlen at
     the supplied offset.  */
  if (eltoff < 0 || eltoff >= maxelts)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, OPT_Warray_bounds,
		      "offset %qwi outside bounds of constant string",
		      eltoff);
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* If eltoff is larger than strelts but less than maxelts the
     string length is zero, since the excess memory will be zero.  */
  if (eltoff > strelts)
    return ssize_int (0);

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since ELTOFF is our starting index into the string, no further
     calculation is needed.  */
  unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
				strelts - eltoff);

  /* Don't know what to return if there was no zero termination.
     Ideally this would turn into a gcc_checking_assert over time.
     Set DECL/LEN so callers can examine them.  */
  if (len >= maxelts - eltoff)
    {
      data->decl = decl;
      data->off = byteoff;
      data->len = ssize_int (len);
      return NULL_TREE;
    }

  return ssize_int (len);
}
/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, scalar_int_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (VAR_P (exp) && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;
/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.   */
  cfun->has_nonlocal_label = 1;
}
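
/* The buffer layout established above is: word 0 holds the frame
   pointer value, word 1 the address of RECEIVER_LABEL, and everything
   from offset 2 * GET_MODE_SIZE (Pmode) onwards holds the
   machine-dependent stack save area.  */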
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = rtx_for_static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
  if (! targetm.have_nonlocal_goto ())
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 TARGET_STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  /* Restore the frame pointer and stack pointer.  We must use a
	     temporary since the setjmp buffer may be a local.  */
	  fp = copy_to_reg (fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);
	  emit_move_insn (hard_frame_pointer_rtx, fp);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  /* Get a bitmap of pointer argument numbers declared attribute nonnull.  */
  tree fn = CALL_EXPR_FN (callexpr);
  bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));

  for (unsigned argno = 1; ; ++argno)
    {
      code = (enum tree_code) va_arg (ap, int);

      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	case POINTER_TYPE:
	  /* The actual argument must be nonnull when either the whole
	     called function has been declared nonnull, or when the formal
	     argument corresponding to the actual argument has been.  */
	  if (argmap
	      && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
	    {
	      arg = next_const_call_expr_arg (&iter);
	      if (!validate_arg (arg, code) || integer_zerop (arg))
		goto end;
	      break;
	    }
	  /* FALLTHRU */
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  BITMAP_FREE (argmap);

  return res;
}
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.   */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore the frame pointer and stack pointer.  We must use a
	 temporary since the setjmp buffer may be a local.  */
      r_fp = copy_to_reg (r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);
      emit_move_insn (hard_frame_pointer_rtx, r_fp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
      if (regnum != INVALID_REGNUM && fixed_regs[regnum])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}
/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  buf_addr = convert_memory_address (Pmode, buf_addr);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}
/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)
/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
	  }
    }
  return size;
}
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  fixed_size_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  fixed_size_mode mode;
  rtx struct_incoming_value
    = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
				      crtl->args.pretend_args_size),
		       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    rtx_insn *seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  fixed_size_mode mode;
  rtx incoming_args, result, reg, dest, src;
  rtx_call_insn *call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value
    = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
  if (!STACK_GROWS_DOWNWARD)
    incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
					 incoming_args, 0, OPTAB_LIB_WIDEN);

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
  else
    emit_stack_save (SAVE_BLOCK, &old_stack_level);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
  if (!STACK_GROWS_DOWNWARD)
    {
      if (CONST_INT_P (argsize))
	dest = plus_constant (Pmode, dest, -INTVAL (argsize));
      else
	dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
    }
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
  if (targetm.have_untyped_call ())
    {
      rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
      emit_call_insn (targetm.gen_untyped_call (mem, result,
						result_vector (1, result)));
    }
  else if (targetm.have_call_value ())
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    gcc_assert (!valreg); /* have_untyped_call required.  */

	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_insn (targetm.gen_call_value (valreg,
					 gen_rtx_MEM (FUNCTION_MODE, function),
					 const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
  else
    emit_stack_restore (SAVE_BLOCK, old_stack_level);
  fixup_args_size_notes (call_insn, get_last_insn (), 0);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
/* Perform an untyped return.  */

static void
expand_builtin_return (rtx result)
{
  int size, align, regno;
  fixed_size_mode mode;
  rtx reg;
  rtx_insn *call_fusage = 0;

  result = convert_memory_address (Pmode, result);

  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

  if (targetm.have_untyped_return ())
    {
      rtx vector = result_vector (0, result);
      emit_jump_insn (targetm.gen_untyped_return (result, vector));
      emit_barrier ();
      return;
    }

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
	emit_move_insn (reg, adjust_address (result, mode, size));

	push_to_sequence (call_fusage);
	emit_use (reg);
	call_fusage = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insn (call_fusage);

  /* Return whatever value was restored by jumping directly to the end
     of the function.  */
  expand_naked_return ();
}
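
/* Illustrative use of the untyped call/return machinery above (a sketch,
   not from the original source): the __builtin_apply_args, __builtin_apply
   and __builtin_return extensions let a function forward its incoming
   arguments to another function and hand that function's return value
   back unchanged.  TARGET_FN and the argument-block size 64 below are
   assumed placeholders:

     void forwarder ()
     {
       void *args = __builtin_apply_args ();
       void *result = __builtin_apply ((void (*) ()) TARGET_FN, args, 64);
       __builtin_return (result);
     }

   expand_builtin_apply emits the argument-block copy and the call for the
   second statement; expand_builtin_return restores the saved return
   registers for the third.  */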
/* Used by expand_builtin_classify_type and fold_builtin_classify_type.  */

static enum type_class
type_to_class (tree type)
{
  switch (TREE_CODE (type))
    {
    case VOID_TYPE:	   return void_type_class;
    case INTEGER_TYPE:	   return integer_type_class;
    case ENUMERAL_TYPE:	   return enumeral_type_class;
    case BOOLEAN_TYPE:	   return boolean_type_class;
    case POINTER_TYPE:	   return pointer_type_class;
    case REFERENCE_TYPE:   return reference_type_class;
    case OFFSET_TYPE:	   return offset_type_class;
    case REAL_TYPE:	   return real_type_class;
    case COMPLEX_TYPE:	   return complex_type_class;
    case FUNCTION_TYPE:	   return function_type_class;
    case METHOD_TYPE:	   return method_type_class;
    case RECORD_TYPE:	   return record_type_class;
    case UNION_TYPE:
    case QUAL_UNION_TYPE:  return union_type_class;
    case ARRAY_TYPE:	   return (TYPE_STRING_FLAG (type)
				   ? string_type_class : array_type_class);
    case LANG_TYPE:	   return lang_type_class;
    default:		   return no_type_class;
    }
}
/* Expand a call EXP to __builtin_classify_type.  */

static rtx
expand_builtin_classify_type (tree exp)
{
  if (call_expr_nargs (exp))
    return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
  return GEN_INT (no_type_class);
}
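
/* Illustrative folding (a sketch, not from the original source): because
   type_to_class depends only on the TREE_CODE of the argument's type,
   calls such as

     int c1 = __builtin_classify_type (1.0);         -> real_type_class
     int c2 = __builtin_classify_type ((char *) 0);  -> pointer_type_class

   reduce to integer constants at compile time via the GEN_INT above.  */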
/* This helper macro, meant to be used in mathfn_built_in below, determines
   which among a set of builtin math functions is appropriate for a given type
   mode.  The `F' (float) and `L' (long double) are automatically generated
   from the 'double' case.  If a function supports the _Float<N> and _Float<N>X
   types, there are additional types that are considered with 'F32', 'F64',
   'F128', etc. suffixes.  */
#define CASE_MATHFN(MATHFN) \
  CASE_CFN_##MATHFN: \
  fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F; \
  fcodel = BUILT_IN_##MATHFN##L; break;
/* Similar to the above, but also add support for the _Float<N> and _Float<N>X
   types.  */
#define CASE_MATHFN_FLOATN(MATHFN) \
  CASE_CFN_##MATHFN: \
  fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F; \
  fcodel = BUILT_IN_##MATHFN##L; fcodef16 = BUILT_IN_##MATHFN##F16; \
  fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64; \
  fcodef128 = BUILT_IN_##MATHFN##F128; fcodef32x = BUILT_IN_##MATHFN##F32X; \
  fcodef64x = BUILT_IN_##MATHFN##F64X; fcodef128x = BUILT_IN_##MATHFN##F128X; \
  break;
/* Similar to above, but appends _R after any F/L suffix.  */
#define CASE_MATHFN_REENT(MATHFN) \
  case CFN_BUILT_IN_##MATHFN##_R: \
  case CFN_BUILT_IN_##MATHFN##F_R: \
  case CFN_BUILT_IN_##MATHFN##L_R: \
  fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R; \
  fcodel = BUILT_IN_##MATHFN##L_R; break;
/* Return a function equivalent to FN but operating on floating-point
   values of type TYPE, or END_BUILTINS if no such function exists.
   This is purely an operation on function codes; it does not guarantee
   that the target actually has an implementation of the function.  */

static built_in_function
mathfn_built_in_2 (tree type, combined_fn fn)
{
  tree mtype;
  built_in_function fcode, fcodef, fcodel;
  built_in_function fcodef16 = END_BUILTINS;
  built_in_function fcodef32 = END_BUILTINS;
  built_in_function fcodef64 = END_BUILTINS;
  built_in_function fcodef128 = END_BUILTINS;
  built_in_function fcodef32x = END_BUILTINS;
  built_in_function fcodef64x = END_BUILTINS;
  built_in_function fcodef128x = END_BUILTINS;

  switch (fn)
    {
    CASE_MATHFN_FLOATN (CEIL)
    CASE_MATHFN_FLOATN (COPYSIGN)
    CASE_MATHFN_FLOATN (FLOOR)
    CASE_MATHFN_FLOATN (FMA)
    CASE_MATHFN_FLOATN (FMAX)
    CASE_MATHFN_FLOATN (FMIN)
    CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
    CASE_MATHFN (HUGE_VAL)
    CASE_MATHFN (IFLOOR)
    CASE_MATHFN (IROUND)
    CASE_MATHFN (LFLOOR)
    CASE_MATHFN (LGAMMA)
    CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
    CASE_MATHFN (LLCEIL)
    CASE_MATHFN (LLFLOOR)
    CASE_MATHFN (LLRINT)
    CASE_MATHFN (LLROUND)
    CASE_MATHFN (LROUND)
    CASE_MATHFN_FLOATN (NEARBYINT)
    CASE_MATHFN (NEXTAFTER)
    CASE_MATHFN (NEXTTOWARD)
    CASE_MATHFN (REMAINDER)
    CASE_MATHFN (REMQUO)
    CASE_MATHFN_FLOATN (RINT)
    CASE_MATHFN_FLOATN (ROUND)
    CASE_MATHFN (SCALBLN)
    CASE_MATHFN (SCALBN)
    CASE_MATHFN (SIGNBIT)
    CASE_MATHFN (SIGNIFICAND)
    CASE_MATHFN (SINCOS)
    CASE_MATHFN_FLOATN (SQRT)
    CASE_MATHFN (TGAMMA)
    CASE_MATHFN_FLOATN (TRUNC)

    default:
      return END_BUILTINS;
    }

  mtype = TYPE_MAIN_VARIANT (type);
  if (mtype == double_type_node)
    return fcode;
  else if (mtype == float_type_node)
    return fcodef;
  else if (mtype == long_double_type_node)
    return fcodel;
  else if (mtype == float16_type_node)
    return fcodef16;
  else if (mtype == float32_type_node)
    return fcodef32;
  else if (mtype == float64_type_node)
    return fcodef64;
  else if (mtype == float128_type_node)
    return fcodef128;
  else if (mtype == float32x_type_node)
    return fcodef32x;
  else if (mtype == float64x_type_node)
    return fcodef64x;
  else if (mtype == float128x_type_node)
    return fcodef128x;
  else
    return END_BUILTINS;
}
/* Return the mathematical function equivalent to FN but operating directly on
   TYPE, if available.  If IMPLICIT_P is true use the implicit builtin
   declaration, otherwise use the explicit declaration.  If we can't do the
   conversion, return null.  */

static tree
mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
{
  built_in_function fcode2 = mathfn_built_in_2 (type, fn);
  if (fcode2 == END_BUILTINS)
    return NULL_TREE;

  if (implicit_p && !builtin_decl_implicit_p (fcode2))
    return NULL_TREE;

  return builtin_decl_explicit (fcode2);
}
/* Like mathfn_built_in_1, but always use the implicit array.  */

tree
mathfn_built_in (tree type, combined_fn fn)
{
  return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
}

/* Like mathfn_built_in_1, but take a built_in_function and
   always use the implicit array.  */

tree
mathfn_built_in (tree type, enum built_in_function fn)
{
  return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
}
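
/* Example (a sketch, not from the original source): asking for the sqrt
   family member that operates on long double,

     tree fn = mathfn_built_in (long_double_type_node, BUILT_IN_SQRT);

   yields the declaration of sqrtl when the implicit builtin is available
   (i.e. the target's C library is assumed to provide it), and NULL_TREE
   otherwise.  */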
/* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
   return its code, otherwise return IFN_LAST.  Note that this function
   only tests whether the function is defined in internals.def, not whether
   it is actually available on the target.  */

internal_fn
associated_internal_fn (tree fndecl)
{
  gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
  tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
  switch (DECL_FUNCTION_CODE (fndecl))
    {
#define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
    CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
#define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
    CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
#define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
    CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
#include "internal-fn.def"

    CASE_FLT_FN (BUILT_IN_POW10):
      return IFN_EXP10;

    CASE_FLT_FN (BUILT_IN_DREM):
      return IFN_REMAINDER;

    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
	return IFN_LDEXP;
      return IFN_LAST;

    default:
      return IFN_LAST;
    }
}
/* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
   on the current target by a call to an internal function, return the
   code of that internal function, otherwise return IFN_LAST.  The caller
   is responsible for ensuring that any side-effects of the built-in
   call are dealt with correctly.  E.g. if CALL sets errno, the caller
   must decide that the errno result isn't needed or make it available
   in some other way.  */

internal_fn
replacement_internal_fn (gcall *call)
{
  if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
    {
      internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
      if (ifn != IFN_LAST)
	{
	  tree_pair types = direct_internal_fn_types (ifn, call);
	  optimization_type opt_type = bb_optimization_type (gimple_bb (call));
	  if (direct_internal_fn_supported_p (ifn, types, opt_type))
	    return ifn;
	}
    }
  return IFN_LAST;
}
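
/* Example (a sketch, not from the original source): for a GIMPLE call
   such as

     x = __builtin_sqrt (y);

   replacement_internal_fn returns IFN_SQRT provided the target implements
   the sqrt optab for y's mode; per the comment above, the caller must
   still prove that the errno side-effect is not needed (e.g. under
   -fno-math-errno) before performing the replacement.  */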
/* Expand a call to the builtin ternary math functions (fma).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, op2, result;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1, arg2;
  machine_mode mode;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  arg2 = CALL_EXPR_ARG (exp, 2);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_FMA):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
      builtin_optab = fma_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  result = gen_reg_rtx (mode);

  /* Always stabilize the argument list.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
  CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);
  op2 = expand_normal (arg2);

  start_sequence ();

  /* Compute into RESULT.
     Set RESULT to wherever the result comes back.  */
  result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
			      result, 0);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (result == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return result;
}
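
/* Example (a sketch, not from the original source): on a target whose
   fma optab is implemented, e.g. by a fused multiply-add instruction,

     double d = __builtin_fma (a, b, c);

   expands through expand_ternary_op to that single insn; otherwise the
   early CODE_FOR_nothing check above makes the caller emit an ordinary
   call to the fma library function.  */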
/* Expand a call to the builtin sin and cos math functions.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      builtin_optab = sincos_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Check if sincos insn is available, otherwise fallback
     to sin or cos insn.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_FLT_FN (BUILT_IN_SIN):
	builtin_optab = sin_optab; break;
      CASE_FLT_FN (BUILT_IN_COS):
	builtin_optab = cos_optab; break;
      default:
	gcc_unreachable ();
      }

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into RESULT.
	 Set RESULT to wherever the result comes back.  */
      if (builtin_optab == sincos_optab)
	{
	  int ok;

	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_SIN):
	      ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_COS):
	      ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  gcc_assert (ok);
	}
      else
	result = expand_unop (mode, builtin_optab, op0, result, 0);

      if (result != 0)
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return result;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  return expand_call (exp, target, target == const0_rtx);
}
/* Given an interclass math builtin decl FNDECL and its argument ARG
   return an RTL instruction code that implements the functionality.
   If that isn't possible or available return CODE_FOR_nothing.  */

static enum insn_code
interclass_mathfn_icode (tree arg, tree fndecl)
{
  bool errno_set = false;
  optab builtin_optab = unknown_optab;
  machine_mode mode;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ILOGB):
      errno_set = true; builtin_optab = ilogb_optab; break;
    CASE_FLT_FN (BUILT_IN_ISINF):
      builtin_optab = isinf_optab; break;
    case BUILT_IN_ISNORMAL:
    case BUILT_IN_ISFINITE:
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      /* These builtins have no optabs (yet).  */
      break;
    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && errno_set)
    return CODE_FOR_nothing;

  /* Optab mode depends on the mode of the input argument.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (builtin_optab)
    return optab_handler (builtin_optab, mode);
  return CODE_FOR_nothing;
}
/* Expand a call to one of the builtin math functions that operate on
   floating point argument and output an integer result (ilogb, isinf,
   isnan, etc).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_interclass_mathfn (tree exp, rtx target)
{
  enum insn_code icode = CODE_FOR_nothing;
  rtx op0;
  tree fndecl = get_callee_fndecl (exp);
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  icode = interclass_mathfn_icode (arg, fndecl);
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[1];
      rtx_insn *last = get_last_insn ();
      tree orig_arg = arg;

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      if (mode != GET_MODE (op0))
	op0 = convert_to_mode (mode, op0, 0);

      create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_legitimize_operands (icode, 0, 1, ops)
	  && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
	return ops[0].value;

      delete_insns_since (last);
      CALL_EXPR_ARG (exp, 0) = orig_arg;
    }

  return NULL_RTX;
}
/* Expand a call to the builtin sincos math function.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function.  */

static rtx
expand_builtin_sincos (tree exp)
{
  rtx op0, op1, op2, target1, target2;
  machine_mode mode;
  tree arg, sinp, cosp;
  int result;
  location_t loc = EXPR_LOCATION (exp);
  tree alias_type, alias_off;

  if (!validate_arglist (exp, REAL_TYPE,
			 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  sinp = CALL_EXPR_ARG (exp, 1);
  cosp = CALL_EXPR_ARG (exp, 2);

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Check if sincos insn is available, otherwise emit the call.  */
  if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  target1 = gen_reg_rtx (mode);
  target2 = gen_reg_rtx (mode);

  op0 = expand_normal (arg);
  alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
  alias_off = build_int_cst (alias_type, 0);
  op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
					sinp, alias_off));
  op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
					cosp, alias_off));

  /* Compute into target1 and target2.
     Set TARGET to wherever the result comes back.  */
  result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
  gcc_assert (result);

  /* Move target1 and target2 to the memory locations indicated
     by op1 and op2.  */
  emit_move_insn (op1, target1);
  emit_move_insn (op2, target2);

  return const0_rtx;
}
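
/* Example (a sketch, not from the original source): for

     double s, c;
     sincos (x, &s, &c);

   the expander above computes both results with one twoval sincos insn
   and stores them through the two pointer arguments; targets without
   sincos_optab get the ordinary library call instead.  */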
/* Expand a call to the internal cexpi builtin to the sincos math function.
   EXP is the expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  */

static rtx
expand_builtin_cexpi (tree exp, rtx target)
{
  tree fndecl = get_callee_fndecl (exp);
  tree arg, type;
  machine_mode mode;
  rtx op0, op1, op2;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Try expanding via a sincos optab, fall back to emitting a libcall
     to sincos or cexp.  We are sure we have sincos or cexp because cexpi
     is only generated from sincos, cexp or if we have either of them.  */
  if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
    {
      op1 = gen_reg_rtx (mode);
      op2 = gen_reg_rtx (mode);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      /* Compute into op1 and op2.  */
      expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
    }
  else if (targetm.libc_has_function (function_sincos))
    {
      tree call, fn = NULL_TREE;
      tree top1, top2;
      rtx op1a, op2a;

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = builtin_decl_explicit (BUILT_IN_SINCOS);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
      else
	gcc_unreachable ();

      op1 = assign_temp (TREE_TYPE (arg), 1, 1);
      op2 = assign_temp (TREE_TYPE (arg), 1, 1);
      op1a = copy_addr_to_reg (XEXP (op1, 0));
      op2a = copy_addr_to_reg (XEXP (op2, 0));
      top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
      top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);

      /* Make sure not to fold the sincos call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
				      call, 3, arg, top1, top2));
    }
  else
    {
      tree call, fn = NULL_TREE, narg;
      tree ctype = build_complex_type (type);

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = builtin_decl_explicit (BUILT_IN_CEXPF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = builtin_decl_explicit (BUILT_IN_CEXP);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = builtin_decl_explicit (BUILT_IN_CEXPL);
      else
	gcc_unreachable ();

      /* If we don't have a decl for cexp create one.  This is the
	 friendliest fallback if the user calls __builtin_cexpi
	 without full target C99 function support.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;
	  const char *name = NULL;

	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	    name = "cexpf";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	    name = "cexp";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	    name = "cexpl";

	  fntype = build_function_type_list (ctype, ctype, NULL_TREE);
	  fn = build_fn_decl (name, fntype);
	}

      narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
			      build_real (type, dconst0), arg);

      /* Make sure not to fold the cexp call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      return expand_expr (build_call_nary (ctype, call, 1, narg),
			  target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now build the proper return type.  */
  return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
			      make_tree (TREE_TYPE (arg), op2),
			      make_tree (TREE_TYPE (arg), op1)),
		      target, VOIDmode, EXPAND_NORMAL);
}
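
/* Example (a sketch, not from the original source): __builtin_cexpi (x)
   computes cos (x) + i*sin (x).  GCC generates it internally, e.g. when
   folding a sin/cos pair of the same argument, and the expander above
   lowers it back to a sincos insn, a sincos libcall, or a cexp call on
   the complex value 0 + x*i, depending on what the target provides.  */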
/* Conveniently construct a function call expression.  FNDECL names the
   function to be called, N is the number of arguments, and the "..."
   parameters are the argument expressions.  Unlike build_call_expr
   this doesn't fold the call, hence it will always return a CALL_EXPR.  */

static tree
build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
{
  va_list ap;
  tree fntype = TREE_TYPE (fndecl);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);

  va_start (ap, n);
  fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
  va_end (ap);
  SET_EXPR_LOCATION (fn, loc);
  return fn;
}
/* Expand a call to one of the builtin rounding functions gcc defines
   as an extension (lfloor and lceil).  As these are gcc extensions we
   do not need to worry about setting errno to EDOM.
   If expanding via optab fails, lower expression to (int)(floor(x)).
   EXP is the expression that is a call to the builtin function;
   if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, tmp;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fallback_fn;
  tree fallback_fndecl;
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
      builtin_optab = lceil_optab;
      fallback_fn = BUILT_IN_CEIL;
      break;

    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      builtin_optab = lfloor_optab;
      fallback_fn = BUILT_IN_FLOOR;
      break;

    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  /* Compute into TARGET.  */
  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns).  */
  end_sequence ();

  /* Fall back to floating point rounding optab.  */
  fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);

  /* For non-C99 targets we may end up without a fallback fndecl here
     if the user called __builtin_lfloor directly.  In this case emit
     a call to the floor/ceil variants nevertheless.  This should result
     in the best user experience for not full C99 targets.  */
  if (fallback_fndecl == NULL_TREE)
    {
      tree fntype;
      const char *name = NULL;

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_ICEIL:
	case BUILT_IN_LCEIL:
	case BUILT_IN_LLCEIL:
	  name = "ceil";
	  break;
	case BUILT_IN_ICEILF:
	case BUILT_IN_LCEILF:
	case BUILT_IN_LLCEILF:
	  name = "ceilf";
	  break;
	case BUILT_IN_ICEILL:
	case BUILT_IN_LCEILL:
	case BUILT_IN_LLCEILL:
	  name = "ceill";
	  break;
	case BUILT_IN_IFLOOR:
	case BUILT_IN_LFLOOR:
	case BUILT_IN_LLFLOOR:
	  name = "floor";
	  break;
	case BUILT_IN_IFLOORF:
	case BUILT_IN_LFLOORF:
	case BUILT_IN_LLFLOORF:
	  name = "floorf";
	  break;
	case BUILT_IN_IFLOORL:
	case BUILT_IN_LFLOORL:
	case BUILT_IN_LLFLOORL:
	  name = "floorl";
	  break;
	default:
	  gcc_unreachable ();
	}

      fntype = build_function_type_list (TREE_TYPE (arg),
					 TREE_TYPE (arg), NULL_TREE);
      fallback_fndecl = build_fn_decl (name, fntype);
    }

  exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);

  tmp = expand_normal (exp);
  tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));

  /* Truncate the result of floating point optab to integer
     via expand_fix ().  */
  target = gen_reg_rtx (mode);
  expand_fix (target, tmp, 0);

  return target;
}
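
/* Example (a sketch, not from the original source): for

     long l = __builtin_lfloor (x);

   the expander first tries lfloor_optab; failing that it rebuilds the
   call as floor (x) (or a bare "floor" decl on non-C99 targets) and
   truncates the result with expand_fix, effectively (long) floor (x).  */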
/* Expand a call to one of the builtin math functions doing integer
   conversion (lrint).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn_2 (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg;
  machine_mode mode;
  enum built_in_function fallback_fn = BUILT_IN_NONE;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_IRINT):
      fallback_fn = BUILT_IN_LRINT;
      gcc_fallthrough ();
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      builtin_optab = lrint_optab;
      break;

    CASE_FLT_FN (BUILT_IN_IROUND):
      fallback_fn = BUILT_IN_LROUND;
      gcc_fallthrough ();
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      builtin_optab = lround_optab;
      break;

    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
    return NULL_RTX;

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (!flag_errno_math)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      if (expand_sfix_optab (result, op0, builtin_optab))
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return result;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  if (fallback_fn != BUILT_IN_NONE)
    {
      /* Fall back to rounding to long int.  Use implicit_p 0 - for non-C99
	 targets, (int) round (x) should never be transformed into
	 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
	 a call to lround in the hope that the target provides at least some
	 C99 functions.  This should result in the best user experience for
	 not full C99 targets.  */
      tree fallback_fndecl = mathfn_built_in_1
	(TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);

      exp = build_call_nofold_loc (EXPR_LOCATION (exp),
				   fallback_fndecl, 1, arg);

      target = expand_call (exp, NULL_RTX, target == const0_rtx);
      target = maybe_emit_group_store (target, TREE_TYPE (exp));
      return convert_to_mode (mode, target, 0);
    }

  return expand_call (exp, target, target == const0_rtx);
}
/* Expand a call to the powi built-in mathematical function.  Return NULL_RTX if
   a normal call should be emitted rather than expanding the function
   in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_powi (tree exp, rtx target)
{
  tree arg0, arg1;
  rtx op0, op1;
  machine_mode mode;
  machine_mode mode2;

  if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Emit a libcall to libgcc.  */

  /* Mode of the 2nd argument must match that of an int.  */
  mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();

  if (target == NULL_RTX)
    target = gen_reg_rtx (mode);

  op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != mode)
    op0 = convert_to_mode (mode, op0, 0);
  op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
  if (GET_MODE (op1) != mode2)
    op1 = convert_to_mode (mode2, op1, 0);

  target = emit_library_call_value (optab_libfunc (powi_optab, mode),
				    target, LCT_CONST, mode,
				    op0, mode, op1, mode2);

  return target;
}
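
/* Example (a sketch, not from the original source): powi is always
   expanded as a libcall into libgcc here, so

     double p = __builtin_powi (x, n);

   is assumed to become a call to the libgcc helper __powidf2 (x, n) for
   double, with the exponent converted to int mode first.  */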
/* Expand expression EXP which is a call to the strlen builtin.  Return
   NULL_RTX if we failed and the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strlen (tree exp, rtx target,
		       machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  struct expand_operand ops[4];
  rtx pat;
  tree len;
  tree src = CALL_EXPR_ARG (exp, 0);
  rtx src_reg;
  rtx_insn *before_strlen;
  machine_mode insn_mode;
  enum insn_code icode = CODE_FOR_nothing;
  unsigned int align;

  /* If the length can be computed at compile-time, return it.  */
  len = c_strlen (src, 0);
  if (len)
    return expand_expr (len, target, target_mode, EXPAND_NORMAL);

  /* If the length can be computed at compile-time and is constant
     integer, but there are side-effects in src, evaluate
     src for side-effects, then return len.
     E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
     can be optimized into: i++; x = 3;  */
  len = c_strlen (src, 1);
  if (len && TREE_CODE (len) == INTEGER_CST)
    {
      expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (len, target, target_mode, EXPAND_NORMAL);
    }

  align = get_pointer_alignment (src) / BITS_PER_UNIT;

  /* If SRC is not a pointer type, don't do this operation inline.  */
  if (align == 0)
    return NULL_RTX;

  /* Bail out if we can't compute strlen in the right mode.  */
  FOR_EACH_MODE_FROM (insn_mode, target_mode)
    {
      icode = optab_handler (strlen_optab, insn_mode);
      if (icode != CODE_FOR_nothing)
	break;
    }
  if (insn_mode == VOIDmode)
    return NULL_RTX;

  /* Make a place to hold the source address.  We will not expand
     the actual source until we are sure that the expansion will
     not fail -- there are trees that cannot be expanded twice.  */
  src_reg = gen_reg_rtx (Pmode);

  /* Mark the beginning of the strlen sequence so we can emit the
     source operand later.  */
  before_strlen = get_last_insn ();

  create_output_operand (&ops[0], target, insn_mode);
  create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
  create_integer_operand (&ops[2], 0);
  create_integer_operand (&ops[3], align);
  if (!maybe_expand_insn (icode, 4, ops))
    return NULL_RTX;

  /* Check to see if the argument was declared attribute nonstring
     and if so, issue a warning since at this point it's not known
     to be nul-terminated.  */
  maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);

  /* Now that we are assured of success, expand the source.  */
  start_sequence ();
  pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
  if (pat != src_reg)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      if (GET_MODE (pat) != Pmode)
	pat = convert_to_mode (Pmode, pat,
			       POINTERS_EXTEND_UNSIGNED);
#endif
      emit_move_insn (src_reg, pat);
    }
  pat = get_insns ();
  end_sequence ();

  if (before_strlen)
    emit_insn_after (pat, before_strlen);
  else
    emit_insn_before (pat, get_insns ());

  /* Return the value in the proper mode for this function.  */
  if (GET_MODE (ops[0].value) == target_mode)
    target = ops[0].value;
  else if (target != 0)
    convert_move (target, ops[0].value, 0);
  else
    target = convert_to_mode (target_mode, ops[0].value, 0);

  return target;
}
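
/* Example (a sketch, not from the original source): c_strlen lets

     size_t n = strlen ("hello");

   fold to the constant 5 with no code emitted, while

     size_t m = strlen (i++ ? "xfoo" + 1 : "bar");

   (from the comment above) evaluates i++ for its side-effect and then
   uses the constant 3.  */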
/* Expand call EXP to the strnlen built-in, returning the result
   and setting it in TARGET.  Otherwise return NULL_RTX on failure.  */

static rtx
expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree src = CALL_EXPR_ARG (exp, 0);
  tree bound = CALL_EXPR_ARG (exp, 1);

  if (!bound)
    return NULL_RTX;

  location_t loc = UNKNOWN_LOCATION;
  if (EXPR_HAS_LOCATION (exp))
    loc = EXPR_LOCATION (exp);

  tree maxobjsize = max_object_size ();
  tree func = get_callee_fndecl (exp);

  /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
     so these conversions aren't necessary.  */
  c_strlen_data data;
  memset (&data, 0, sizeof (c_strlen_data));
  tree len = c_strlen (src, 0, &data, 1);
  if (len)
    len = fold_convert_loc (loc, TREE_TYPE (bound), len);

  if (TREE_CODE (bound) == INTEGER_CST)
    {
      if (!TREE_NO_WARNING (exp)
	  && tree_int_cst_lt (maxobjsize, bound)
	  && warning_at (loc, OPT_Wstringop_overflow_,
			 "%K%qD specified bound %E "
			 "exceeds maximum object size %E",
			 exp, func, bound, maxobjsize))
	TREE_NO_WARNING (exp) = true;

      bool exact = true;
      if (!len || TREE_CODE (len) != INTEGER_CST)
	{
	  /* Clear EXACT if LEN may be less than SRC suggests,
	     such as in
	       strnlen (&a[i], sizeof a)
	     where the value of i is unknown.  Unless i's value is
	     zero, the call is unsafe because the bound is greater.  */
	  data.decl = unterminated_array (src, &len, &exact);
	  if (!data.decl)
	    return NULL_RTX;
	}

      if (data.decl
	  && !TREE_NO_WARNING (exp)
	  && ((tree_int_cst_lt (len, bound))
	      || !exact))
	{
	  location_t warnloc
	    = expansion_point_location_if_in_system_header (loc);

	  if (warning_at (warnloc, OPT_Wstringop_overflow_,
			  exact
			  ? G_("%K%qD specified bound %E exceeds the size %E "
			       "of unterminated array")
			  : G_("%K%qD specified bound %E may exceed the size "
			       "of at most %E of unterminated array"),
			  exp, func, bound, len))
	    {
	      inform (DECL_SOURCE_LOCATION (data.decl),
		      "referenced argument declared here");
	      TREE_NO_WARNING (exp) = true;
	      return NULL_RTX;
	    }
	}

      if (!len)
	return NULL_RTX;

      len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
      return expand_expr (len, target, target_mode, EXPAND_NORMAL);
    }

  if (TREE_CODE (bound) != SSA_NAME)
    return NULL_RTX;

  wide_int min, max;
  enum value_range_kind rng = get_range_info (bound, &min, &max);
  if (rng != VR_RANGE)
    return NULL_RTX;

  if (!TREE_NO_WARNING (exp)
      && wi::ltu_p (wi::to_wide (maxobjsize), min)
      && warning_at (loc, OPT_Wstringop_overflow_,
		     "%K%qD specified bound [%wu, %wu] "
		     "exceeds maximum object size %E",
		     exp, func, min.to_uhwi (), max.to_uhwi (), maxobjsize))
    TREE_NO_WARNING (exp) = true;

  bool exact = true;
  if (!len || TREE_CODE (len) != INTEGER_CST)
    {
      data.decl = unterminated_array (src, &len, &exact);
      if (!data.decl)
	return NULL_RTX;
    }

  if (data.decl
      && !TREE_NO_WARNING (exp)
      && (wi::ltu_p (wi::to_wide (len), min)
	  || !exact))
    {
      location_t warnloc
	= expansion_point_location_if_in_system_header (loc);

      if (warning_at (warnloc, OPT_Wstringop_overflow_,
		      exact
		      ? G_("%K%qD specified bound [%wu, %wu] exceeds "
			   "the size %E of unterminated array")
		      : G_("%K%qD specified bound [%wu, %wu] may exceed "
			   "the size of at most %E of unterminated array"),
		      exp, func, min.to_uhwi (), max.to_uhwi (), len))
	{
	  inform (DECL_SOURCE_LOCATION (data.decl),
		  "referenced argument declared here");
	  TREE_NO_WARNING (exp) = true;
	}
    }

  if (!len)
    return NULL_RTX;

  if (wi::gtu_p (min, wi::to_wide (len)))
    return expand_expr (len, target, target_mode, EXPAND_NORMAL);

  len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
  return expand_expr (len, target, target_mode, EXPAND_NORMAL);
}
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

static rtx
builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
			 scalar_int_mode mode)
{
  const char *str = (const char *) data;

  gcc_assert (offset >= 0
	      && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
		  <= strlen (str) + 1));

  return c_readstr (str + offset, mode);
}
/* LEN specifies the length of the block of the memcpy/memset operation.
   Figure out its range and put it into MIN_SIZE/MAX_SIZE.
   In some cases we can make a very likely guess on the max size, then we
   set it into PROBABLE_MAX_SIZE.  */

static void
determine_block_size (tree len, rtx len_rtx,
		      unsigned HOST_WIDE_INT *min_size,
		      unsigned HOST_WIDE_INT *max_size,
		      unsigned HOST_WIDE_INT *probable_max_size)
{
  if (CONST_INT_P (len_rtx))
    {
      *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
      return;
    }
  else
    {
      wide_int min, max;
      enum value_range_kind range_type = VR_UNDEFINED;

      /* Determine bounds from the type.  */
      if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
	*min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
      else
	*min_size = 0;
      if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
	*probable_max_size = *max_size
	  = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
      else
	*probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));

      if (TREE_CODE (len) == SSA_NAME)
	range_type = get_range_info (len, &min, &max);
      if (range_type == VR_RANGE)
	{
	  if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
	    *min_size = min.to_uhwi ();
	  if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
	    *probable_max_size = *max_size = max.to_uhwi ();
	}
      else if (range_type == VR_ANTI_RANGE)
	{
	  /* An anti-range 0...N lets us determine the minimal size as N+1.  */
	  if (min == 0)
	    {
	      if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
		*min_size = max.to_uhwi () + 1;
	    }
	  /* Code like

	     int n;
	     if (n < 100)
	       memcpy (a, b, n)

	     produces an anti-range allowing negative values of N.  We
	     still can use the information and make a guess that N is
	     not negative.  */
	  else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
	    *probable_max_size = min.to_uhwi () - 1;
	}
    }
  gcc_checking_assert (*max_size <=
		       (unsigned HOST_WIDE_INT)
		       GET_MODE_MASK (GET_MODE (len_rtx)));
}
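
/* Example (a sketch, not from the original source): given

     void f (unsigned int n)
     {
       if (n >= 4 && n <= 32)
	 memcpy (a, b, n);   -- a, b assumed arrays in scope
     }

   where range information gives n the range [4, 32], this helper sets
   *MIN_SIZE to 4 and both *MAX_SIZE and *PROBABLE_MAX_SIZE to 32, which
   the block-move expanders use to pick a copying strategy.  */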
/* Try to verify that the sizes and lengths of the arguments to a string
   manipulation function given by EXP are within valid bounds and that
   the operation does not lead to buffer overflow or read past the end.
   Arguments other than EXP may be null.  When non-null, the arguments
   have the following meaning:
   DST is the destination of a copy call or NULL otherwise.
   SRC is the source of a copy call or NULL otherwise.
   DSTWRITE is the number of bytes written into the destination obtained
   from the user-supplied size argument to the function (such as in
   memcpy (DST, SRC, DSTWRITE) or strncpy (DST, SRC, DSTWRITE)).
   MAXREAD is the user-supplied bound on the length of the source sequence
   (such as in strncat (D, S, N)).  It specifies the upper limit on the
   number of bytes to write.  If NULL, it's taken to be the same as
   DSTWRITE.
   SRCSTR is the source string (such as in strcpy (DST, SRC)) when the
   expression EXP is a string function call (as opposed to a memory call
   like memcpy).  As an exception, SRCSTR can also be an integer denoting
   the precomputed size of the source string or object (for functions like
   memcpy).
   DSTSIZE is the size of the destination object specified by the last
   argument to the _chk builtins, typically resulting from the expansion
   of __builtin_object_size (such as in __builtin___strcpy_chk (DST, SRC,
   DSTSIZE)).

   When DSTWRITE is null LEN is checked to verify that it doesn't exceed
   SIZE_MAX.

   If the call is successfully verified as safe return true, otherwise
   return false.  */

static bool
check_access (tree exp, tree, tree, tree dstwrite,
	      tree maxread, tree srcstr, tree dstsize)
{
  int opt = OPT_Wstringop_overflow_;

  /* The size of the largest object is half the address space, or
     PTRDIFF_MAX.  (This is way too permissive.)  */
  tree maxobjsize = max_object_size ();

  /* Either the length of the source string for string functions or
     the size of the source object for raw memory functions.  */
  tree slen = NULL_TREE;

  tree range[2] = { NULL_TREE, NULL_TREE };

  /* Set to true when the exact number of bytes written by a string
     function like strcpy is not known and the only thing that is
     known is that it must be at least one (for the terminating nul).  */
  bool at_least_one = false;
  if (srcstr)
    {
      /* SRCSTR is normally a pointer to string but as a special case
	 it can be an integer denoting the length of a string.  */
      if (POINTER_TYPE_P (TREE_TYPE (srcstr)))
	{
	  /* Try to determine the range of lengths the source string
	     refers to.  If it can be determined and is less than
	     the upper bound given by MAXREAD add one to it for
	     the terminating nul.  Otherwise, set it to one for
	     the same reason, or to MAXREAD as appropriate.  */
	  get_range_strlen (srcstr, range);
	  if (range[0] && (!maxread || TREE_CODE (maxread) == INTEGER_CST))
	    {
	      if (maxread && tree_int_cst_le (maxread, range[0]))
		range[0] = range[1] = maxread;
	      else
		range[0] = fold_build2 (PLUS_EXPR, size_type_node,
					range[0], size_one_node);

	      if (maxread && tree_int_cst_le (maxread, range[1]))
		range[1] = maxread;
	      else if (!integer_all_onesp (range[1]))
		range[1] = fold_build2 (PLUS_EXPR, size_type_node,
					range[1], size_one_node);

	      slen = range[0];
	    }
	  else
	    {
	      at_least_one = true;
	      slen = size_one_node;
	    }
	}
      else
	slen = srcstr;
    }

  if (!dstwrite && !maxread)
    {
      /* When the only available piece of data is the object size
	 there is nothing to do.  */
      if (!slen)
	return true;

      /* Otherwise, when the length of the source sequence is known
	 (as with strlen), set DSTWRITE to it.  */
      if (!range[0])
	dstwrite = slen;
    }

  if (!dstsize)
    dstsize = maxobjsize;

  if (dstwrite)
    get_size_range (dstwrite, range);

  tree func = get_callee_fndecl (exp);

  /* First check the number of bytes to be written against the maximum
     object size.  */
  if (range[0]
      && TREE_CODE (range[0]) == INTEGER_CST
      && tree_int_cst_lt (maxobjsize, range[0]))
    {
      if (TREE_NO_WARNING (exp))
	return false;

      location_t loc = tree_nonartificial_location (exp);
      loc = expansion_point_location_if_in_system_header (loc);

      bool warned = false;
      if (range[0] == range[1])
	warned = warning_at (loc, opt,
			     "%K%qD specified size %E "
			     "exceeds maximum object size %E",
			     exp, func, range[0], maxobjsize);
      else
	warned = warning_at (loc, opt,
			     "%K%qD specified size between %E and %E "
			     "exceeds maximum object size %E",
			     exp, func,
			     range[0], range[1], maxobjsize);
      if (warned)
	TREE_NO_WARNING (exp) = true;

      return false;
    }

  /* The number of bytes to write is "exact" if DSTWRITE is non-null,
     constant, and in range of unsigned HOST_WIDE_INT.  */
  bool exactwrite = dstwrite && tree_fits_uhwi_p (dstwrite);

  /* Next check the number of bytes to be written against the destination
     object size.  */
  if (range[0] || !exactwrite || integer_all_onesp (dstwrite))
    {
      if (range[0]
	  && TREE_CODE (range[0]) == INTEGER_CST
	  && ((tree_fits_uhwi_p (dstsize)
	       && tree_int_cst_lt (dstsize, range[0]))
	      || (dstwrite
		  && tree_fits_uhwi_p (dstwrite)
		  && tree_int_cst_lt (dstwrite, range[0]))))
	{
	  if (TREE_NO_WARNING (exp))
	    return false;

	  location_t loc = tree_nonartificial_location (exp);
	  loc = expansion_point_location_if_in_system_header (loc);

	  if (dstwrite == slen && at_least_one)
	    {
	      /* This is a call to strcpy with a destination of 0 size
		 and a source of unknown length.  The call will write
		 at least one byte past the end of the destination.  */
	      warning_at (loc, opt,
			  "%K%qD writing %E or more bytes into a region "
			  "of size %E overflows the destination",
			  exp, func, range[0], dstsize);
	    }
	  else if (tree_int_cst_equal (range[0], range[1]))
	    warning_n (loc, opt, tree_to_uhwi (range[0]),
		       "%K%qD writing %E byte into a region "
		       "of size %E overflows the destination",
		       "%K%qD writing %E bytes into a region "
		       "of size %E overflows the destination",
		       exp, func, range[0], dstsize);
	  else if (tree_int_cst_sign_bit (range[1]))
	    {
	      /* Avoid printing the upper bound if it's invalid.  */
	      warning_at (loc, opt,
			  "%K%qD writing %E or more bytes into a region "
			  "of size %E overflows the destination",
			  exp, func, range[0], dstsize);
	    }
	  else
	    warning_at (loc, opt,
			"%K%qD writing between %E and %E bytes into "
			"a region of size %E overflows the destination",
			exp, func, range[0], range[1],
			dstsize);

	  /* Return error when an overflow has been detected.  */
	  return false;
	}
    }

  /* Check the maximum length of the source sequence against the size
     of the destination object if known, or against the maximum size
     of an object.  */
  if (maxread)
    {
      get_size_range (maxread, range);

      /* Use the lower end for MAXREAD from now on.  */
      if (range[0])
	maxread = range[0];

      if (range[0] && dstsize && tree_fits_uhwi_p (dstsize))
	{
	  location_t loc = tree_nonartificial_location (exp);
	  loc = expansion_point_location_if_in_system_header (loc);

	  if (tree_int_cst_lt (maxobjsize, range[0]))
	    {
	      if (TREE_NO_WARNING (exp))
		return false;

	      /* Warn about crazy big sizes first since that's more
		 likely to be meaningful than saying that the bound
		 is greater than the object size if both are big.  */
	      if (range[0] == range[1])
		warning_at (loc, opt,
			    "%K%qD specified bound %E "
			    "exceeds maximum object size %E",
			    exp, func,
			    range[0], maxobjsize);
	      else
		warning_at (loc, opt,
			    "%K%qD specified bound between %E and %E "
			    "exceeds maximum object size %E",
			    exp, func,
			    range[0], range[1], maxobjsize);

	      return false;
	    }

	  if (dstsize != maxobjsize && tree_int_cst_lt (dstsize, range[0]))
	    {
	      if (TREE_NO_WARNING (exp))
		return false;

	      if (tree_int_cst_equal (range[0], range[1]))
		warning_at (loc, opt,
			    "%K%qD specified bound %E "
			    "exceeds destination size %E",
			    exp, func,
			    range[0], dstsize);
	      else
		warning_at (loc, opt,
			    "%K%qD specified bound between %E and %E "
			    "exceeds destination size %E",
			    exp, func,
			    range[0], range[1], dstsize);
	      return false;
	    }
	}
    }

  /* Check for reading past the end of SRC.  */
  if (slen
      && slen == srcstr
      && dstwrite && range[0]
      && tree_int_cst_lt (slen, range[0]))
    {
      if (TREE_NO_WARNING (exp))
	return false;

      location_t loc = tree_nonartificial_location (exp);

      if (tree_int_cst_equal (range[0], range[1]))
	warning_n (loc, opt, tree_to_uhwi (range[0]),
		   "%K%qD reading %E byte from a region of size %E",
		   "%K%qD reading %E bytes from a region of size %E",
		   exp, func, range[0], slen);
      else if (tree_int_cst_sign_bit (range[1]))
	{
	  /* Avoid printing the upper bound if it's invalid.  */
	  warning_at (loc, opt,
		      "%K%qD reading %E or more bytes from a region "
		      "of size %E",
		      exp, func, range[0], slen);
	}
      else
	warning_at (loc, opt,
		    "%K%qD reading between %E and %E bytes from a region "
		    "of size %E",
		    exp, func, range[0], range[1], slen);
      return false;
    }

  return true;
}
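
/* Worked example (a sketch, not from the original source): check_access
   is what diagnoses calls such as

     char d[3];
     strcpy (d, "abcd");   -- triggers -Wstringop-overflow

   here get_range_strlen gives the source the length range [4, 4], one is
   added for the terminating nul, and the resulting 5 bytes exceed the
   destination size 3, so the "writing ... bytes into a region of size ..."
   warning above fires and false is returned.  */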
/* Helper to compute the size of the object referenced by the DEST
   expression which must have pointer type, using Object Size type
   OSTYPE (only the least significant 2 bits are used).  Return
   an estimate of the size of the object if successful or NULL when
   the size cannot be determined.  When the referenced object involves
   a non-constant offset in some range the returned value represents
   the largest size given the smallest non-negative offset in the
   range.  The function is intended for diagnostics and should not
   be used to influence code generation or optimization.  */

tree
compute_objsize (tree dest, int ostype)
{
  unsigned HOST_WIDE_INT size;

  /* Only the two least significant bits are meaningful.  */
  ostype &= 3;

  if (compute_builtin_object_size (dest, ostype, &size))
    return build_int_cst (sizetype, size);

  if (TREE_CODE (dest) == SSA_NAME)
    {
      gimple *stmt = SSA_NAME_DEF_STMT (dest);
      if (!is_gimple_assign (stmt))
	return NULL_TREE;

      dest = gimple_assign_rhs1 (stmt);

      tree_code code = gimple_assign_rhs_code (stmt);
      if (code == POINTER_PLUS_EXPR)
	{
	  /* compute_builtin_object_size fails for addresses with
	     non-constant offsets.  Try to determine the range of
	     such an offset here and use it to adjust the constant
	     size.  */
	  tree off = gimple_assign_rhs2 (stmt);
	  if (TREE_CODE (off) == INTEGER_CST)
	    {
	      if (tree size = compute_objsize (dest, ostype))
		{
		  wide_int wioff = wi::to_wide (off);
		  wide_int wisiz = wi::to_wide (size);

		  /* Ignore negative offsets for now.  For others,
		     use the lower bound as the most optimistic
		     estimate of the (remaining) size.  */
		  if (wi::sign_mask (wioff))
		    ;
		  else if (wi::ltu_p (wioff, wisiz))
		    return wide_int_to_tree (TREE_TYPE (size),
					     wi::sub (wisiz, wioff));
		  else
		    return size_zero_node;
		}
	    }
	  else if (TREE_CODE (off) == SSA_NAME
		   && INTEGRAL_TYPE_P (TREE_TYPE (off)))
	    {
	      wide_int min, max;
	      enum value_range_kind rng = get_range_info (off, &min, &max);

	      if (rng == VR_RANGE)
		{
		  if (tree size = compute_objsize (dest, ostype))
		    {
		      wide_int wisiz = wi::to_wide (size);

		      /* Ignore negative offsets for now.  For others,
			 use the lower bound as the most optimistic
			 estimate of the (remaining) size.  */
		      if (wi::sign_mask (min))
			;
		      else if (wi::ltu_p (min, wisiz))
			return wide_int_to_tree (TREE_TYPE (size),
						 wi::sub (wisiz, min));
		      else
			return size_zero_node;
		    }
		}
	    }
	}
      else if (code != ADDR_EXPR)
	return NULL_TREE;
    }

  /* Unless computing the largest size (for memcpy and other raw memory
     functions), try to determine the size of the object from its type.  */
  if (!ostype)
    return NULL_TREE;

  if (TREE_CODE (dest) != ADDR_EXPR)
    return NULL_TREE;

  tree type = TREE_TYPE (dest);
  if (TREE_CODE (type) == POINTER_TYPE)
    type = TREE_TYPE (type);

  type = TYPE_MAIN_VARIANT (type);

  if (TREE_CODE (type) == ARRAY_TYPE
      && !array_at_struct_end_p (TREE_OPERAND (dest, 0)))
    {
      /* Return the constant size unless it's zero (that's a zero-length
	 array likely at the end of a struct).  */
      tree size = TYPE_SIZE_UNIT (type);
      if (size && TREE_CODE (size) == INTEGER_CST
	  && !integer_zerop (size))
	return size;
    }

  return NULL_TREE;
}
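
/* Example (a sketch, not from the original source): for an expression
   like &a[2] where a is declared char a[7], the POINTER_PLUS_EXPR case
   above subtracts the constant offset 2 from the object size 7, so

     char a[7];
     memcpy (&a[2], s, 9);   -- diagnosed: only 5 bytes remain

   is flagged; with an offset known only to lie in a range, the smallest
   non-negative bound is used instead, per the comment above.  */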
/* Helper to determine and check the sizes of the source and the destination
   of calls to __builtin_{bzero,memcpy,mempcpy,memset} calls.  EXP is the
   call expression, DEST is the destination argument, SRC is the source
   argument or null, and LEN is the number of bytes.  Use Object Size type-0
   regardless of the OPT_Wstringop_overflow_ setting.  Return true on success
   (no overflow or invalid sizes), false otherwise.  */

static bool
check_memop_access (tree exp, tree dest, tree src, tree size)
{
  /* For functions like memset and memcpy that operate on raw memory
     try to determine the size of the largest source and destination
     object using type-0 Object Size regardless of the object size
     type specified by the option.  */
  tree srcsize = src ? compute_objsize (src, 0) : NULL_TREE;
  tree dstsize = compute_objsize (dest, 0);

  return check_access (exp, dest, src, size, /*maxread=*/NULL_TREE,
		       srcsize, dstsize);
}
/* Validate memchr arguments without performing any expansion.
   Return NULL_RTX.  */

static rtx
expand_builtin_memchr (tree exp, rtx)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree len = CALL_EXPR_ARG (exp, 2);

  /* Diagnose calls where the specified length exceeds the size
     of the object.  */
  if (warn_stringop_overflow)
    {
      tree size = compute_objsize (arg1, 0);
      check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
		    /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE);
    }

  return NULL_RTX;
}
/* Expand a call EXP to the memcpy builtin.
   Return NULL_RTX if we failed, the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memcpy (tree exp, rtx target)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);

  check_memop_access (exp, dest, src, len);

  return expand_builtin_memory_copy_args (dest, src, len, target, exp,
					  /*retmode=*/ RETURN_BEGIN);
}
/* Check a call EXP to the memmove built-in for validity.
   Return NULL_RTX on both success and failure.  */

static rtx
expand_builtin_memmove (tree exp, rtx)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);

  check_memop_access (exp, dest, src, len);

  return NULL_RTX;
}
/* Expand a call EXP to the mempcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_mempcpy (tree exp, rtx target)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);

  /* Policy does not generally allow using compute_objsize (which
     is used internally by check_memop_access) to change code generation
     or drive optimization decisions.

     In this instance it is safe because the code we generate has
     the same semantics regardless of the return value of
     check_memop_access.  Exactly the same amount of data is copied
     and the return value is exactly the same in both cases.

     Furthermore, check_memop_access always uses mode 0 for the call to
     compute_objsize, so the imprecise nature of compute_objsize is
     not an issue here.  */

  /* Avoid expanding mempcpy into memcpy when the call is determined
     to overflow the buffer.  This also prevents the same overflow
     from being diagnosed again when expanding memcpy.  */
  if (!check_memop_access (exp, dest, src, len))
    return NULL_RTX;

  return expand_builtin_mempcpy_args (dest, src, len,
                                      target, exp, /*retmode=*/ RETURN_END);
}

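/* Illustrative sketch (editor's note, not part of GCC): RETURN_END encodes
   the mempcpy contract, which differs from memcpy only in its return value:

     void *
     my_mempcpy (void *d, const void *s, __SIZE_TYPE__ n)
     {
       return (char *) __builtin_memcpy (d, s, n) + n;
     }

   Expanding with RETURN_END lets the block-move expander produce the
   DEST + LEN address directly instead of recomputing it afterwards.  */
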
/* Helper function to do the actual work for expand of memory copy family
   functions (memcpy, mempcpy, stpcpy).  Expansion should assign LEN bytes
   of memory from SRC to DEST and assign to TARGET if convenient.  Return
   value is based on RETMODE argument.  */

static rtx
expand_builtin_memory_copy_args (tree dest, tree src, tree len,
                                 rtx target, tree exp, memop_ret retmode)
{
  const char *src_str;
  unsigned int src_align = get_pointer_alignment (src);
  unsigned int dest_align = get_pointer_alignment (dest);
  rtx dest_mem, src_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;
  unsigned HOST_WIDE_INT min_size;
  unsigned HOST_WIDE_INT max_size;
  unsigned HOST_WIDE_INT probable_max_size;

  /* If DEST is not a pointer type, call the normal function.  */
  if (dest_align == 0)
    return NULL_RTX;

  /* If SRC is not a pointer type, don't do this
     operation in-line.  */
  if (src_align == 0)
    return NULL_RTX;

  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
                            &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;
  dest_mem = get_memory_rtx (dest, len);
  set_mem_align (dest_mem, dest_align);
  len_rtx = expand_normal (len);
  determine_block_size (len, len_rtx, &min_size, &max_size,
                        &probable_max_size);
  src_str = c_getstr (src);

  /* If SRC is a string constant and block move would be done
     by pieces, we can avoid loading the string from memory
     and only store the computed constants.  */
  if (src_str
      && CONST_INT_P (len_rtx)
      && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
      && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
                              CONST_CAST (char *, src_str),
                              dest_align, false))
    {
      dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
                                  builtin_memcpy_read_str,
                                  CONST_CAST (char *, src_str),
                                  dest_align, false, retmode);
      dest_mem = force_operand (XEXP (dest_mem, 0), target);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  src_mem = get_memory_rtx (src, len);
  set_mem_align (src_mem, src_align);

  /* Copy word part most expediently.  */
  enum block_op_methods method = BLOCK_OP_NORMAL;
  if (CALL_EXPR_TAILCALL (exp)
      && (retmode == RETURN_BEGIN || target == const0_rtx))
    method = BLOCK_OP_TAILCALL;
  if (retmode == RETURN_END && target != const0_rtx)
    method = BLOCK_OP_NO_LIBCALL_RET;
  dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
                                     expected_align, expected_size,
                                     min_size, max_size, probable_max_size);
  if (dest_addr == pc_rtx)
    return NULL_RTX;

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), target);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  if (retmode != RETURN_BEGIN && target != const0_rtx)
    {
      dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
      /* stpcpy returns a pointer to the last byte.  */
      if (retmode == RETURN_END_MINUS_ONE)
        dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
    }

  return dest_addr;
}

static rtx
expand_builtin_mempcpy_args (tree dest, tree src, tree len,
                             rtx target, tree orig_exp, memop_ret retmode)
{
  return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
                                          retmode);
}

/* Expand into a movstr instruction, if one is available.  Return NULL_RTX
   if we failed; the caller should emit a normal call.  Otherwise try to
   get the result in TARGET, if convenient.
   Return value is based on RETMODE argument.  */

static rtx
expand_movstr (tree dest, tree src, rtx target, memop_ret retmode)
{
  struct expand_operand ops[3];
  rtx dest_mem;
  rtx src_mem;

  if (!targetm.have_movstr ())
    return NULL_RTX;

  dest_mem = get_memory_rtx (dest, NULL);
  src_mem = get_memory_rtx (src, NULL);
  if (retmode == RETURN_BEGIN)
    {
      target = force_reg (Pmode, XEXP (dest_mem, 0));
      dest_mem = replace_equiv_address (dest_mem, target);
    }

  create_output_operand (&ops[0], retmode ? target : NULL_RTX, Pmode);
  create_fixed_operand (&ops[1], dest_mem);
  create_fixed_operand (&ops[2], src_mem);
  if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
    return NULL_RTX;

  if (retmode != RETURN_BEGIN && target != const0_rtx)
    {
      target = ops[0].value;
      /* movstr is supposed to set end to the address of the NUL
         terminator.  If the caller requested a mempcpy-like return value,
         adjust it.  */
      if (retmode == RETURN_END)
        {
          rtx tem = plus_constant (GET_MODE (target),
                                   gen_lowpart (GET_MODE (target), target), 1);
          emit_move_insn (target, force_operand (tem, NULL_RTX));
        }
    }

  return target;
}

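/* Illustrative summary (editor's note, not part of GCC): the memop_ret
   modes used throughout this file correspond to the return conventions
   of the C library functions being expanded:

     RETURN_BEGIN           dest            (memcpy, strcpy)
     RETURN_END             dest + len      (mempcpy)
     RETURN_END_MINUS_ONE   dest + len - 1  (stpcpy; address of the NUL)

   which is why expand_movstr above adds 1 to the NUL address only when a
   mempcpy-like RETURN_END value was requested.  */
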
/* Do some very basic size validation of a call to the strcat built-in
   given by EXP.  Return NULL_RTX to have the built-in expand to a call
   to the library function.  */

static rtx
expand_builtin_strcat (tree exp, rtx)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
      || !warn_stringop_overflow)
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);

  /* There is no way here to determine the length of the string in
     the destination to which the SRC string is being appended so
     just diagnose cases when the source string is longer than
     the destination object.  */

  tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);

  check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, src,
                destsize);

  return NULL_RTX;
}

/* Expand expression EXP, which is a call to the strcpy builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient (and in mode MODE if that's
   convenient).  */

static rtx
expand_builtin_strcpy (tree exp, rtx target)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);

  if (warn_stringop_overflow)
    {
      tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
      check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
                    src, destsize);
    }

  if (rtx ret = expand_builtin_strcpy_args (exp, dest, src, target))
    {
      /* Check to see if the argument was declared attribute nonstring
         and if so, issue a warning since at this point it's not known
         to be nul-terminated.  */
      tree fndecl = get_callee_fndecl (exp);
      maybe_warn_nonstring_arg (fndecl, exp);
      return ret;
    }

  return NULL_RTX;
}

/* Helper function to do the actual work for expand_builtin_strcpy.  The
   arguments to the builtin_strcpy call DEST and SRC are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_strcpy.  */

static rtx
expand_builtin_strcpy_args (tree exp, tree dest, tree src, rtx target)
{
  /* Detect strcpy calls with unterminated arrays.  */
  if (tree nonstr = unterminated_array (src))
    {
      /* NONSTR refers to the non-nul terminated constant array.  */
      if (!TREE_NO_WARNING (exp))
        warn_string_no_nul (EXPR_LOCATION (exp), "strcpy", src, nonstr);
      return NULL_RTX;
    }

  return expand_movstr (dest, src, target, /*retmode=*/ RETURN_BEGIN);
}

/* Expand a call EXP to the stpcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
{
  tree dst, src;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  if (warn_stringop_overflow)
    {
      tree destsize = compute_objsize (dst, warn_stringop_overflow - 1);
      check_access (exp, dst, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
                    src, destsize);
    }

  /* If return value is ignored, transform stpcpy into strcpy.  */
  if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      tree len, lenp1;
      rtx ret;

      /* Ensure we get an actual string whose length can be evaluated at
         compile-time, not an expression containing a string.  This is
         because the latter will potentially produce pessimized code
         when used to produce the return value.  */
      c_strlen_data data;
      memset (&data, 0, sizeof (c_strlen_data));
      if (!c_getstr (src, NULL)
          || !(len = c_strlen (src, 0, &data, 1)))
        return expand_movstr (dst, src, target,
                              /*retmode=*/ RETURN_END_MINUS_ONE);

      if (data.decl && !TREE_NO_WARNING (exp))
        warn_string_no_nul (EXPR_LOCATION (exp), "stpcpy", src, data.decl);

      lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
      ret = expand_builtin_mempcpy_args (dst, src, lenp1,
                                         target, exp,
                                         /*retmode=*/ RETURN_END_MINUS_ONE);
      if (ret)
        return ret;

      if (TREE_CODE (len) == INTEGER_CST)
        {
          rtx len_rtx = expand_normal (len);

          if (CONST_INT_P (len_rtx))
            {
              ret = expand_builtin_strcpy_args (exp, dst, src, target);
              if (ret)
                {
                  if (!target)
                    {
                      if (mode != VOIDmode)
                        target = gen_reg_rtx (mode);
                      else
                        target = gen_reg_rtx (GET_MODE (ret));
                    }
                  if (GET_MODE (target) != GET_MODE (ret))
                    ret = gen_lowpart (GET_MODE (target), ret);

                  ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
                  ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
                  gcc_assert (ret);

                  return target;
                }
            }
        }

      return expand_movstr (dst, src, target,
                            /*retmode=*/ RETURN_END_MINUS_ONE);
    }
}

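/* Illustrative sketch (editor's note, not part of GCC): the constant-length
   path above relies on the identity

     char *
     my_stpcpy (char *d, const char *s)
     {
       __SIZE_TYPE__ n = __builtin_strlen (s);
       __builtin_memcpy (d, s, n + 1);   // copy including the NUL
       return d + n;                     // dest + (n + 1) - 1
     }

   hence the mempcpy expansion with LEN+1 bytes and RETURN_END_MINUS_ONE.  */
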
/* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
   arguments while being careful to avoid duplicate warnings (which could
   be issued if the expander were to expand the call, resulting in it
   being emitted in expand_call ()).  */

static rtx
expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
{
  if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
    {
      /* The call has been successfully expanded.  Check for nonstring
         arguments and issue warnings as appropriate.  */
      maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
      return ret;
    }

  return NULL_RTX;
}

/* Check a call EXP to the stpncpy built-in for validity.
   Return NULL_RTX on both success and failure.  */

static rtx
expand_builtin_stpncpy (tree exp, rtx)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
      || !warn_stringop_overflow)
    return NULL_RTX;

  /* The source and destination of the call.  */
  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);

  /* The exact number of bytes to write (not the maximum).  */
  tree len = CALL_EXPR_ARG (exp, 2);

  /* The size of the destination object.  */
  tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);

  check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src, destsize);

  return NULL_RTX;
}

/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

static rtx
builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
                          scalar_int_mode mode)
{
  const char *str = (const char *) data;

  if ((unsigned HOST_WIDE_INT) offset > strlen (str))
    return const0_rtx;

  return c_readstr (str + offset, mode);
}

/* Helper to check the sizes of sequences and the destination of calls
   to __builtin_strncat and __builtin___strncat_chk.  Returns true on
   success (no overflow or invalid sizes), false otherwise.  */

bool
check_strncat_sizes (tree exp, tree objsize)
{
  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  tree maxread = CALL_EXPR_ARG (exp, 2);

  /* Try to determine the range of lengths that the source expression
     refers to.  */
  tree lenrange[2];
  get_range_strlen (src, lenrange);

  /* Try to verify that the destination is big enough for the shortest
     string.  */

  if (!objsize && warn_stringop_overflow)
    {
      /* If it hasn't been provided by __strncat_chk, try to determine
         the size of the destination object into which the source is
         being copied.  */
      objsize = compute_objsize (dest, warn_stringop_overflow - 1);
    }

  /* Add one for the terminating nul.  */
  tree srclen = (lenrange[0]
                 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
                                size_one_node)
                 : NULL_TREE);

  /* The strncat function copies at most MAXREAD bytes and always appends
     the terminating nul so the specified upper bound should never be equal
     to (or greater than) the size of the destination.  */
  if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
      && tree_int_cst_equal (objsize, maxread))
    {
      location_t loc = tree_nonartificial_location (exp);
      loc = expansion_point_location_if_in_system_header (loc);

      warning_at (loc, OPT_Wstringop_overflow_,
                  "%K%qD specified bound %E equals destination size",
                  exp, get_callee_fndecl (exp), maxread);

      return false;
    }

  if (!srclen
      || (maxread && tree_fits_uhwi_p (maxread)
          && tree_fits_uhwi_p (srclen)
          && tree_int_cst_lt (maxread, srclen)))
    srclen = maxread;

  /* The number of bytes to write is LEN but check_access will also
     check SRCLEN if LEN's value isn't known.  */
  return check_access (exp, dest, src, /*size=*/NULL_TREE, maxread, srclen,
                       objsize);
}

/* Similar to expand_builtin_strcat, do some very basic size validation
   of a call to the strncat built-in given by EXP.  Return NULL_RTX to have
   the built-in expand to a call to the library function.  */

static rtx
expand_builtin_strncat (tree exp, rtx)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
      || !warn_stringop_overflow)
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  /* The upper bound on the number of bytes to write.  */
  tree maxread = CALL_EXPR_ARG (exp, 2);
  /* The length of the source sequence.  */
  tree slen = c_strlen (src, 1);

  /* Try to determine the range of lengths that the source expression
     refers to.  */
  tree lenrange[2];
  if (slen)
    lenrange[0] = lenrange[1] = slen;
  else
    get_range_strlen (src, lenrange);

  /* Try to verify that the destination is big enough for the shortest
     string.  First try to determine the size of the destination object
     into which the source is being copied.  */
  tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);

  /* Add one for the terminating nul.  */
  tree srclen = (lenrange[0]
                 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
                                size_one_node)
                 : NULL_TREE);

  /* The strncat function copies at most MAXREAD bytes and always appends
     the terminating nul so the specified upper bound should never be equal
     to (or greater than) the size of the destination.  */
  if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize)
      && tree_int_cst_equal (destsize, maxread))
    {
      location_t loc = tree_nonartificial_location (exp);
      loc = expansion_point_location_if_in_system_header (loc);

      warning_at (loc, OPT_Wstringop_overflow_,
                  "%K%qD specified bound %E equals destination size",
                  exp, get_callee_fndecl (exp), maxread);

      return NULL_RTX;
    }

  if (!srclen
      || (maxread && tree_fits_uhwi_p (maxread)
          && tree_fits_uhwi_p (srclen)
          && tree_int_cst_lt (maxread, srclen)))
    srclen = maxread;

  /* The number of bytes to write is SRCLEN.  */
  check_access (exp, dest, src, NULL_TREE, maxread, srclen, destsize);

  return NULL_RTX;
}

/* Expand expression EXP, which is a call to the strncpy builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  */

static rtx
expand_builtin_strncpy (tree exp, rtx target)
{
  location_t loc = EXPR_LOCATION (exp);

  if (validate_arglist (exp,
                        POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      /* The number of bytes to write (not the maximum).  */
      tree len = CALL_EXPR_ARG (exp, 2);
      /* The length of the source sequence.  */
      tree slen = c_strlen (src, 1);

      if (warn_stringop_overflow)
        {
          tree destsize = compute_objsize (dest,
                                           warn_stringop_overflow - 1);

          /* The number of bytes to write is LEN but check_access will also
             check SLEN if LEN's value isn't known.  */
          check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src,
                        destsize);
        }

      /* We must be passed a constant len and src parameter.  */
      if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
        return NULL_RTX;

      slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

      /* We're required to pad with trailing zeros if the requested
         len is greater than strlen(s2)+1.  In that case try to
         use store_by_pieces, if it fails, punt.  */
      if (tree_int_cst_lt (slen, len))
        {
          unsigned int dest_align = get_pointer_alignment (dest);
          const char *p = c_getstr (src);
          rtx dest_mem;

          if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
              || !can_store_by_pieces (tree_to_uhwi (len),
                                       builtin_strncpy_read_str,
                                       CONST_CAST (char *, p),
                                       dest_align, false))
            return NULL_RTX;

          dest_mem = get_memory_rtx (dest, len);
          store_by_pieces (dest_mem, tree_to_uhwi (len),
                           builtin_strncpy_read_str,
                           CONST_CAST (char *, p), dest_align, false,
                           RETURN_BEGIN);
          dest_mem = force_operand (XEXP (dest_mem, 0), target);
          dest_mem = convert_memory_address (ptr_mode, dest_mem);
          return dest_mem;
        }
    }

  return NULL_RTX;
}

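/* Illustrative sketch (editor's note, not part of GCC): the padding rule
   handled above comes from the C standard's strncpy semantics:

     char buf[8];
     __builtin_strncpy (buf, "ab", 8);
     // buf now holds 'a', 'b' followed by six '\0' bytes

   With constant SRC and LEN, builtin_strncpy_read_str returns zeros for
   offsets past the end of the string, so store_by_pieces can emit the
   data and the padding as plain stores without a loop or a libcall.  */
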
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

static rtx
builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                         scalar_int_mode mode)
{
  const char *c = (const char *) data;
  char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));

  memset (p, *c, GET_MODE_SIZE (mode));

  return c_readstr (p, mode);
}

/* Callback routine for store_by_pieces.  Return the RTL of a register
   containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
   char value given in the RTL register data.  For example, if mode is
   4 bytes wide, return the RTL for 0x01010101*data.  */

static rtx
builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                        scalar_int_mode mode)
{
  rtx target, coeff;
  size_t size;
  char *p;

  size = GET_MODE_SIZE (mode);
  if (size == 1)
    return (rtx) data;

  p = XALLOCAVEC (char, size);
  memset (p, 1, size);
  coeff = c_readstr (p, mode);

  target = convert_to_mode (mode, (rtx) data, 1);
  target = expand_mult (mode, target, coeff, NULL_RTX, 1);
  return force_reg (mode, target);
}

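/* Illustrative sketch (editor's note, not part of GCC): the multiplication
   above is the usual byte-replication trick.  For a 4-byte mode:

     unsigned int
     replicate_byte (unsigned char c)
     {
       return (unsigned int) c * 0x01010101u;   // 0x2a -> 0x2a2a2a2a
     }

   c_readstr on the all-ones buffer produces the 0x0101...01 coefficient
   for whatever width MODE has.  */
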
/* Expand expression EXP, which is a call to the memset builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient (and in mode MODE if that's
   convenient).  */

static rtx
expand_builtin_memset (tree exp, rtx target, machine_mode mode)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree val = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);

  check_memop_access (exp, dest, NULL_TREE, len);

  return expand_builtin_memset_args (dest, val, len, target, mode, exp);
}

/* Helper function to do the actual work for expand_builtin_memset.  The
   arguments to the builtin_memset call DEST, VAL, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_memset.  */

static rtx
expand_builtin_memset_args (tree dest, tree val, tree len,
                            rtx target, machine_mode mode, tree orig_exp)
{
  tree fndecl, fn;
  enum built_in_function fcode;
  machine_mode val_mode;
  char c;
  unsigned int dest_align;
  rtx dest_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;
  unsigned HOST_WIDE_INT min_size;
  unsigned HOST_WIDE_INT max_size;
  unsigned HOST_WIDE_INT probable_max_size;

  dest_align = get_pointer_alignment (dest);

  /* If DEST is not a pointer type, don't do this operation in-line.  */
  if (dest_align == 0)
    return NULL_RTX;

  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
                            &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Evaluate and ignore VAL in case it has side-effects.  */
      expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (dest, target, mode, EXPAND_NORMAL);
    }

  /* Stabilize the arguments in case we fail.  */
  dest = builtin_save_expr (dest);
  val = builtin_save_expr (val);
  len = builtin_save_expr (len);

  len_rtx = expand_normal (len);
  determine_block_size (len, len_rtx, &min_size, &max_size,
                        &probable_max_size);
  dest_mem = get_memory_rtx (dest, len);
  val_mode = TYPE_MODE (unsigned_char_type_node);

  if (TREE_CODE (val) != INTEGER_CST)
    {
      rtx val_rtx;

      val_rtx = expand_normal (val);
      val_rtx = convert_to_mode (val_mode, val_rtx, 0);

      /* Assume that we can memset by pieces if we can store
       * the coefficients by pieces (in the required modes).
       * We can't pass builtin_memset_gen_str as that emits RTL.  */
      c = 1;
      if (tree_fits_uhwi_p (len)
          && can_store_by_pieces (tree_to_uhwi (len),
                                  builtin_memset_read_str, &c, dest_align,
                                  true))
        {
          val_rtx = force_reg (val_mode, val_rtx);
          store_by_pieces (dest_mem, tree_to_uhwi (len),
                           builtin_memset_gen_str, val_rtx, dest_align,
                           true, RETURN_BEGIN);
        }
      else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
                                        dest_align, expected_align,
                                        expected_size, min_size, max_size,
                                        probable_max_size))
        goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  if (target_char_cast (val, &c))
    goto do_libcall;

  if (c)
    {
      if (tree_fits_uhwi_p (len)
          && can_store_by_pieces (tree_to_uhwi (len),
                                  builtin_memset_read_str, &c, dest_align,
                                  true))
        store_by_pieces (dest_mem, tree_to_uhwi (len),
                         builtin_memset_read_str, &c, dest_align, true,
                         RETURN_BEGIN);
      else if (!set_storage_via_setmem (dest_mem, len_rtx,
                                        gen_int_mode (c, val_mode),
                                        dest_align, expected_align,
                                        expected_size, min_size, max_size,
                                        probable_max_size))
        goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  set_mem_align (dest_mem, dest_align);
  dest_addr = clear_storage_hints (dest_mem, len_rtx,
                                   CALL_EXPR_TAILCALL (orig_exp)
                                   ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
                                   expected_align, expected_size,
                                   min_size, max_size,
                                   probable_max_size);

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  return dest_addr;

 do_libcall:
  fndecl = get_callee_fndecl (orig_exp);
  fcode = DECL_FUNCTION_CODE (fndecl);
  if (fcode == BUILT_IN_MEMSET)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
                                dest, val, len);
  else if (fcode == BUILT_IN_BZERO)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
                                dest, len);
  else
    gcc_unreachable ();
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
  return expand_call (fn, target, target == const0_rtx);
}

/* Expand expression EXP, which is a call to the bzero builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  */

static rtx
expand_builtin_bzero (tree exp)
{
  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree size = CALL_EXPR_ARG (exp, 1);

  check_memop_access (exp, dest, NULL_TREE, size);

  /* New argument list transforming bzero(ptr x, int y) to
     memset(ptr x, int 0, size_t y).  This is done this way
     so that if it isn't expanded inline, we fall back to
     calling bzero instead of memset.  */

  location_t loc = EXPR_LOCATION (exp);

  return expand_builtin_memset_args (dest, integer_zero_node,
                                     fold_convert_loc (loc,
                                                       size_type_node, size),
                                     const0_rtx, VOIDmode, exp);
}

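/* Illustrative sketch (editor's note, not part of GCC): the code above
   treats

     __builtin_bzero (p, n);

   as

     __builtin_memset (p, 0, (__SIZE_TYPE__) n);

   while keeping the original bzero FNDECL, so that a failed inline
   expansion falls back to calling bzero rather than memset.  */
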
/* Try to expand cmpstr operation ICODE with the given operands.
   Return the result rtx on success, otherwise return null.  */

static rtx
expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
               HOST_WIDE_INT align)
{
  machine_mode insn_mode = insn_data[icode].operand[0].mode;

  if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
    target = NULL_RTX;

  struct expand_operand ops[4];
  create_output_operand (&ops[0], target, insn_mode);
  create_fixed_operand (&ops[1], arg1_rtx);
  create_fixed_operand (&ops[2], arg2_rtx);
  create_integer_operand (&ops[3], align);
  if (maybe_expand_insn (icode, 4, ops))
    return ops[0].value;
  return NULL_RTX;
}

/* Expand expression EXP, which is a call to the memcmp built-in function.
   Return NULL_RTX if we failed and the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient.
   RESULT_EQ is true if we can relax the returned value to be either zero
   or nonzero, without caring about the sign.  */

static rtx
expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree arg2 = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);
  enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
  bool no_overflow = true;

  /* Diagnose calls where the specified length exceeds the size of either
     object.  */
  tree size = compute_objsize (arg1, 0);
  no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
                              len, /*maxread=*/NULL_TREE, size,
                              /*objsize=*/NULL_TREE);
  if (no_overflow)
    {
      size = compute_objsize (arg2, 0);
      no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
                                  len, /*maxread=*/NULL_TREE, size,
                                  /*objsize=*/NULL_TREE);
    }

  /* If the specified length exceeds the size of either object,
     call the function.  */
  if (!no_overflow)
    return NULL_RTX;

  /* Due to the performance benefit, always inline the calls first
     when result_eq is false.  */
  rtx result = NULL_RTX;

  if (!result_eq && fcode != BUILT_IN_BCMP)
    {
      result = inline_expand_builtin_string_cmp (exp, target);
      if (result)
        return result;
    }

  machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  location_t loc = EXPR_LOCATION (exp);

  unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
  unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

  /* If we don't have POINTER_TYPE, call the function.  */
  if (arg1_align == 0 || arg2_align == 0)
    return NULL_RTX;

  rtx arg1_rtx = get_memory_rtx (arg1, len);
  rtx arg2_rtx = get_memory_rtx (arg2, len);
  rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));

  /* Set MEM_SIZE as appropriate.  */
  if (CONST_INT_P (len_rtx))
    {
      set_mem_size (arg1_rtx, INTVAL (len_rtx));
      set_mem_size (arg2_rtx, INTVAL (len_rtx));
    }

  by_pieces_constfn constfn = NULL;

  const char *src_str = c_getstr (arg2);
  if (result_eq && src_str == NULL)
    {
      src_str = c_getstr (arg1);
      if (src_str != NULL)
        std::swap (arg1_rtx, arg2_rtx);
    }

  /* If SRC is a string constant and block move would be done
     by pieces, we can avoid loading the string from memory
     and only store the computed constants.  */
  if (src_str
      && CONST_INT_P (len_rtx)
      && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
    constfn = builtin_memcpy_read_str;

  result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
                                 TREE_TYPE (len), target,
                                 result_eq, constfn,
                                 CONST_CAST (char *, src_str));

  if (result)
    {
      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (result) == mode)
        return result;

      if (target != 0)
        {
          convert_move (target, result, 0);
          return target;
        }

      return convert_to_mode (mode, result, 0);
    }

  return NULL_RTX;
}

/* Expand expression EXP, which is a call to the strcmp builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Due to the performance benefit, always inline the calls first.  */
  rtx result = NULL_RTX;
  result = inline_expand_builtin_string_cmp (exp, target);
  if (result)
    return result;

  insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
  insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
  if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
    return NULL_RTX;

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree arg2 = CALL_EXPR_ARG (exp, 1);

  unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
  unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

  /* If we don't have POINTER_TYPE, call the function.  */
  if (arg1_align == 0 || arg2_align == 0)
    return NULL_RTX;

  /* Stabilize the arguments in case gen_cmpstr(n)si fail.  */
  arg1 = builtin_save_expr (arg1);
  arg2 = builtin_save_expr (arg2);

  rtx arg1_rtx = get_memory_rtx (arg1, NULL);
  rtx arg2_rtx = get_memory_rtx (arg2, NULL);

  /* Try to call cmpstrsi.  */
  if (cmpstr_icode != CODE_FOR_nothing)
    result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
                            MIN (arg1_align, arg2_align));

  /* Try to determine at least one length and call cmpstrnsi.  */
  if (!result && cmpstrn_icode != CODE_FOR_nothing)
    {
      tree len;
      rtx arg3_rtx;

      tree len1 = c_strlen (arg1, 1);
      tree len2 = c_strlen (arg2, 1);

      if (len1)
        len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
      if (len2)
        len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

      /* If we don't have a constant length for the first, use the length
         of the second, if we know it.  We don't require a constant for
         this case; some cost analysis could be done if both are available
         but neither is constant.  For now, assume they're equally cheap,
         unless one has side effects.  If both strings have constant lengths,
         use the smaller.  */

      if (!len1)
        len = len2;
      else if (!len2)
        len = len1;
      else if (TREE_SIDE_EFFECTS (len1))
        len = len2;
      else if (TREE_SIDE_EFFECTS (len2))
        len = len1;
      else if (TREE_CODE (len1) != INTEGER_CST)
        len = len2;
      else if (TREE_CODE (len2) != INTEGER_CST)
        len = len1;
      else if (tree_int_cst_lt (len1, len2))
        len = len1;
      else
        len = len2;

      /* If both arguments have side effects, we cannot optimize.  */
      if (len && !TREE_SIDE_EFFECTS (len))
        {
          arg3_rtx = expand_normal (len);
          result = expand_cmpstrn_or_cmpmem
            (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
             arg3_rtx, MIN (arg1_align, arg2_align));
        }
    }

  tree fndecl = get_callee_fndecl (exp);
  if (result)
    {
      /* Check to see if the argument was declared attribute nonstring
         and if so, issue a warning since at this point it's not known
         to be nul-terminated.  */
      maybe_warn_nonstring_arg (fndecl, exp);

      /* Return the value in the proper mode for this function.  */
      machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
      if (GET_MODE (result) == mode)
        return result;
      if (target == 0)
        return convert_to_mode (mode, result, 0);
      convert_move (target, result, 0);
      return target;
    }

  /* Expand the library call ourselves using a stabilized argument
     list to avoid re-evaluating the function's arguments twice.  */
  tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
  return expand_call (fn, target, target == const0_rtx);
}

/* Expand expression EXP, which is a call to the strncmp builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
                        ATTRIBUTE_UNUSED machine_mode mode)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Due to the performance benefit, always inline the calls first.  */
  rtx result = NULL_RTX;
  result = inline_expand_builtin_string_cmp (exp, target);
  if (result)
    return result;

  /* If c_strlen can determine an expression for one of the string
     lengths, and it doesn't have side effects, then emit cmpstrnsi
     using length MIN(strlen(string)+1, arg3).  */
  insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
  if (cmpstrn_icode == CODE_FOR_nothing)
    return NULL_RTX;

  tree len;

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree arg2 = CALL_EXPR_ARG (exp, 1);
  tree arg3 = CALL_EXPR_ARG (exp, 2);

  unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
  unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

  tree len1 = c_strlen (arg1, 1);
  tree len2 = c_strlen (arg2, 1);

  location_t loc = EXPR_LOCATION (exp);

  if (len1)
    len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
  if (len2)
    len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);

  tree len3 = fold_convert_loc (loc, sizetype, arg3);

  /* If we don't have a constant length for the first, use the length
     of the second, if we know it.  If neither string is constant length,
     use the given length argument.  We don't require a constant for
     this case; some cost analysis could be done if both are available
     but neither is constant.  For now, assume they're equally cheap,
     unless one has side effects.  If both strings have constant lengths,
     use the smaller.  */

  if (!len1 && !len2)
    len = len3;
  else if (!len1)
    len = len2;
  else if (!len2)
    len = len1;
  else if (TREE_SIDE_EFFECTS (len1))
    len = len2;
  else if (TREE_SIDE_EFFECTS (len2))
    len = len1;
  else if (TREE_CODE (len1) != INTEGER_CST)
    len = len2;
  else if (TREE_CODE (len2) != INTEGER_CST)
    len = len1;
  else if (tree_int_cst_lt (len1, len2))
    len = len1;
  else
    len = len2;

  /* If we are not using the given length, we must incorporate it here.
     The actual new length parameter will be MIN(len,arg3) in this case.  */
  if (len != len3)
    {
      len = fold_convert_loc (loc, sizetype, len);
      len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
    }
  rtx arg1_rtx = get_memory_rtx (arg1, len);
  rtx arg2_rtx = get_memory_rtx (arg2, len);
  rtx arg3_rtx = expand_normal (len);
  result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
                                     arg2_rtx, TREE_TYPE (len), arg3_rtx,
                                     MIN (arg1_align, arg2_align));

  tree fndecl = get_callee_fndecl (exp);
  if (result)
    {
      /* Check to see if the argument was declared attribute nonstring
         and if so, issue a warning since at this point it's not known
         to be nul-terminated.  */
      maybe_warn_nonstring_arg (fndecl, exp);

      /* Return the value in the proper mode for this function.  */
      mode = TYPE_MODE (TREE_TYPE (exp));
      if (GET_MODE (result) == mode)
        return result;
      if (target == 0)
        return convert_to_mode (mode, result, 0);
      convert_move (target, result, 0);
      return target;
    }

  /* Expand the library call ourselves using a stabilized argument
     list to avoid re-evaluating the function's arguments twice.  */
  tree fn = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
  return expand_call (fn, target, target == const0_rtx);
}

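/* Illustrative sketch (editor's note, not part of GCC): clamping the bound
   to MIN (strlen (s) + 1, n) above is safe because strncmp never compares
   past a terminating NUL:

     __builtin_strncmp ("ab", s, 100)
     // compares at most 3 bytes ('a', 'b', '\0'), as if n were 3

   so the emitted cmpstrn/cmpmem insn can be given the smaller bound.  */
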
/* Expand a call to __builtin_saveregs, generating the result in TARGET,
   if that's convenient.  */

rtx
expand_builtin_saveregs (void)
{
  rtx val;
  rtx_insn *seq;

  /* Don't do __builtin_saveregs more than once in a function.
     Save the result of the first call and reuse it.  */
  if (saveregs_value != 0)
    return saveregs_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the call to the
     first insn of this function.  */

  start_sequence ();

  /* Do whatever the machine needs done in this case.  */
  val = targetm.calls.expand_builtin_saveregs ();

  seq = get_insns ();
  end_sequence ();

  saveregs_value = val;

  /* Put the insns after the NOTE that starts the function.  If this
     is inside a start_sequence, make the outer-level insn chain current, so
     the code is placed at the start of the function.  */
  push_topmost_sequence ();
  emit_insn_after (seq, entry_of_function ());
  pop_topmost_sequence ();

  return val;
}

/* Expand a call to __builtin_next_arg.  */

static rtx
expand_builtin_next_arg (void)
{
  /* Checking arguments is already done in fold_builtin_next_arg
     that must be called before this function.  */
  return expand_binop (ptr_mode, add_optab,
                       crtl->args.internal_arg_pointer,
                       crtl->args.arg_offset_rtx,
                       NULL_RTX, 0, OPTAB_LIB_WIDEN);
}

/* Make it easier for the backends by protecting the valist argument
   from multiple evaluations.  */

static tree
stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
{
  tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));

  /* The current way of determining the type of valist is completely
     bogus.  We should have the information on the va builtin instead.  */
  if (!vatype)
    vatype = targetm.fn_abi_va_list (cfun->decl);

  if (TREE_CODE (vatype) == ARRAY_TYPE)
    {
      if (TREE_SIDE_EFFECTS (valist))
        valist = save_expr (valist);

      /* For this case, the backends will be expecting a pointer to
         vatype, but it's possible we've actually been given an array
         (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
         So fix it.  */
      if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
        {
          tree p1 = build_pointer_type (TREE_TYPE (vatype));
          valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
        }
    }
  else
    {
      tree pt = build_pointer_type (vatype);

      if (! needs_lvalue)
        {
          if (! TREE_SIDE_EFFECTS (valist))
            return valist;

          valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
          TREE_SIDE_EFFECTS (valist) = 1;
        }

      if (TREE_SIDE_EFFECTS (valist))
        valist = save_expr (valist);
      valist = fold_build2_loc (loc, MEM_REF,
                                vatype, valist, build_int_cst (pt, 0));
    }

  return valist;
}

/* The "standard" definition of va_list is void*.  */

tree
std_build_builtin_va_list (void)
{
  return ptr_type_node;
}

/* The "standard" abi va_list is va_list_type_node.  */

tree
std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
{
  return va_list_type_node;
}

/* The "standard" type of va_list is va_list_type_node.  */

tree
std_canonical_va_list_type (tree type)
{
  tree wtype, htype;

  wtype = va_list_type_node;
  htype = type;

  if (TREE_CODE (wtype) == ARRAY_TYPE)
    {
      /* If va_list is an array type, the argument may have decayed
         to a pointer type, e.g. by being passed to another function.
         In that case, unwrap both types so that we can compare the
         underlying records.  */
      if (TREE_CODE (htype) == ARRAY_TYPE
          || POINTER_TYPE_P (htype))
        {
          wtype = TREE_TYPE (wtype);
          htype = TREE_TYPE (htype);
        }
    }
  if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
    return va_list_type_node;

  return NULL_TREE;
}

/* The "standard" implementation of va_start: just assign `nextarg' to
   the variable.  */

void
std_expand_builtin_va_start (tree valist, rtx nextarg)
{
  rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
  convert_move (va_r, nextarg, 0);
}

/* Expand EXP, a call to __builtin_va_start.  */

static rtx
expand_builtin_va_start (tree exp)
{
  rtx nextarg;
  tree valist;
  location_t loc = EXPR_LOCATION (exp);

  if (call_expr_nargs (exp) < 2)
    {
      error_at (loc, "too few arguments to function %<va_start%>");
      return const0_rtx;
    }

  if (fold_builtin_next_arg (exp, true))
    return const0_rtx;

  nextarg = expand_builtin_next_arg ();
  valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);

  if (targetm.expand_builtin_va_start)
    targetm.expand_builtin_va_start (valist, nextarg);
  else
    std_expand_builtin_va_start (valist, nextarg);

  return const0_rtx;
}

/* Expand EXP, a call to __builtin_va_end.  */

static rtx
expand_builtin_va_end (tree exp)
{
  tree valist = CALL_EXPR_ARG (exp, 0);

  /* Evaluate for side effects, if needed.  I hate macros that don't
     do that.  */
  if (TREE_SIDE_EFFECTS (valist))
    expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);

  return const0_rtx;
}

/* Expand EXP, a call to __builtin_va_copy.  We do this as a
   builtin rather than just as an assignment in stdarg.h because of the
   nastiness of array-type va_list types.  */

static rtx
expand_builtin_va_copy (tree exp)
{
  tree dst, src, t;
  location_t loc = EXPR_LOCATION (exp);

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  dst = stabilize_va_list_loc (loc, dst, 1);
  src = stabilize_va_list_loc (loc, src, 0);

  gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);

  if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
    {
      t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
  else
    {
      rtx dstb, srcb, size;

      /* Evaluate to pointers.  */
      dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
      srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
      size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
                          NULL_RTX, VOIDmode, EXPAND_NORMAL);

      dstb = convert_memory_address (Pmode, dstb);
      srcb = convert_memory_address (Pmode, srcb);

      /* "Dereference" to BLKmode memories.  */
      dstb = gen_rtx_MEM (BLKmode, dstb);
      set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
      set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
      srcb = gen_rtx_MEM (BLKmode, srcb);
      set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
      set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));

      /* Copy.  */
      emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
    }

  return const0_rtx;
}

/* Expand a call to one of the builtin functions __builtin_frame_address or
   __builtin_return_address.  */

static rtx
expand_builtin_frame_address (tree fndecl, tree exp)
{
  /* The argument must be a nonnegative integer constant.
     It counts the number of frames to scan up the stack.
     The value is either the frame pointer value or the return
     address saved in that frame.  */
  if (call_expr_nargs (exp) == 0)
    /* Warning about missing arg was already issued.  */
    return const0_rtx;
  else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
    {
      error ("invalid argument to %qD", fndecl);
      return const0_rtx;
    }
  else
    {
      /* Number of frames to scan up the stack.  */
      unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));

      rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);

      /* Some ports cannot access arbitrary stack frames.  */
      if (tem == NULL)
        {
          warning (0, "unsupported argument to %qD", fndecl);
          return const0_rtx;
        }

      if (count)
        {
          /* Warn since no effort is made to ensure that any frame
             beyond the current one exists or can be safely reached.  */
          warning (OPT_Wframe_address, "calling %qD with "
                   "a nonzero argument is unsafe", fndecl);
        }

      /* For __builtin_frame_address, return what we've got.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
        return tem;

      if (!REG_P (tem)
          && ! CONSTANT_P (tem))
        tem = copy_addr_to_reg (tem);
      return tem;
    }
}

/* Expand EXP, a call to the alloca builtin.  Return NULL_RTX if we
   failed and the caller should emit a normal call.  */

static rtx
expand_builtin_alloca (tree exp)
{
  rtx op0;
  rtx result;
  unsigned int align;
  tree fndecl = get_callee_fndecl (exp);
  HOST_WIDE_INT max_size;
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
  bool valid_arglist
    = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
       ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
                           VOID_TYPE)
       : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
         ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
         : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));

  if (!valid_arglist)
    return NULL_RTX;

  if ((alloca_for_var
       && warn_vla_limit >= HOST_WIDE_INT_MAX
       && warn_alloc_size_limit < warn_vla_limit)
      || (!alloca_for_var
          && warn_alloca_limit >= HOST_WIDE_INT_MAX
          && warn_alloc_size_limit < warn_alloca_limit))
    {
      /* -Walloca-larger-than and -Wvla-larger-than settings of
         less than HOST_WIDE_INT_MAX override the more general
         -Walloc-size-larger-than so unless either of the former
         options is smaller than the last one (which would imply
         that the call was already checked), check the alloca
         arguments for overflow.  */
      tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
      int idx[] = { 0, -1 };
      maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
    }

  /* Compute the argument.  */
  op0 = expand_normal (CALL_EXPR_ARG (exp, 0));

  /* Compute the alignment.  */
  align = (fcode == BUILT_IN_ALLOCA
           ? BIGGEST_ALIGNMENT
           : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));

  /* Compute the maximum size.  */
  max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
              ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
              : -1);

  /* Allocate the desired space.  If the allocation stems from the declaration
     of a variable-sized object, it cannot accumulate.  */
  result
    = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
  result = convert_memory_address (ptr_mode, result);

  return result;
}

/* Emit a call to __asan_allocas_unpoison for EXP.  Add to its second
   argument virtual_stack_dynamic_rtx - stack_pointer_rtx, which is the
   STACK_DYNAMIC_OFFSET value.  See the motivation for this in the comment
   on the handle_builtin_stack_restore function.  */

static rtx
expand_asan_emit_allocas_unpoison (tree exp)
{
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);
  rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
                                 stack_pointer_rtx, NULL_RTX, 0,
                                 OPTAB_LIB_WIDEN);
  off = convert_modes (ptr_mode, Pmode, off, 0);
  bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
                             OPTAB_LIB_WIDEN);
  rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
  ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
                                 top, ptr_mode, bot, ptr_mode);
  return ret;
}

/* Expand a call to the bswap builtin in EXP.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
                      rtx subtarget)
{
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  op0 = expand_expr (arg,
                     subtarget && GET_MODE (subtarget) == target_mode
                     ? subtarget : NULL_RTX,
                     target_mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != target_mode)
    op0 = convert_to_mode (target_mode, op0, 1);

  target = expand_unop (target_mode, bswap_optab, op0, target, 1);

  gcc_assert (target);

  return convert_to_mode (target_mode, target, 1);
}

/* Expand a call to a unary builtin in EXP.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
                     rtx subtarget, optab op_optab)
{
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Compute the argument.  */
  op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
                     (subtarget
                      && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
                          == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
                     VOIDmode, EXPAND_NORMAL);
  /* Compute op, into TARGET if possible.
     Set TARGET to wherever the result comes back.  */
  target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
                        op_optab, op0, target, op_optab != clrsb_optab);
  gcc_assert (target);

  return convert_to_mode (target_mode, target, 0);
}

/* Expand a call to __builtin_expect.  We just return our argument
   as the builtin_expect semantic should've been already executed by
   the tree branch prediction pass.  */

static rtx
expand_builtin_expect (tree exp, rtx target)
{
  tree arg;

  if (call_expr_nargs (exp) < 2)
    return const0_rtx;
  arg = CALL_EXPR_ARG (exp, 0);

  target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
  /* When guessing was done, the hints should be already stripped away.  */
  gcc_assert (!flag_guess_branch_prob
              || optimize == 0 || seen_error ());
  return target;
}

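/* Illustrative sketch (editor's note, not part of GCC): by the time this
   expander runs, a source-level hint such as

     if (__builtin_expect (err != 0, 0))   // "err is usually zero"
       handle_error ();

   has already been consumed by the tree branch prediction pass, so only
   the first argument (the value of ERR != 0) is expanded here.  */
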
/* Expand a call to __builtin_expect_with_probability.  We just return our
   argument as the builtin_expect semantic should've been already executed by
   the tree branch prediction pass.  */

static rtx
expand_builtin_expect_with_probability (tree exp, rtx target)
{
  tree arg;

  if (call_expr_nargs (exp) < 3)
    return const0_rtx;
  arg = CALL_EXPR_ARG (exp, 0);

  target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
  /* When guessing was done, the hints should be already stripped away.  */
  gcc_assert (!flag_guess_branch_prob
              || optimize == 0 || seen_error ());
  return target;
}

/* Expand a call to __builtin_assume_aligned.  We just return our first
   argument as the builtin_assume_aligned semantic should've been already
   executed by CCP.  */

static rtx
expand_builtin_assume_aligned (tree exp, rtx target)
{
  if (call_expr_nargs (exp) < 2)
    return const0_rtx;
  target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
                        EXPAND_NORMAL);
  gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
              && (call_expr_nargs (exp) < 3
                  || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
  return target;
}

/* Expand a call to __builtin_trap.  */

static void
expand_builtin_trap (void)
{
  if (targetm.have_trap ())
    {
      rtx_insn *insn = emit_insn (targetm.gen_trap ());
      /* For trap insns when not accumulating outgoing args force
         REG_ARGS_SIZE note to prevent crossjumping of calls with
         different args sizes.  */
      if (!ACCUMULATE_OUTGOING_ARGS)
        add_args_size_note (insn, stack_pointer_delta);
    }
  else
    {
      tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
      tree call_expr = build_call_expr (fn, 0);
      expand_call (call_expr, NULL_RTX, false);
    }

  emit_barrier ();
}

/* Expand a call to __builtin_unreachable.  We do nothing except emit
   a barrier saying that control flow will not pass here.

   It is the responsibility of the program being compiled to ensure
   that control flow never reaches __builtin_unreachable.  */

static void
expand_builtin_unreachable (void)
{
  emit_barrier ();
}

/* Expand EXP, a call to fabs, fabsf or fabsl.
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function inline.  If convenient, the result should be placed
   in TARGET.  SUBTARGET may be used as the target for computing
   the operand.  */

static rtx
expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
{
  machine_mode mode;
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
  return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
}

/* Expand EXP, a call to copysign, copysignf, or copysignl.
   Return NULL if a normal call should be emitted rather than expanding the
   function inline.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing the operand.  */

static rtx
expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
{
  rtx op0, op1;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

  arg = CALL_EXPR_ARG (exp, 1);
  op1 = expand_normal (arg);

  return expand_copysign (op0, op1, target);
}

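/* Illustrative sketch (editor's note, not part of GCC): the expansion
   implements the usual copysign contract, magnitude from the first
   operand and sign from the second:

     __builtin_copysign (3.0, -0.5)   // yields -3.0
     __builtin_copysign (-3.0, 2.0)   // yields  3.0  */
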
/* Expand a call to __builtin___clear_cache.  */

static rtx
expand_builtin___clear_cache (tree exp)
{
  if (!targetm.code_for_clear_cache)
    {
#ifdef CLEAR_INSN_CACHE
      /* There is no "clear_cache" insn, and __clear_cache() in libgcc
         does something.  Just do the default expansion to a call to
         __clear_cache().  */
      return NULL_RTX;
#else
      /* There is no "clear_cache" insn, and __clear_cache() in libgcc
         does nothing.  There is no need to call it.  Do nothing.  */
      return const0_rtx;
#endif /* CLEAR_INSN_CACHE */
    }

  /* We have a "clear_cache" insn, and it will handle everything.  */
  tree begin, end;
  rtx begin_rtx, end_rtx;

  /* We must not expand to a library call.  If we did, any
     fallback library function in libgcc that might contain a call to
     __builtin___clear_cache() would recurse infinitely.  */
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    {
      error ("both arguments to %<__builtin___clear_cache%> must be pointers");
      return const0_rtx;
    }

  if (targetm.have_clear_cache ())
    {
      struct expand_operand ops[2];

      begin = CALL_EXPR_ARG (exp, 0);
      begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);

      end = CALL_EXPR_ARG (exp, 1);
      end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);

      create_address_operand (&ops[0], begin_rtx);
      create_address_operand (&ops[1], end_rtx);
      if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
        return const0_rtx;
    }
  return const0_rtx;
}

/* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT.  */

static rtx
round_trampoline_addr (rtx tramp)
{
  rtx temp, addend, mask;

  /* If we don't need too much alignment, we'll have been guaranteed
     proper alignment by get_trampoline_type.  */
  if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
    return tramp;

  /* Round address up to desired boundary.  */
  temp = gen_reg_rtx (Pmode);
  addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
  mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);

  temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
                              temp, 0, OPTAB_LIB_WIDEN);
  tramp = expand_simple_binop (Pmode, AND, temp, mask,
                               temp, 0, OPTAB_LIB_WIDEN);

  return tramp;
}

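/* Illustrative sketch (editor's note, not part of GCC): the PLUS/AND pair
   above is the standard round-up-to-alignment idiom:

     unsigned long
     round_up (unsigned long addr, unsigned long align)  // align: power of 2
     {
       return (addr + align - 1) & -align;
     }

   with ALIGN equal to TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT.  */
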
static rtx
expand_builtin_init_trampoline (tree exp, bool onstack)
{
  tree t_tramp, t_func, t_chain;
  rtx m_tramp, r_tramp, r_chain, tmp;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
                         POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_tramp = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  r_tramp = expand_normal (t_tramp);
  m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
  MEM_NOTRAP_P (m_tramp) = 1;

  /* If ONSTACK, the TRAMP argument should be the address of a field
     within the local function's FRAME decl.  Either way, let's see if
     we can fill in the MEM_ATTRs for this memory.  */
  if (TREE_CODE (t_tramp) == ADDR_EXPR)
    set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);

  /* Creator of a heap trampoline is responsible for making sure the
     address is aligned to at least STACK_BOUNDARY.  Normally malloc
     will ensure this anyhow.  */
  tmp = round_trampoline_addr (r_tramp);
  if (tmp != r_tramp)
    {
      m_tramp = change_address (m_tramp, BLKmode, tmp);
      set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
      set_mem_size (m_tramp, TRAMPOLINE_SIZE);
    }

  /* The FUNC argument should be the address of the nested function.
     Extract the actual function decl to pass to the hook.  */
  gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
  t_func = TREE_OPERAND (t_func, 0);
  gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);

  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the trampoline.  */
  targetm.calls.trampoline_init (m_tramp, t_func, r_chain);

  if (onstack)
    {
      trampolines_created = 1;

      if (targetm.calls.custom_function_descriptors != 0)
        warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
                    "trampoline generated for nested function %qD", t_func);
    }

  return const0_rtx;
}

static rtx
expand_builtin_adjust_trampoline (tree exp)
{
  rtx tramp;

  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
  tramp = round_trampoline_addr (tramp);
  if (targetm.calls.trampoline_adjust_address)
    tramp = targetm.calls.trampoline_adjust_address (tramp);

  return tramp;
}

/* Expand a call to the builtin descriptor initialization routine.
   A descriptor is made up of a couple of pointers to the static
   chain and the code entry in this order.  */

static rtx
expand_builtin_init_descriptor (tree exp)
{
  tree t_descr, t_func, t_chain;
  rtx m_descr, r_descr, r_func, r_chain;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
                         VOID_TYPE))
    return NULL_RTX;

  t_descr = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  r_descr = expand_normal (t_descr);
  m_descr = gen_rtx_MEM (BLKmode, r_descr);
  MEM_NOTRAP_P (m_descr) = 1;

  r_func = expand_normal (t_func);
  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the descriptor.  */
  emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
  emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
                                     POINTER_SIZE / BITS_PER_UNIT), r_func);

  return const0_rtx;
}
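/* Illustrative layout, assuming 64-bit pointers (not part of the
   original source): the two moves above build

        descr + 0:  static chain value  (R_CHAIN)
        descr + 8:  code entry point    (R_FUNC)

   where 8 == POINTER_SIZE / BITS_PER_UNIT.  */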
/* Expand a call to the builtin descriptor adjustment routine.  */

static rtx
expand_builtin_adjust_descriptor (tree exp)
{
  rtx tramp;

  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tramp = expand_normal (CALL_EXPR_ARG (exp, 0));

  /* Unalign the descriptor to allow runtime identification.  */
  tramp = plus_constant (ptr_mode, tramp,
                         targetm.calls.custom_function_descriptors);

  return force_operand (tramp, NULL_RTX);
}
/* Expand the call EXP to the built-in signbit, signbitf or signbitl
   function.  The function first checks whether the back end provides
   an insn to implement signbit for the respective mode.  If not, it
   checks whether the floating point format of the value is such that
   the sign bit can be extracted.  If that is not the case, error out.
   EXP is the expression that is a call to the builtin function; if
   convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_signbit (tree exp, rtx target)
{
  const struct real_format *fmt;
  scalar_float_mode fmode;
  scalar_int_mode rmode, imode;
  tree arg;
  int word, bitpos;
  enum insn_code icode;
  rtx temp;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
  rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
  fmt = REAL_MODE_FORMAT (fmode);

  arg = builtin_save_expr (arg);

  /* Expand the argument yielding a RTX expression.  */
  temp = expand_normal (arg);

  /* Check if the back end provides an insn that handles signbit for the
     argument's mode.  */
  icode = optab_handler (signbit_optab, fmode);
  if (icode != CODE_FOR_nothing)
    {
      rtx_insn *last = get_last_insn ();
      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
        return target;
      delete_insns_since (last);
    }

  /* For floating point formats without a sign bit, implement signbit
     as "ARG < 0.0".  */
  bitpos = fmt->signbit_ro;
  if (bitpos < 0)
    {
      /* But we can't do this if the format supports signed zero.  */
      gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));

      arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
                             build_real (TREE_TYPE (arg), dconst0));
      return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
    }

  if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
    {
      imode = int_mode_for_mode (fmode).require ();
      temp = gen_lowpart (imode, temp);
    }
  else
    {
      imode = word_mode;
      /* Handle targets with different FP word orders.  */
      if (FLOAT_WORDS_BIG_ENDIAN)
        word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
      else
        word = bitpos / BITS_PER_WORD;
      temp = operand_subword_force (temp, word, fmode);
      bitpos = bitpos % BITS_PER_WORD;
    }

  /* Force the intermediate word_mode (or narrower) result into a
     register.  This avoids attempting to create paradoxical SUBREGs
     of floating point modes below.  */
  temp = force_reg (imode, temp);

  /* If the bitpos is within the "result mode" lowpart, the operation
     can be implemented with a single bitwise AND.  Otherwise, we need
     a right shift and an AND.  */

  if (bitpos < GET_MODE_BITSIZE (rmode))
    {
      wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));

      if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
        temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp,
                           immed_wide_int_const (mask, rmode),
                           NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }
  else
    {
      /* Perform a logical right shift to place the signbit in the least
         significant bit, then truncate the result to the desired mode
         and mask just this bit.  */
      temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
      temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp, const1_rtx,
                           NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }

  return temp;
}
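/* Worked example (illustrative): for IEEE double, signbit_ro is 63.
   On a 32-bit target the sign bit sits in word 63 / 32 == 1 (word 0
   when FLOAT_WORDS_BIG_ENDIAN), and bitpos becomes 63 % 32 == 31.
   Since 31 is still within the precision of the int result mode, the
   first branch applies a single AND against a 1<<31 mask; only a sign
   bit beyond the result precision needs the shift-then-AND path.  */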
/* Expand fork or exec calls.  TARGET is the desired target of the
   call.  EXP is the call.  FN is the identifier of the actual
   function.  IGNORE is nonzero if the value is to be ignored.  */

static rtx
expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
{
  tree id, decl;
  tree call;

  /* If we are not profiling, just call the function.  */
  if (!profile_arc_flag)
    return NULL_RTX;

  /* Otherwise call the wrapper.  This should be equivalent for the rest of
     compiler, so the code does not diverge, and the wrapper may run the
     code necessary for keeping the profiling sane.  */

  switch (DECL_FUNCTION_CODE (fn))
    {
    case BUILT_IN_FORK:
      id = get_identifier ("__gcov_fork");
      break;

    case BUILT_IN_EXECL:
      id = get_identifier ("__gcov_execl");
      break;

    case BUILT_IN_EXECV:
      id = get_identifier ("__gcov_execv");
      break;

    case BUILT_IN_EXECLP:
      id = get_identifier ("__gcov_execlp");
      break;

    case BUILT_IN_EXECLE:
      id = get_identifier ("__gcov_execle");
      break;

    case BUILT_IN_EXECVP:
      id = get_identifier ("__gcov_execvp");
      break;

    case BUILT_IN_EXECVE:
      id = get_identifier ("__gcov_execve");
      break;

    default:
      gcc_unreachable ();
    }

  decl = build_decl (DECL_SOURCE_LOCATION (fn),
                     FUNCTION_DECL, id, TREE_TYPE (fn));
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  TREE_NOTHROW (decl) = 1;
  DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
  DECL_VISIBILITY_SPECIFIED (decl) = 1;
  call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
  return expand_call (call, target, ignore);
}
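/* For instance (illustrative): with -fprofile-arcs, a call to fork ()
   is rewritten into __gcov_fork () from libgcov, which is expected to
   flush the arc counters before forking so that parent and child do
   not emit duplicate profile data.  */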
/* Reconstitute a mode for a __sync intrinsic operation.  Since the type of
   the pointer in these functions is void*, the tree optimizers may remove
   casts.  The mode computed in expand_builtin isn't reliable either, due
   to __sync_bool_compare_and_swap.

   FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
   group of builtins.  This gives us log2 of the mode size.  */

static inline machine_mode
get_builtin_sync_mode (int fcode_diff)
{
  /* The size is not negotiable, so ask not to get BLKmode in return
     if the target indicates that a smaller size would be better.  */
  return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
}
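/* Example (illustrative): for __sync_fetch_and_add_4, FCODE_DIFF is
   BUILT_IN_SYNC_FETCH_AND_ADD_4 - BUILT_IN_SYNC_FETCH_AND_ADD_1 == 2,
   so the access uses BITS_PER_UNIT << 2 == 32 bits, i.e. SImode on
   typical targets.  */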
/* Expand the memory expression LOC and return the appropriate memory operand
   for the builtin_sync operations.  */

static rtx
get_builtin_sync_mem (tree loc, machine_mode mode)
{
  rtx addr, mem;
  int addr_space = TYPE_ADDR_SPACE (POINTER_TYPE_P (TREE_TYPE (loc))
                                    ? TREE_TYPE (TREE_TYPE (loc))
                                    : TREE_TYPE (loc));
  scalar_int_mode addr_mode = targetm.addr_space.address_mode (addr_space);

  addr = expand_expr (loc, NULL_RTX, addr_mode, EXPAND_SUM);
  addr = convert_memory_address (addr_mode, addr);

  /* Note that we explicitly do not want any alias information for this
     memory, so that we kill all other live memories.  Otherwise we don't
     satisfy the full barrier semantics of the intrinsic.  */
  mem = gen_rtx_MEM (mode, addr);

  set_mem_addr_space (mem, addr_space);

  mem = validize_mem (mem);

  /* The alignment needs to be at least according to that of the mode.  */
  set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
                           get_pointer_alignment (loc)));
  set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
  MEM_VOLATILE_P (mem) = 1;

  return mem;
}
/* Make sure an argument is in the right mode.
   EXP is the tree argument.
   MODE is the mode it should be in.  */

static rtx
expand_expr_force_mode (tree exp, machine_mode mode)
{
  rtx val;
  machine_mode old_mode;

  val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
  /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
     of CONST_INTs, where we know the old_mode only from the call argument.  */

  old_mode = GET_MODE (val);
  if (old_mode == VOIDmode)
    old_mode = TYPE_MODE (TREE_TYPE (exp));
  val = convert_modes (mode, old_mode, val, 1);
  return val;
}
/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
   EXP is the CALL_EXPR.  CODE is the rtx code
   that corresponds to the arithmetic or logical operation from the name;
   an exception here is that NOT actually means NAND.  TARGET is an optional
   place for us to store the results; AFTER is true if this is the
   fetch_and_xxx form.  */

static rtx
expand_builtin_sync_operation (machine_mode mode, tree exp,
                               enum rtx_code code, bool after,
                               rtx target)
{
  rtx val, mem;
  location_t loc = EXPR_LOCATION (exp);

  if (code == NOT && warn_sync_nand)
    {
      tree fndecl = get_callee_fndecl (exp);
      enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

      static bool warned_f_a_n, warned_n_a_f;

      switch (fcode)
        {
        case BUILT_IN_SYNC_FETCH_AND_NAND_1:
        case BUILT_IN_SYNC_FETCH_AND_NAND_2:
        case BUILT_IN_SYNC_FETCH_AND_NAND_4:
        case BUILT_IN_SYNC_FETCH_AND_NAND_8:
        case BUILT_IN_SYNC_FETCH_AND_NAND_16:
          if (warned_f_a_n)
            break;

          fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
          inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
          warned_f_a_n = true;
          break;

        case BUILT_IN_SYNC_NAND_AND_FETCH_1:
        case BUILT_IN_SYNC_NAND_AND_FETCH_2:
        case BUILT_IN_SYNC_NAND_AND_FETCH_4:
        case BUILT_IN_SYNC_NAND_AND_FETCH_8:
        case BUILT_IN_SYNC_NAND_AND_FETCH_16:
          if (warned_n_a_f)
            break;

          fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
          inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
          warned_n_a_f = true;
          break;

        default:
          gcc_unreachable ();
        }
    }

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
                                 after);
}
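/* Note (illustrative): NOT/NAND is the asymmetric case.  Since GCC 4.4,
   __sync_fetch_and_nand (p, v) implements *p = ~(*p & v) rather than
   the pre-4.4 *p = ~*p & v, which is what the inform () calls above
   point users at.  */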
/* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
   intrinsics.  EXP is the CALL_EXPR.  IS_BOOL is
   true if this is the boolean form.  TARGET is a place for us to store the
   results; this is NOT optional if IS_BOOL is true.  */

static rtx
expand_builtin_compare_and_swap (machine_mode mode, tree exp,
                                 bool is_bool, rtx target)
{
  rtx old_val, new_val, mem;
  rtx *pbool, *poval;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
  new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);

  pbool = poval = NULL;
  if (target != const0_rtx)
    {
      if (is_bool)
        pbool = &target;
      else
        poval = &target;
    }
  if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
                                       false, MEMMODEL_SYNC_SEQ_CST,
                                       MEMMODEL_SYNC_SEQ_CST))
    return NULL_RTX;

  return target;
}
/* Expand the __sync_lock_test_and_set intrinsic.  Note that the most
   general form is actually an atomic exchange, and some targets only
   support a reduced form with the second argument being a constant 1.
   EXP is the CALL_EXPR; TARGET is an optional place for us to store
   the results.  */

static rtx
expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
                                       rtx target)
{
  rtx val, mem;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_sync_lock_test_and_set (target, mem, val);
}
/* Expand the __sync_lock_release intrinsic.  EXP is the CALL_EXPR.  */

static void
expand_builtin_sync_lock_release (machine_mode mode, tree exp)
{
  rtx mem;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
}
/* Given an integer representing an ``enum memmodel'', verify its
   correctness and return the memory model enum.  */

static enum memmodel
get_memmodel (tree exp)
{
  rtx op;
  unsigned HOST_WIDE_INT val;
  location_t loc
    = expansion_point_location_if_in_system_header (input_location);

  /* If the parameter is not a constant, it's a run time value so we'll just
     convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking.  */
  if (TREE_CODE (exp) != INTEGER_CST)
    return MEMMODEL_SEQ_CST;

  op = expand_normal (exp);

  val = INTVAL (op);
  if (targetm.memmodel_check)
    val = targetm.memmodel_check (val);
  else if (val & ~MEMMODEL_MASK)
    {
      warning_at (loc, OPT_Winvalid_memory_model,
                  "unknown architecture specifier in memory model to builtin");
      return MEMMODEL_SEQ_CST;
    }

  /* Should never see a user explicit SYNC memory model, so >= LAST works.  */
  if (memmodel_base (val) >= MEMMODEL_LAST)
    {
      warning_at (loc, OPT_Winvalid_memory_model,
                  "invalid memory model argument to builtin");
      return MEMMODEL_SEQ_CST;
    }

  /* Workaround for Bugzilla 59448.  GCC doesn't track consume properly, so
     be conservative and promote consume to acquire.  */
  if (val == MEMMODEL_CONSUME)
    val = MEMMODEL_ACQUIRE;

  return (enum memmodel) val;
}
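/* Example (illustrative): __atomic_load_n (p, __ATOMIC_CONSUME) arrives
   here with the constant 1; the PR 59448 workaround above promotes it,
   so the load is expanded with MEMMODEL_ACQUIRE semantics instead.  */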
/* Expand the __atomic_exchange intrinsic:
        TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
{
  rtx val, mem;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_exchange (target, mem, val, model);
}
/* Expand the __atomic_compare_exchange intrinsic:
        bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
                                        TYPE desired, BOOL weak,
                                        enum memmodel success,
                                        enum memmodel failure)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
                                        rtx target)
{
  rtx expect, desired, mem, oldval;
  rtx_code_label *label;
  enum memmodel success, failure;
  tree weak;
  bool is_weak;
  location_t loc
    = expansion_point_location_if_in_system_header (input_location);

  success = get_memmodel (CALL_EXPR_ARG (exp, 4));
  failure = get_memmodel (CALL_EXPR_ARG (exp, 5));

  if (failure > success)
    {
      warning_at (loc, OPT_Winvalid_memory_model,
                  "failure memory model cannot be stronger than success "
                  "memory model for %<__atomic_compare_exchange%>");
      success = MEMMODEL_SEQ_CST;
    }

  if (is_mm_release (failure) || is_mm_acq_rel (failure))
    {
      warning_at (loc, OPT_Winvalid_memory_model,
                  "invalid failure memory model for "
                  "%<__atomic_compare_exchange%>");
      failure = MEMMODEL_SEQ_CST;
      success = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  expect = expand_normal (CALL_EXPR_ARG (exp, 1));
  expect = convert_memory_address (Pmode, expect);
  expect = gen_rtx_MEM (mode, expect);
  desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);

  weak = CALL_EXPR_ARG (exp, 3);
  is_weak = false;
  if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
    is_weak = true;

  if (target == const0_rtx)
    target = NULL;

  /* Lest the rtl backend create a race condition with an improper store
     to memory, always create a new pseudo for OLDVAL.  */
  oldval = NULL;

  if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
                                       is_weak, success, failure))
    return NULL_RTX;

  /* Conditionally store back to EXPECT, lest we create a race condition
     with an improper store to memory.  */
  /* ??? With a rearrangement of atomics at the gimple level, we can handle
     the normal case where EXPECT is totally private, i.e. a register.  At
     which point the store can be unconditional.  */
  label = gen_label_rtx ();
  emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
                           GET_MODE (target), 1, label);
  emit_move_insn (expect, oldval);
  emit_label (label);

  return target;
}
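/* Usage sketch (illustrative): for

     ok = __atomic_compare_exchange_n (p, &expected, desired, 0,
                                       __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);

   the code above emits the CAS and then, only when the result is
   false, stores the observed old value back into EXPECTED, matching
   the C11/C++11 semantics.  */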
/* Helper function for expand_ifn_atomic_compare_exchange - expand
   internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
   call.  The weak parameter must be dropped to match the expected parameter
   list and the expected argument changed from value to pointer to memory
   location.  */

static void
expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
{
  unsigned int z;
  vec<tree, va_gc> *vec;

  vec_alloc (vec, 5);
  vec->quick_push (gimple_call_arg (call, 0));
  tree expected = gimple_call_arg (call, 1);
  rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
                                      TREE_TYPE (expected));
  rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
  if (expd != x)
    emit_move_insn (x, expd);
  tree v = make_tree (TREE_TYPE (expected), x);
  vec->quick_push (build1 (ADDR_EXPR,
                           build_pointer_type (TREE_TYPE (expected)), v));
  vec->quick_push (gimple_call_arg (call, 2));
  /* Skip the boolean weak parameter.  */
  for (z = 4; z < 6; z++)
    vec->quick_push (gimple_call_arg (call, z));
  /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}.  */
  unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
  gcc_assert (bytes_log2 < 5);
  built_in_function fncode
    = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
                           + bytes_log2);
  tree fndecl = builtin_decl_explicit (fncode);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
                    fndecl);
  tree exp = build_call_vec (boolean_type_node, fn, vec);
  tree lhs = gimple_call_lhs (call);
  rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
  if (lhs)
    {
      rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
      if (GET_MODE (boolret) != mode)
        boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
      x = force_reg (mode, x);
      write_complex_part (target, boolret, true);
      write_complex_part (target, x, false);
    }
}
/* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function.  */

void
expand_ifn_atomic_compare_exchange (gcall *call)
{
  int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
  gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
  machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
  rtx expect, desired, mem, oldval, boolret;
  enum memmodel success, failure;
  tree lhs;
  bool is_weak;
  location_t loc
    = expansion_point_location_if_in_system_header (gimple_location (call));

  success = get_memmodel (gimple_call_arg (call, 4));
  failure = get_memmodel (gimple_call_arg (call, 5));

  if (failure > success)
    {
      warning_at (loc, OPT_Winvalid_memory_model,
                  "failure memory model cannot be stronger than success "
                  "memory model for %<__atomic_compare_exchange%>");
      success = MEMMODEL_SEQ_CST;
    }

  if (is_mm_release (failure) || is_mm_acq_rel (failure))
    {
      warning_at (loc, OPT_Winvalid_memory_model,
                  "invalid failure memory model for "
                  "%<__atomic_compare_exchange%>");
      failure = MEMMODEL_SEQ_CST;
      success = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    {
      expand_ifn_atomic_compare_exchange_into_call (call, mode);
      return;
    }

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);

  expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
  desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);

  is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;

  boolret = NULL;
  oldval = NULL;

  if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
                                       is_weak, success, failure))
    {
      expand_ifn_atomic_compare_exchange_into_call (call, mode);
      return;
    }

  lhs = gimple_call_lhs (call);
  if (lhs)
    {
      rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
      if (GET_MODE (boolret) != mode)
        boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
      write_complex_part (target, boolret, true);
      write_complex_part (target, oldval, false);
    }
}
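/* Note (illustrative): IFN_ATOMIC_COMPARE_EXCHANGE returns a complex
   pair; the two write_complex_part calls above store the fetched old
   value into the real part and the success flag into the imaginary
   part of the LHS.  */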
/* Expand the __atomic_load intrinsic:
        TYPE __atomic_load (TYPE *object, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
{
  rtx mem;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 1));
  if (is_mm_release (model) || is_mm_acq_rel (model))
    {
      location_t loc
        = expansion_point_location_if_in_system_header (input_location);
      warning_at (loc, OPT_Winvalid_memory_model,
                  "invalid memory model for %<__atomic_load%>");
      model = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operand.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  return expand_atomic_load (target, mem, model);
}
/* Expand the __atomic_store intrinsic:
        void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_store (machine_mode mode, tree exp)
{
  rtx mem, val;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));
  if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
        || is_mm_release (model)))
    {
      location_t loc
        = expansion_point_location_if_in_system_header (input_location);
      warning_at (loc, OPT_Winvalid_memory_model,
                  "invalid memory model for %<__atomic_store%>");
      model = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_store (mem, val, model, false);
}
/* Expand the __atomic_fetch_XXX intrinsic:
        TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.
   CODE is the operation, PLUS, MINUS, AND, XOR, or IOR.
   FETCH_AFTER is true if returning the result of the operation.
   FETCH_AFTER is false if returning the value before the operation.
   IGNORE is true if the result is not used.
   EXT_CALL is the correct builtin for an external call if this cannot be
   resolved to an instruction sequence.  */

static rtx
expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
                                enum rtx_code code, bool fetch_after,
                                bool ignore, enum built_in_function ext_call)
{
  rtx val, mem, ret;
  enum memmodel model;
  tree fndecl;
  tree addr;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  /* Only try generating instructions if inlining is turned on.  */
  if (flag_inline_atomics)
    {
      ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
      if (ret)
        return ret;
    }

  /* Return if a different routine isn't needed for the library call.  */
  if (ext_call == BUILT_IN_NONE)
    return NULL_RTX;

  /* Change the call to the specified function.  */
  fndecl = get_callee_fndecl (exp);
  addr = CALL_EXPR_FN (exp);
  STRIP_NOPS (addr);

  gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
  TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);

  /* If we will emit code after the call, the call can not be a tail call.
     If it is emitted as a tail call, a barrier is emitted after it, and
     then all trailing code is removed.  */
  if (!ignore)
    CALL_EXPR_TAILCALL (exp) = 0;

  /* Expand the call here so we can emit trailing code.  */
  ret = expand_call (exp, target, ignore);

  /* Replace the original function just in case it matters.  */
  TREE_OPERAND (addr, 0) = fndecl;

  /* Then issue the arithmetic correction to return the right result.  */
  if (!ignore)
    {
      if (code == NOT)
        {
          ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
                                     OPTAB_LIB_WIDEN);
          ret = expand_simple_unop (mode, NOT, ret, target, true);
        }
      else
        ret = expand_simple_binop (mode, code, ret, val, target, true,
                                   OPTAB_LIB_WIDEN);
    }
  return ret;
}
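/* Worked example (illustrative): if __atomic_nand_fetch_4 cannot be
   inlined, EXT_CALL redirects it to the fetch-first library routine;
   because that call returns the pre-operation value, and NOT here
   encodes NAND, the correction above recomputes the post-operation
   result as ~(ret & val).  */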
/* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function.  */

void
expand_ifn_atomic_bit_test_and (gcall *call)
{
  tree ptr = gimple_call_arg (call, 0);
  tree bit = gimple_call_arg (call, 1);
  tree flag = gimple_call_arg (call, 2);
  tree lhs = gimple_call_lhs (call);
  enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
  machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
  enum rtx_code code;
  optab optab;
  struct expand_operand ops[5];

  gcc_assert (flag_inline_atomics);

  if (gimple_call_num_args (call) == 4)
    model = get_memmodel (gimple_call_arg (call, 3));

  rtx mem = get_builtin_sync_mem (ptr, mode);
  rtx val = expand_expr_force_mode (bit, mode);

  switch (gimple_call_internal_fn (call))
    {
    case IFN_ATOMIC_BIT_TEST_AND_SET:
      code = IOR;
      optab = atomic_bit_test_and_set_optab;
      break;
    case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
      code = XOR;
      optab = atomic_bit_test_and_complement_optab;
      break;
    case IFN_ATOMIC_BIT_TEST_AND_RESET:
      code = AND;
      optab = atomic_bit_test_and_reset_optab;
      break;
    default:
      gcc_unreachable ();
    }

  if (lhs == NULL_TREE)
    {
      val = expand_simple_binop (mode, ASHIFT, const1_rtx,
                                 val, NULL_RTX, true, OPTAB_DIRECT);
      if (code == AND)
        val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
      expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
      return;
    }

  rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  enum insn_code icode = direct_optab_handler (optab, mode);
  gcc_assert (icode != CODE_FOR_nothing);
  create_output_operand (&ops[0], target, mode);
  create_fixed_operand (&ops[1], mem);
  create_convert_operand_to (&ops[2], val, mode, true);
  create_integer_operand (&ops[3], model);
  create_integer_operand (&ops[4], integer_onep (flag));
  if (maybe_expand_insn (icode, 5, ops))
    return;

  rtx bitval = val;
  val = expand_simple_binop (mode, ASHIFT, const1_rtx,
                             val, NULL_RTX, true, OPTAB_DIRECT);
  rtx maskval = val;
  if (code == AND)
    val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
  rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
                                       code, model, false);
  if (integer_onep (flag))
    {
      result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
                                    NULL_RTX, true, OPTAB_DIRECT);
      result = expand_simple_binop (mode, AND, result, const1_rtx, target,
                                    true, OPTAB_DIRECT);
    }
  else
    result = expand_simple_binop (mode, AND, result, maskval, target, true,
                                  OPTAB_DIRECT);
  if (result != target)
    emit_move_insn (target, result);
}
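/* Source pattern (illustrative): the gimple passes synthesize this
   internal function from code such as

     if (__atomic_fetch_or (&word, 1 << bit, __ATOMIC_SEQ_CST)
         & (1 << bit))
       ...

   so targets with a native atomic bit-test-and-set instruction (e.g.
   x86 "lock bts") can use it instead of a full fetch_or sequence.  */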
/* Expand an atomic clear operation.
        void _atomic_clear (BOOL *obj, enum memmodel)
   EXP is the call expression.  */

static rtx
expand_builtin_atomic_clear (tree exp)
{
  machine_mode mode;
  rtx mem, ret;
  enum memmodel model;

  mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  model = get_memmodel (CALL_EXPR_ARG (exp, 1));

  if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
    {
      location_t loc
        = expansion_point_location_if_in_system_header (input_location);
      warning_at (loc, OPT_Winvalid_memory_model,
                  "invalid memory model for %<__atomic_store%>");
      model = MEMMODEL_SEQ_CST;
    }

  /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
     Failing that, a store is issued by __atomic_store.  The only way this can
     fail is if the bool type is larger than a word size.  Unlikely, but
     handle it anyway for completeness.  Assume a single threaded model since
     there is no atomic support in this case, and no barriers are required.  */
  ret = expand_atomic_store (mem, const0_rtx, model, true);
  if (!ret)
    emit_move_insn (mem, const0_rtx);
  return const0_rtx;
}
/* Expand an atomic test_and_set operation.
        bool _atomic_test_and_set (BOOL *obj, enum memmodel)
   EXP is the call expression.  */

static rtx
expand_builtin_atomic_test_and_set (tree exp, rtx target)
{
  rtx mem;
  enum memmodel model;
  machine_mode mode;

  mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  model = get_memmodel (CALL_EXPR_ARG (exp, 1));

  return expand_atomic_test_and_set (target, mem, model);
}
/* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
   this architecture.  If ARG1 is NULL, use typical alignment for size ARG0.  */

static tree
fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
{
  int size;
  machine_mode mode;
  unsigned int mode_align, type_align;

  if (TREE_CODE (arg0) != INTEGER_CST)
    return NULL_TREE;

  /* We need a corresponding integer mode for the access to be lock-free.  */
  size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
  if (!int_mode_for_size (size, 0).exists (&mode))
    return boolean_false_node;

  mode_align = GET_MODE_ALIGNMENT (mode);

  if (TREE_CODE (arg1) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));

      /* Either this argument is null, or it's a fake pointer encoding
         the alignment of the object.  */
      val = least_bit_hwi (val);
      val *= BITS_PER_UNIT;

      if (val == 0 || mode_align < val)
        type_align = mode_align;
      else
        type_align = val;
    }
  else
    {
      tree ttype = TREE_TYPE (arg1);

      /* This function is usually invoked and folded immediately by the front
         end before anything else has a chance to look at it.  The pointer
         parameter at this point is usually cast to a void *, so check for that
         and look past the cast.  */
      if (CONVERT_EXPR_P (arg1)
          && POINTER_TYPE_P (ttype)
          && VOID_TYPE_P (TREE_TYPE (ttype))
          && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
        arg1 = TREE_OPERAND (arg1, 0);

      ttype = TREE_TYPE (arg1);
      gcc_assert (POINTER_TYPE_P (ttype));

      /* Get the underlying type of the object.  */
      ttype = TREE_TYPE (ttype);
      type_align = TYPE_ALIGN (ttype);
    }

  /* If the object has smaller alignment, the lock free routines cannot
     be used.  */
  if (type_align < mode_align)
    return boolean_false_node;

  /* Check if a compare_and_swap pattern exists for the mode which represents
     the required size.  The pattern is not allowed to fail, so the existence
     of the pattern indicates support is present.  Also require that an
     atomic load exists for the required size.  */
  if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
    return boolean_true_node;
  else
    return boolean_false_node;
}
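/* Example (illustrative): on x86_64, __atomic_always_lock_free (8, 0)
   folds to true because DImode has both a compare-and-swap pattern and
   an atomic load, whereas a pointer to an underaligned 8-byte object
   fails the type_align < mode_align check above and folds to false.  */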
/* Return true if the parameters to call EXP represent an object which will
   always generate lock free instructions.  The first argument represents the
   size of the object, and the second parameter is a pointer to the object
   itself.  If NULL is passed for the object, then the result is based on
   typical alignment for an object of the specified size.  Otherwise return
   false.  */

static rtx
expand_builtin_atomic_always_lock_free (tree exp)
{
  tree size;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);

  if (TREE_CODE (arg0) != INTEGER_CST)
    {
      error ("non-constant argument 1 to __atomic_always_lock_free");
      return const0_rtx;
    }

  size = fold_builtin_atomic_always_lock_free (arg0, arg1);
  if (size == boolean_true_node)
    return const1_rtx;
  return const0_rtx;
}
/* Return a one or zero if it can be determined that object ARG1 of size ARG0
   is lock free on this architecture.  */

static tree
fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
{
  if (!flag_inline_atomics)
    return NULL_TREE;

  /* If it isn't always lock free, don't generate a result.  */
  if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
    return boolean_true_node;

  return NULL_TREE;
}
/* Return true if the parameters to call EXP represent an object which will
   always generate lock free instructions.  The first argument represents the
   size of the object, and the second parameter is a pointer to the object
   itself.  If NULL is passed for the object, then the result is based on
   typical alignment for an object of the specified size.  Otherwise return
   NULL.  */

static rtx
expand_builtin_atomic_is_lock_free (tree exp)
{
  tree size;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);

  if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
    {
      error ("non-integer argument 1 to __atomic_is_lock_free");
      return NULL_RTX;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* If the value is known at compile time, return the RTX for it.  */
  size = fold_builtin_atomic_is_lock_free (arg0, arg1);
  if (size == boolean_true_node)
    return const1_rtx;

  return NULL_RTX;
}
/* Expand the __atomic_thread_fence intrinsic:
        void __atomic_thread_fence (enum memmodel)
   EXP is the CALL_EXPR.  */

static void
expand_builtin_atomic_thread_fence (tree exp)
{
  enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
  expand_mem_thread_fence (model);
}

/* Expand the __atomic_signal_fence intrinsic:
        void __atomic_signal_fence (enum memmodel)
   EXP is the CALL_EXPR.  */

static void
expand_builtin_atomic_signal_fence (tree exp)
{
  enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
  expand_mem_signal_fence (model);
}

/* Expand the __sync_synchronize intrinsic.  */

static void
expand_builtin_sync_synchronize (void)
{
  expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
}
static rtx
expand_builtin_thread_pointer (tree exp, rtx target)
{
  enum insn_code icode;
  if (!validate_arglist (exp, VOID_TYPE))
    return const0_rtx;
  icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand op;
      /* If the target is not suitable then create a new target.  */
      if (target == NULL_RTX
          || !REG_P (target)
          || GET_MODE (target) != Pmode)
        target = gen_reg_rtx (Pmode);
      create_output_operand (&op, target, Pmode);
      expand_insn (icode, 1, &op);
      return target;
    }
  error ("__builtin_thread_pointer is not supported on this target");
  return const0_rtx;
}
static void
expand_builtin_set_thread_pointer (tree exp)
{
  enum insn_code icode;
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return;
  icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand op;
      rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
                             Pmode, EXPAND_NORMAL);
      create_input_operand (&op, val, Pmode);
      expand_insn (icode, 1, &op);
      return;
    }
  error ("__builtin_set_thread_pointer is not supported on this target");
}
/* Emit code to restore the current value of stack.  */

static void
expand_stack_restore (tree var)
{
  rtx_insn *prev;
  rtx sa = expand_normal (var);

  sa = convert_memory_address (Pmode, sa);

  prev = get_last_insn ();
  emit_stack_restore (SAVE_BLOCK, sa);

  record_new_stack_level ();

  fixup_args_size_notes (prev, get_last_insn (), 0);
}

/* Emit code to save the current value of stack.  */

static rtx
expand_stack_save (void)
{
  rtx ret = NULL_RTX;

  emit_stack_save (SAVE_BLOCK, &ret);
  return ret;
}
/* Emit code to get the openacc gang, worker or vector id or size.  */

static rtx
expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
{
  const char *name;
  rtx fallback_retval;
  rtx_insn *(*gen_fn) (rtx, rtx);
  switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
    {
    case BUILT_IN_GOACC_PARLEVEL_ID:
      name = "__builtin_goacc_parlevel_id";
      fallback_retval = const0_rtx;
      gen_fn = targetm.gen_oacc_dim_pos;
      break;
    case BUILT_IN_GOACC_PARLEVEL_SIZE:
      name = "__builtin_goacc_parlevel_size";
      fallback_retval = const1_rtx;
      gen_fn = targetm.gen_oacc_dim_size;
      break;
    default:
      gcc_unreachable ();
    }

  if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
    {
      error ("%qs only supported in OpenACC code", name);
      return const0_rtx;
    }

  tree arg = CALL_EXPR_ARG (exp, 0);
  if (TREE_CODE (arg) != INTEGER_CST)
    {
      error ("non-constant argument 0 to %qs", name);
      return const0_rtx;
    }

  int dim = TREE_INT_CST_LOW (arg);
  switch (dim)
    {
    case GOMP_DIM_GANG:
    case GOMP_DIM_WORKER:
    case GOMP_DIM_VECTOR:
      break;
    default:
      error ("illegal argument 0 to %qs", name);
      return const0_rtx;
    }

  if (ignore)
    return target;

  if (target == NULL_RTX)
    target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

  if (!targetm.have_oacc_dim_size ())
    {
      emit_move_insn (target, fallback_retval);
      return target;
    }

  rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
  emit_insn (gen_fn (reg, GEN_INT (dim)));
  if (reg != target)
    emit_move_insn (target, reg);

  return target;
}
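/* Usage (illustrative): inside an OpenACC offloaded region,
   __builtin_goacc_parlevel_id (GOMP_DIM_WORKER) yields the current
   worker index; on a target without the oacc_dim_pos/oacc_dim_size
   patterns, the id falls back to 0 and the size to 1, as set by
   FALLBACK_RETVAL above.  */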
/* Expand a string compare operation using a sequence of char comparisons
   to get rid of the calling overhead, with result going to TARGET if
   that's convenient.

   VAR_STR is the variable string source;
   CONST_STR is the constant string source;
   LENGTH is the number of chars to compare;
   CONST_STR_N indicates which source string is the constant string;
   IS_MEMCMP indicates whether it's a memcmp or strcmp.

   The call is expanded to: (assume const_str_n is 2, i.e., arg2 is a
   constant string)

   target = (int) (unsigned char) var_str[0]
            - (int) (unsigned char) const_str[0];
   if (target != 0)
     goto ne_label;
     ...
   target = (int) (unsigned char) var_str[length - 2]
            - (int) (unsigned char) const_str[length - 2];
   if (target != 0)
     goto ne_label;
   target = (int) (unsigned char) var_str[length - 1]
            - (int) (unsigned char) const_str[length - 1];
   ne_label:
  */

static rtx
inline_string_cmp (rtx target, tree var_str, const char *const_str,
                   unsigned HOST_WIDE_INT length,
                   int const_str_n, machine_mode mode)
{
  HOST_WIDE_INT offset = 0;
  rtx var_rtx_array
    = get_memory_rtx (var_str, build_int_cst (unsigned_type_node, length));
  rtx var_rtx = NULL_RTX;
  rtx const_rtx = NULL_RTX;
  rtx result = target ? target : gen_reg_rtx (mode);
  rtx_code_label *ne_label = gen_label_rtx ();
  tree unit_type_node = unsigned_char_type_node;
  scalar_int_mode unit_mode
    = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);

  start_sequence ();

  for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
    {
      var_rtx
        = adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
      const_rtx = c_readstr (const_str + offset, unit_mode);
      rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
      rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;

      op0 = convert_modes (mode, unit_mode, op0, 1);
      op1 = convert_modes (mode, unit_mode, op1, 1);
      result = expand_simple_binop (mode, MINUS, op0, op1,
                                    result, 1, OPTAB_WIDEN);
      if (i < length - 1)
        emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
                                 mode, true, ne_label);
      offset += GET_MODE_SIZE (unit_mode);
    }

  emit_label (ne_label);
  rtx_insn *insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return result;
}
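/* Expansion sketch (illustrative): with CONST_STR "ab" and LENGTH 2,
   the loop above emits roughly

     result = (int) (unsigned char) var_str[0] - (int) 'a';
     if (result != 0) goto ne_label;
     result = (int) (unsigned char) var_str[1] - (int) 'b';
   ne_label:

   so only the last byte's difference falls through without a branch.  */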
/* Inline expansion of a call to str(n)cmp or memcmp, with result going
   to TARGET if that's convenient.
   If the call is not expanded inline, return NULL_RTX.  */

static rtx
inline_expand_builtin_string_cmp (tree exp, rtx target)
{
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  unsigned HOST_WIDE_INT length = 0;
  bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);

  /* Do NOT apply this inlining expansion when optimizing for size or
     optimization level below 2.  */
  if (optimize < 2 || optimize_insn_for_size_p ())
    return NULL_RTX;

  gcc_checking_assert (fcode == BUILT_IN_STRCMP
                       || fcode == BUILT_IN_STRNCMP
                       || fcode == BUILT_IN_MEMCMP);

  /* On a target where the type of the call (int) has the same or narrower
     precision than unsigned char, give up the inlining expansion.  */
  if (TYPE_PRECISION (unsigned_char_type_node)
      >= TYPE_PRECISION (TREE_TYPE (exp)))
    return NULL_RTX;

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree arg2 = CALL_EXPR_ARG (exp, 1);
  tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;

  unsigned HOST_WIDE_INT len1 = 0;
  unsigned HOST_WIDE_INT len2 = 0;
  unsigned HOST_WIDE_INT len3 = 0;

  const char *src_str1 = c_getstr (arg1, &len1);
  const char *src_str2 = c_getstr (arg2, &len2);

  /* If neither of the strings is a constant string, the call does not
     qualify.  */
  if (!src_str1 && !src_str2)
    return NULL_RTX;

  /* For strncmp, if the length is not a constant, the call does not
     qualify.  */
  if (is_ncmp && !tree_fits_uhwi_p (len3_tree))
    return NULL_RTX;

  int const_str_n = 0;
  if (!len1)
    const_str_n = 2;
  else if (!len2)
    const_str_n = 1;
  else if (len2 > len1)
    const_str_n = 1;
  else
    const_str_n = 2;

  gcc_checking_assert (const_str_n > 0);
  length = (const_str_n == 1) ? len1 : len2;

  if (is_ncmp && (len3 = tree_to_uhwi (len3_tree)) < length)
    length = len3;

  /* If the length of the comparison is larger than the threshold,
     do not inline the call.  */
  if (length > (unsigned HOST_WIDE_INT)
               PARAM_VALUE (BUILTIN_STRING_CMP_INLINE_LENGTH))
    return NULL_RTX;

  machine_mode mode = TYPE_MODE (TREE_TYPE (exp));

  /* Now, start the inline expansion of the call.  */
  return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
                            (const_str_n == 1) ? src_str1 : src_str2, length,
                            const_str_n, mode);
}
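/* Tuning note (illustrative): the threshold above is controlled by
   --param builtin-string-cmp-inline-length, so short literal
   comparisons such as strcmp (s, "no") are inlined at -O2 while longer
   literals still go through the library call.  */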
/* Expand a call to __builtin_speculation_safe_value_<N>.  MODE
   represents the size of the first argument to that call, or VOIDmode
   if the argument is a pointer.  IGNORE will be true if the result
   isn't used.  */

static rtx
expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
                               bool ignore)
{
  rtx val, failsafe;
  unsigned nargs = call_expr_nargs (exp);

  tree arg0 = CALL_EXPR_ARG (exp, 0);

  if (mode == VOIDmode)
    {
      mode = TYPE_MODE (TREE_TYPE (arg0));
      gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
    }

  val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);

  /* An optional second argument can be used as a failsafe value on
     some machines.  If it isn't present, then the failsafe value is
     assumed to be 0.  */
  if (nargs > 1)
    {
      tree arg1 = CALL_EXPR_ARG (exp, 1);
      failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
    }
  else
    failsafe = const0_rtx;

  /* If the result isn't used, the behavior is undefined.  It would be
     nice to emit a warning here, but path splitting means this might
     happen with legitimate code.  So simply drop the builtin
     expansion in that case; we've handled any side-effects above.  */
  if (ignore)
    return const0_rtx;

  /* If we don't have a suitable target, create one to hold the result.  */
  if (target == NULL || GET_MODE (target) != mode)
    target = gen_reg_rtx (mode);

  if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
    val = convert_modes (mode, VOIDmode, val, false);

  return targetm.speculation_safe_value (mode, target, val, failsafe);
}
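/* Usage sketch (illustrative): after a bounds check,

     if (i < len)
       x = __builtin_speculation_safe_value (array[i]);

   the target hook can force X to the failsafe value (0 here) when the
   processor is executing along a mis-speculated path, limiting
   Spectre-style value leaks.  */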
7204 /* Expand an expression EXP that calls a built-in function,
7205 with result going to TARGET if that's convenient
7206 (and in mode MODE if that's convenient).
7207 SUBTARGET may be used as the target for computing one of EXP's operands.
7208 IGNORE is nonzero if the value is to be ignored. */
7211 expand_builtin (tree exp
, rtx target
, rtx subtarget
, machine_mode mode
,
7214 tree fndecl
= get_callee_fndecl (exp
);
7215 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
7216 machine_mode target_mode
= TYPE_MODE (TREE_TYPE (exp
));
7219 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
7220 return targetm
.expand_builtin (exp
, target
, subtarget
, mode
, ignore
);
7222 /* When ASan is enabled, we don't want to expand some memory/string
7223 builtins and rely on libsanitizer's hooks. This allows us to avoid
7224 redundant checks and be sure, that possible overflow will be detected
7227 if ((flag_sanitize
& SANITIZE_ADDRESS
) && asan_intercepted_p (fcode
))
7228 return expand_call (exp
, target
, ignore
);
7230 /* When not optimizing, generate calls to library functions for a certain
7233 && !called_as_built_in (fndecl
)
7234 && fcode
!= BUILT_IN_FORK
7235 && fcode
!= BUILT_IN_EXECL
7236 && fcode
!= BUILT_IN_EXECV
7237 && fcode
!= BUILT_IN_EXECLP
7238 && fcode
!= BUILT_IN_EXECLE
7239 && fcode
!= BUILT_IN_EXECVP
7240 && fcode
!= BUILT_IN_EXECVE
7241 && !ALLOCA_FUNCTION_CODE_P (fcode
)
7242 && fcode
!= BUILT_IN_FREE
)
7243 return expand_call (exp
, target
, ignore
);
7245 /* The built-in function expanders test for target == const0_rtx
7246 to determine whether the function's result will be ignored. */
7248 target
= const0_rtx
;
7250 /* If the result of a pure or const built-in function is ignored, and
7251 none of its arguments are volatile, we can avoid expanding the
7252 built-in call and just evaluate the arguments for side-effects. */
7253 if (target
== const0_rtx
7254 && ((flags
= flags_from_decl_or_type (fndecl
)) & (ECF_CONST
| ECF_PURE
))
7255 && !(flags
& ECF_LOOPING_CONST_OR_PURE
))
7257 bool volatilep
= false;
7259 call_expr_arg_iterator iter
;
7261 FOR_EACH_CALL_EXPR_ARG (arg
, iter
, exp
)
7262 if (TREE_THIS_VOLATILE (arg
))
7270 FOR_EACH_CALL_EXPR_ARG (arg
, iter
, exp
)
7271 expand_expr (arg
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
7278 CASE_FLT_FN (BUILT_IN_FABS
):
7279 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS
):
7280 case BUILT_IN_FABSD32
:
7281 case BUILT_IN_FABSD64
:
7282 case BUILT_IN_FABSD128
:
7283 target
= expand_builtin_fabs (exp
, target
, subtarget
);
7288 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
7289 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN
):
7290 target
= expand_builtin_copysign (exp
, target
, subtarget
);
7295 /* Just do a normal library call if we were unable to fold
7297 CASE_FLT_FN (BUILT_IN_CABS
):
7300 CASE_FLT_FN (BUILT_IN_FMA
):
7301 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA
):
7302 target
= expand_builtin_mathfn_ternary (exp
, target
, subtarget
);
7307 CASE_FLT_FN (BUILT_IN_ILOGB
):
7308 if (! flag_unsafe_math_optimizations
)
7311 CASE_FLT_FN (BUILT_IN_ISINF
):
7312 CASE_FLT_FN (BUILT_IN_FINITE
):
7313 case BUILT_IN_ISFINITE
:
7314 case BUILT_IN_ISNORMAL
:
7315 target
= expand_builtin_interclass_mathfn (exp
, target
);
7320 CASE_FLT_FN (BUILT_IN_ICEIL
):
7321 CASE_FLT_FN (BUILT_IN_LCEIL
):
7322 CASE_FLT_FN (BUILT_IN_LLCEIL
):
7323 CASE_FLT_FN (BUILT_IN_LFLOOR
):
7324 CASE_FLT_FN (BUILT_IN_IFLOOR
):
7325 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
7326 target
= expand_builtin_int_roundingfn (exp
, target
);
7331 CASE_FLT_FN (BUILT_IN_IRINT
):
7332 CASE_FLT_FN (BUILT_IN_LRINT
):
7333 CASE_FLT_FN (BUILT_IN_LLRINT
):
7334 CASE_FLT_FN (BUILT_IN_IROUND
):
7335 CASE_FLT_FN (BUILT_IN_LROUND
):
7336 CASE_FLT_FN (BUILT_IN_LLROUND
):
7337 target
= expand_builtin_int_roundingfn_2 (exp
, target
);
7342 CASE_FLT_FN (BUILT_IN_POWI
):
7343 target
= expand_builtin_powi (exp
, target
);
7348 CASE_FLT_FN (BUILT_IN_CEXPI
):
7349 target
= expand_builtin_cexpi (exp
, target
);
7350 gcc_assert (target
);
7353 CASE_FLT_FN (BUILT_IN_SIN
):
7354 CASE_FLT_FN (BUILT_IN_COS
):
7355 if (! flag_unsafe_math_optimizations
)
7357 target
= expand_builtin_mathfn_3 (exp
, target
, subtarget
);
7362 CASE_FLT_FN (BUILT_IN_SINCOS
):
7363 if (! flag_unsafe_math_optimizations
)
7365 target
= expand_builtin_sincos (exp
);
7370 case BUILT_IN_APPLY_ARGS
:
7371 return expand_builtin_apply_args ();
7373 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7374 FUNCTION with a copy of the parameters described by
7375 ARGUMENTS, and ARGSIZE. It returns a block of memory
7376 allocated on the stack into which is stored all the registers
7377 that might possibly be used for returning the result of a
7378 function. ARGUMENTS is the value returned by
7379 __builtin_apply_args. ARGSIZE is the number of bytes of
7380 arguments that must be copied. ??? How should this value be
7381 computed? We'll also need a safe worst case value for varargs
7383 case BUILT_IN_APPLY
:
7384 if (!validate_arglist (exp
, POINTER_TYPE
,
7385 POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
)
7386 && !validate_arglist (exp
, REFERENCE_TYPE
,
7387 POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
7393 ops
[0] = expand_normal (CALL_EXPR_ARG (exp
, 0));
7394 ops
[1] = expand_normal (CALL_EXPR_ARG (exp
, 1));
7395 ops
[2] = expand_normal (CALL_EXPR_ARG (exp
, 2));
7397 return expand_builtin_apply (ops
[0], ops
[1], ops
[2]);
7400 /* __builtin_return (RESULT) causes the function to return the
7401 value described by RESULT. RESULT is address of the block of
7402 memory returned by __builtin_apply. */
7403 case BUILT_IN_RETURN
:
7404 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
7405 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp
, 0)));
7408 case BUILT_IN_SAVEREGS
:
7409 return expand_builtin_saveregs ();
7411 case BUILT_IN_VA_ARG_PACK
:
7412 /* All valid uses of __builtin_va_arg_pack () are removed during
7414 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp
);
7417 case BUILT_IN_VA_ARG_PACK_LEN
:
7418 /* All valid uses of __builtin_va_arg_pack_len () are removed during
7420 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp
);
7423 /* Return the address of the first anonymous stack arg. */
7424 case BUILT_IN_NEXT_ARG
:
7425 if (fold_builtin_next_arg (exp
, false))
7427 return expand_builtin_next_arg ();
7429 case BUILT_IN_CLEAR_CACHE
:
7430 target
= expand_builtin___clear_cache (exp
);
7435 case BUILT_IN_CLASSIFY_TYPE
:
7436 return expand_builtin_classify_type (exp
);
7438 case BUILT_IN_CONSTANT_P
:
7441 case BUILT_IN_FRAME_ADDRESS
:
7442 case BUILT_IN_RETURN_ADDRESS
:
7443 return expand_builtin_frame_address (fndecl
, exp
);
7445 /* Returns the address of the area where the structure is returned.
7447 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS
:
7448 if (call_expr_nargs (exp
) != 0
7449 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl
)))
7450 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl
))))
7453 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl
)), 0);
7455 CASE_BUILT_IN_ALLOCA
:
7456 target
= expand_builtin_alloca (exp
);
7461 case BUILT_IN_ASAN_ALLOCAS_UNPOISON
:
7462 return expand_asan_emit_allocas_unpoison (exp
);
7464 case BUILT_IN_STACK_SAVE
:
7465 return expand_stack_save ();
7467 case BUILT_IN_STACK_RESTORE
:
7468 expand_stack_restore (CALL_EXPR_ARG (exp
, 0));
7471 case BUILT_IN_BSWAP16
:
7472 case BUILT_IN_BSWAP32
:
7473 case BUILT_IN_BSWAP64
:
7474 target
= expand_builtin_bswap (target_mode
, exp
, target
, subtarget
);
7479 CASE_INT_FN (BUILT_IN_FFS
):
7480 target
= expand_builtin_unop (target_mode
, exp
, target
,
7481 subtarget
, ffs_optab
);
7486 CASE_INT_FN (BUILT_IN_CLZ
):
7487 target
= expand_builtin_unop (target_mode
, exp
, target
,
7488 subtarget
, clz_optab
);
7493 CASE_INT_FN (BUILT_IN_CTZ
):
7494 target
= expand_builtin_unop (target_mode
, exp
, target
,
7495 subtarget
, ctz_optab
);
7500 CASE_INT_FN (BUILT_IN_CLRSB
):
7501 target
= expand_builtin_unop (target_mode
, exp
, target
,
7502 subtarget
, clrsb_optab
);
7507 CASE_INT_FN (BUILT_IN_POPCOUNT
):
7508 target
= expand_builtin_unop (target_mode
, exp
, target
,
7509 subtarget
, popcount_optab
);
7514 CASE_INT_FN (BUILT_IN_PARITY
):
7515 target
= expand_builtin_unop (target_mode
, exp
, target
,
7516 subtarget
, parity_optab
);
7521 case BUILT_IN_STRLEN
:
7522 target
= expand_builtin_strlen (exp
, target
, target_mode
);
7527 case BUILT_IN_STRNLEN
:
7528 target
= expand_builtin_strnlen (exp
, target
, target_mode
);
7533 case BUILT_IN_STRCAT
:
7534 target
= expand_builtin_strcat (exp
, target
);
7539 case BUILT_IN_STRCPY
:
7540 target
= expand_builtin_strcpy (exp
, target
);
7545 case BUILT_IN_STRNCAT
:
7546 target
= expand_builtin_strncat (exp
, target
);
7551 case BUILT_IN_STRNCPY
:
7552 target
= expand_builtin_strncpy (exp
, target
);
7557 case BUILT_IN_STPCPY
:
7558 target
= expand_builtin_stpcpy (exp
, target
, mode
);
7563 case BUILT_IN_STPNCPY
:
7564 target
= expand_builtin_stpncpy (exp
, target
);
7569 case BUILT_IN_MEMCHR
:
7570 target
= expand_builtin_memchr (exp
, target
);
7575 case BUILT_IN_MEMCPY
:
7576 target
= expand_builtin_memcpy (exp
, target
);
7581 case BUILT_IN_MEMMOVE
:
7582 target
= expand_builtin_memmove (exp
, target
);
7587 case BUILT_IN_MEMPCPY
:
7588 target
= expand_builtin_mempcpy (exp
, target
);
7593 case BUILT_IN_MEMSET
:
7594 target
= expand_builtin_memset (exp
, target
, mode
);
7599 case BUILT_IN_BZERO
:
7600 target
= expand_builtin_bzero (exp
);
7605 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7606 back to a BUILT_IN_STRCMP. Remember to delete the 3rd paramater
7607 when changing it to a strcmp call. */
7608 case BUILT_IN_STRCMP_EQ
:
7609 target
= expand_builtin_memcmp (exp
, target
, true);
7613 /* Change this call back to a BUILT_IN_STRCMP. */
7614 TREE_OPERAND (exp
, 1)
7615 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP
));
7617 /* Delete the last parameter. */
7619 vec
<tree
, va_gc
> *arg_vec
;
7620 vec_alloc (arg_vec
, 2);
7621 for (i
= 0; i
< 2; i
++)
7622 arg_vec
->quick_push (CALL_EXPR_ARG (exp
, i
));
7623 exp
= build_call_vec (TREE_TYPE (exp
), CALL_EXPR_FN (exp
), arg_vec
);
7626 case BUILT_IN_STRCMP
:
7627 target
= expand_builtin_strcmp (exp
, target
);
7632 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7633 back to a BUILT_IN_STRNCMP. */
7634 case BUILT_IN_STRNCMP_EQ
:
7635 target
= expand_builtin_memcmp (exp
, target
, true);
7639 /* Change it back to a BUILT_IN_STRNCMP. */
7640 TREE_OPERAND (exp
, 1)
7641 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP
));
7644 case BUILT_IN_STRNCMP
:
7645 target
= expand_builtin_strncmp (exp
, target
, mode
);
7651 case BUILT_IN_MEMCMP
:
7652 case BUILT_IN_MEMCMP_EQ
:
7653 target
= expand_builtin_memcmp (exp
, target
, fcode
== BUILT_IN_MEMCMP_EQ
);
7656 if (fcode
== BUILT_IN_MEMCMP_EQ
)
7658 tree newdecl
= builtin_decl_explicit (BUILT_IN_MEMCMP
);
7659 TREE_OPERAND (exp
, 1) = build_fold_addr_expr (newdecl
);
7663 case BUILT_IN_SETJMP
:
7664 /* This should have been lowered to the builtins below. */
7667 case BUILT_IN_SETJMP_SETUP
:
7668 /* __builtin_setjmp_setup is passed a pointer to an array of five words
7669 and the receiver label. */
7670 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
7672 rtx buf_addr
= expand_expr (CALL_EXPR_ARG (exp
, 0), subtarget
,
7673 VOIDmode
, EXPAND_NORMAL
);
7674 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 1), 0);
7675 rtx_insn
*label_r
= label_rtx (label
);
7677 /* This is copied from the handling of non-local gotos. */
7678 expand_builtin_setjmp_setup (buf_addr
, label_r
);
7679 nonlocal_goto_handler_labels
7680 = gen_rtx_INSN_LIST (VOIDmode
, label_r
,
7681 nonlocal_goto_handler_labels
);
7682 /* ??? Do not let expand_label treat us as such since we would
7683 not want to be both on the list of non-local labels and on
7684 the list of forced labels. */
7685 FORCED_LABEL (label
) = 0;
7690 case BUILT_IN_SETJMP_RECEIVER
:
7691 /* __builtin_setjmp_receiver is passed the receiver label. */
7692 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
7694 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 0), 0);
7695 rtx_insn
*label_r
= label_rtx (label
);
7697 expand_builtin_setjmp_receiver (label_r
);
7702 /* __builtin_longjmp is passed a pointer to an array of five words.
7703 It's similar to the C library longjmp function but works with
7704 __builtin_setjmp above. */
7705 case BUILT_IN_LONGJMP
:
7706 if (validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
7708 rtx buf_addr
= expand_expr (CALL_EXPR_ARG (exp
, 0), subtarget
,
7709 VOIDmode
, EXPAND_NORMAL
);
7710 rtx value
= expand_normal (CALL_EXPR_ARG (exp
, 1));
7712 if (value
!= const1_rtx
)
7714 error ("%<__builtin_longjmp%> second argument must be 1");
7718 expand_builtin_longjmp (buf_addr
, value
);
7723 case BUILT_IN_NONLOCAL_GOTO
:
7724 target
= expand_builtin_nonlocal_goto (exp
);
7729 /* This updates the setjmp buffer that is its argument with the value
7730 of the current stack pointer. */
7731 case BUILT_IN_UPDATE_SETJMP_BUF
:
7732 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
7735 = expand_normal (CALL_EXPR_ARG (exp
, 0));
7737 expand_builtin_update_setjmp_buf (buf_addr
);
7743 expand_builtin_trap ();
7746 case BUILT_IN_UNREACHABLE
:
7747 expand_builtin_unreachable ();
7750 CASE_FLT_FN (BUILT_IN_SIGNBIT
):
7751 case BUILT_IN_SIGNBITD32
:
7752 case BUILT_IN_SIGNBITD64
:
7753 case BUILT_IN_SIGNBITD128
:
7754 target
= expand_builtin_signbit (exp
, target
);
7759 /* Various hooks for the DWARF 2 __throw routine. */
7760 case BUILT_IN_UNWIND_INIT
:
7761 expand_builtin_unwind_init ();
7763 case BUILT_IN_DWARF_CFA
:
7764 return virtual_cfa_rtx
;
7765 #ifdef DWARF2_UNWIND_INFO
7766 case BUILT_IN_DWARF_SP_COLUMN
:
7767 return expand_builtin_dwarf_sp_column ();
7768 case BUILT_IN_INIT_DWARF_REG_SIZES
:
7769 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp
, 0));
7772 case BUILT_IN_FROB_RETURN_ADDR
:
7773 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp
, 0));
7774 case BUILT_IN_EXTRACT_RETURN_ADDR
:
7775 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp
, 0));
7776 case BUILT_IN_EH_RETURN
:
7777 expand_builtin_eh_return (CALL_EXPR_ARG (exp
, 0),
7778 CALL_EXPR_ARG (exp
, 1));
7780 case BUILT_IN_EH_RETURN_DATA_REGNO
:
7781 return expand_builtin_eh_return_data_regno (exp
);
7782 case BUILT_IN_EXTEND_POINTER
:
7783 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp
, 0));
7784 case BUILT_IN_EH_POINTER
:
7785 return expand_builtin_eh_pointer (exp
);
7786 case BUILT_IN_EH_FILTER
:
7787 return expand_builtin_eh_filter (exp
);
7788 case BUILT_IN_EH_COPY_VALUES
:
7789 return expand_builtin_eh_copy_values (exp
);
    case BUILT_IN_VA_START:
      return expand_builtin_va_start (exp);
    case BUILT_IN_VA_END:
      return expand_builtin_va_end (exp);
    case BUILT_IN_VA_COPY:
      return expand_builtin_va_copy (exp);
    case BUILT_IN_EXPECT:
      return expand_builtin_expect (exp, target);
    case BUILT_IN_EXPECT_WITH_PROBABILITY:
      return expand_builtin_expect_with_probability (exp, target);
    case BUILT_IN_ASSUME_ALIGNED:
      return expand_builtin_assume_aligned (exp, target);
    case BUILT_IN_PREFETCH:
      expand_builtin_prefetch (exp);
      return const0_rtx;
    case BUILT_IN_INIT_TRAMPOLINE:
      return expand_builtin_init_trampoline (exp, true);
    case BUILT_IN_INIT_HEAP_TRAMPOLINE:
      return expand_builtin_init_trampoline (exp, false);
    case BUILT_IN_ADJUST_TRAMPOLINE:
      return expand_builtin_adjust_trampoline (exp);

    case BUILT_IN_INIT_DESCRIPTOR:
      return expand_builtin_init_descriptor (exp);
    case BUILT_IN_ADJUST_DESCRIPTOR:
      return expand_builtin_adjust_descriptor (exp);
    case BUILT_IN_FORK:
    case BUILT_IN_EXECL:
    case BUILT_IN_EXECV:
    case BUILT_IN_EXECLP:
    case BUILT_IN_EXECLE:
    case BUILT_IN_EXECVP:
    case BUILT_IN_EXECVE:
      target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
      if (target)
	return target;
      break;
    case BUILT_IN_SYNC_FETCH_AND_ADD_1:
    case BUILT_IN_SYNC_FETCH_AND_ADD_2:
    case BUILT_IN_SYNC_FETCH_AND_ADD_4:
    case BUILT_IN_SYNC_FETCH_AND_ADD_8:
    case BUILT_IN_SYNC_FETCH_AND_ADD_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
      target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
      if (target)
	return target;
      break;
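
      /* Illustrative example (a sketch, not part of GCC): a call such as

	     int counter;
	     int old = __sync_fetch_and_add (&counter, 5);

	 arrives here as BUILT_IN_SYNC_FETCH_AND_ADD_4; the _4 suffix
	 selects the machine mode, and the operation expands with code
	 PLUS and after == false, i.e. the pre-update value is
	 returned.  */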
    case BUILT_IN_SYNC_FETCH_AND_SUB_1:
    case BUILT_IN_SYNC_FETCH_AND_SUB_2:
    case BUILT_IN_SYNC_FETCH_AND_SUB_4:
    case BUILT_IN_SYNC_FETCH_AND_SUB_8:
    case BUILT_IN_SYNC_FETCH_AND_SUB_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
      target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_OR_1:
    case BUILT_IN_SYNC_FETCH_AND_OR_2:
    case BUILT_IN_SYNC_FETCH_AND_OR_4:
    case BUILT_IN_SYNC_FETCH_AND_OR_8:
    case BUILT_IN_SYNC_FETCH_AND_OR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
      target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_AND_1:
    case BUILT_IN_SYNC_FETCH_AND_AND_2:
    case BUILT_IN_SYNC_FETCH_AND_AND_4:
    case BUILT_IN_SYNC_FETCH_AND_AND_8:
    case BUILT_IN_SYNC_FETCH_AND_AND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
      target = expand_builtin_sync_operation (mode, exp, AND, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_XOR_1:
    case BUILT_IN_SYNC_FETCH_AND_XOR_2:
    case BUILT_IN_SYNC_FETCH_AND_XOR_4:
    case BUILT_IN_SYNC_FETCH_AND_XOR_8:
    case BUILT_IN_SYNC_FETCH_AND_XOR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
      target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_NAND_1:
    case BUILT_IN_SYNC_FETCH_AND_NAND_2:
    case BUILT_IN_SYNC_FETCH_AND_NAND_4:
    case BUILT_IN_SYNC_FETCH_AND_NAND_8:
    case BUILT_IN_SYNC_FETCH_AND_NAND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
      target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_ADD_AND_FETCH_1:
    case BUILT_IN_SYNC_ADD_AND_FETCH_2:
    case BUILT_IN_SYNC_ADD_AND_FETCH_4:
    case BUILT_IN_SYNC_ADD_AND_FETCH_8:
    case BUILT_IN_SYNC_ADD_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_SUB_AND_FETCH_1:
    case BUILT_IN_SYNC_SUB_AND_FETCH_2:
    case BUILT_IN_SYNC_SUB_AND_FETCH_4:
    case BUILT_IN_SYNC_SUB_AND_FETCH_8:
    case BUILT_IN_SYNC_SUB_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_OR_AND_FETCH_1:
    case BUILT_IN_SYNC_OR_AND_FETCH_2:
    case BUILT_IN_SYNC_OR_AND_FETCH_4:
    case BUILT_IN_SYNC_OR_AND_FETCH_8:
    case BUILT_IN_SYNC_OR_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_AND_AND_FETCH_1:
    case BUILT_IN_SYNC_AND_AND_FETCH_2:
    case BUILT_IN_SYNC_AND_AND_FETCH_4:
    case BUILT_IN_SYNC_AND_AND_FETCH_8:
    case BUILT_IN_SYNC_AND_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, AND, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_XOR_AND_FETCH_1:
    case BUILT_IN_SYNC_XOR_AND_FETCH_2:
    case BUILT_IN_SYNC_XOR_AND_FETCH_4:
    case BUILT_IN_SYNC_XOR_AND_FETCH_8:
    case BUILT_IN_SYNC_XOR_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_NAND_AND_FETCH_1:
    case BUILT_IN_SYNC_NAND_AND_FETCH_2:
    case BUILT_IN_SYNC_NAND_AND_FETCH_4:
    case BUILT_IN_SYNC_NAND_AND_FETCH_8:
    case BUILT_IN_SYNC_NAND_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
      if (target)
	return target;
      break;
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
      if (mode == VOIDmode)
	mode = TYPE_MODE (boolean_type_node);
      if (!target || !register_operand (target, mode))
	target = gen_reg_rtx (mode);

      mode = get_builtin_sync_mode
	(fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
      target = expand_builtin_compare_and_swap (mode, exp, true, target);
      if (target)
	return target;
      break;
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
      mode = get_builtin_sync_mode
	(fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
      target = expand_builtin_compare_and_swap (mode, exp, false, target);
      if (target)
	return target;
      break;
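
      /* Illustrative example (a sketch, not part of GCC):

	     int v;
	     _Bool ok = __sync_bool_compare_and_swap (&v, 0, 1);
	     int old = __sync_val_compare_and_swap (&v, 1, 2);

	 The _bool_ flavor takes the is_bool == true path above, which
	 first forces the result into a register of boolean mode; the
	 _val_ flavor instead returns the memory's prior contents.  */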
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
      target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_LOCK_RELEASE_1:
    case BUILT_IN_SYNC_LOCK_RELEASE_2:
    case BUILT_IN_SYNC_LOCK_RELEASE_4:
    case BUILT_IN_SYNC_LOCK_RELEASE_8:
    case BUILT_IN_SYNC_LOCK_RELEASE_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
      expand_builtin_sync_lock_release (mode, exp);
      return const0_rtx;

    case BUILT_IN_SYNC_SYNCHRONIZE:
      expand_builtin_sync_synchronize ();
      return const0_rtx;
    case BUILT_IN_ATOMIC_EXCHANGE_1:
    case BUILT_IN_ATOMIC_EXCHANGE_2:
    case BUILT_IN_ATOMIC_EXCHANGE_4:
    case BUILT_IN_ATOMIC_EXCHANGE_8:
    case BUILT_IN_ATOMIC_EXCHANGE_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
      target = expand_builtin_atomic_exchange (mode, exp, target);
      if (target)
	return target;
      break;
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
      {
	unsigned int nargs, z;
	vec<tree, va_gc> *vec;

	mode
	  = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
	target = expand_builtin_atomic_compare_exchange (mode, exp, target);
	if (target)
	  return target;

	/* If this is turned into an external library call, the weak parameter
	   must be dropped to match the expected parameter list.  */
	nargs = call_expr_nargs (exp);
	vec_alloc (vec, nargs - 1);
	for (z = 0; z < 3; z++)
	  vec->quick_push (CALL_EXPR_ARG (exp, z));
	/* Skip the boolean weak parameter.  */
	for (z = 4; z < 6; z++)
	  vec->quick_push (CALL_EXPR_ARG (exp, z));
	exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
	break;
      }
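
      /* Illustrative example (a sketch, not part of GCC): a source-level

	     int v, expected = 0;
	     __atomic_compare_exchange_n (&v, &expected, 1, 0,
					  __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);

	 carries a boolean weak argument (the fourth one), but the
	 external __atomic_compare_exchange_N library routines take no
	 such parameter, which is why the loop above rebuilds the call
	 without argument 3 before falling through.  */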
    case BUILT_IN_ATOMIC_LOAD_1:
    case BUILT_IN_ATOMIC_LOAD_2:
    case BUILT_IN_ATOMIC_LOAD_4:
    case BUILT_IN_ATOMIC_LOAD_8:
    case BUILT_IN_ATOMIC_LOAD_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
      target = expand_builtin_atomic_load (mode, exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_STORE_1:
    case BUILT_IN_ATOMIC_STORE_2:
    case BUILT_IN_ATOMIC_STORE_4:
    case BUILT_IN_ATOMIC_STORE_8:
    case BUILT_IN_ATOMIC_STORE_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
      target = expand_builtin_atomic_store (mode, exp);
      if (target)
	return const0_rtx;
      break;
    case BUILT_IN_ATOMIC_ADD_FETCH_1:
    case BUILT_IN_ATOMIC_ADD_FETCH_2:
    case BUILT_IN_ATOMIC_ADD_FETCH_4:
    case BUILT_IN_ATOMIC_ADD_FETCH_8:
    case BUILT_IN_ATOMIC_ADD_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
				       (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }
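
      /* Illustrative note (a sketch, not part of GCC's documentation):
	 only the fetch-first forms exist as library routines, so when
	 no inline expansion is possible,

	     int r = __atomic_add_fetch (&v, n, __ATOMIC_SEQ_CST);

	 can still be emitted as the equivalent of
	 __atomic_fetch_add (&v, n, __ATOMIC_SEQ_CST) + n; passing the
	 matching BUILT_IN_ATOMIC_FETCH_ADD_* code as LIB above is what
	 makes that fallback available.  */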
    case BUILT_IN_ATOMIC_SUB_FETCH_1:
    case BUILT_IN_ATOMIC_SUB_FETCH_2:
    case BUILT_IN_ATOMIC_SUB_FETCH_4:
    case BUILT_IN_ATOMIC_SUB_FETCH_8:
    case BUILT_IN_ATOMIC_SUB_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
				       (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }

    case BUILT_IN_ATOMIC_AND_FETCH_1:
    case BUILT_IN_ATOMIC_AND_FETCH_2:
    case BUILT_IN_ATOMIC_AND_FETCH_4:
    case BUILT_IN_ATOMIC_AND_FETCH_8:
    case BUILT_IN_ATOMIC_AND_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
				       (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }

    case BUILT_IN_ATOMIC_NAND_FETCH_1:
    case BUILT_IN_ATOMIC_NAND_FETCH_2:
    case BUILT_IN_ATOMIC_NAND_FETCH_4:
    case BUILT_IN_ATOMIC_NAND_FETCH_8:
    case BUILT_IN_ATOMIC_NAND_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
				       (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }

    case BUILT_IN_ATOMIC_XOR_FETCH_1:
    case BUILT_IN_ATOMIC_XOR_FETCH_2:
    case BUILT_IN_ATOMIC_XOR_FETCH_4:
    case BUILT_IN_ATOMIC_XOR_FETCH_8:
    case BUILT_IN_ATOMIC_XOR_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
				       (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }

    case BUILT_IN_ATOMIC_OR_FETCH_1:
    case BUILT_IN_ATOMIC_OR_FETCH_2:
    case BUILT_IN_ATOMIC_OR_FETCH_4:
    case BUILT_IN_ATOMIC_OR_FETCH_8:
    case BUILT_IN_ATOMIC_OR_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
				       (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }
    case BUILT_IN_ATOMIC_FETCH_ADD_1:
    case BUILT_IN_ATOMIC_FETCH_ADD_2:
    case BUILT_IN_ATOMIC_FETCH_ADD_4:
    case BUILT_IN_ATOMIC_FETCH_ADD_8:
    case BUILT_IN_ATOMIC_FETCH_ADD_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_SUB_1:
    case BUILT_IN_ATOMIC_FETCH_SUB_2:
    case BUILT_IN_ATOMIC_FETCH_SUB_4:
    case BUILT_IN_ATOMIC_FETCH_SUB_8:
    case BUILT_IN_ATOMIC_FETCH_SUB_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_AND_1:
    case BUILT_IN_ATOMIC_FETCH_AND_2:
    case BUILT_IN_ATOMIC_FETCH_AND_4:
    case BUILT_IN_ATOMIC_FETCH_AND_8:
    case BUILT_IN_ATOMIC_FETCH_AND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_NAND_1:
    case BUILT_IN_ATOMIC_FETCH_NAND_2:
    case BUILT_IN_ATOMIC_FETCH_NAND_4:
    case BUILT_IN_ATOMIC_FETCH_NAND_8:
    case BUILT_IN_ATOMIC_FETCH_NAND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_XOR_1:
    case BUILT_IN_ATOMIC_FETCH_XOR_2:
    case BUILT_IN_ATOMIC_FETCH_XOR_4:
    case BUILT_IN_ATOMIC_FETCH_XOR_8:
    case BUILT_IN_ATOMIC_FETCH_XOR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_OR_1:
    case BUILT_IN_ATOMIC_FETCH_OR_2:
    case BUILT_IN_ATOMIC_FETCH_OR_4:
    case BUILT_IN_ATOMIC_FETCH_OR_8:
    case BUILT_IN_ATOMIC_FETCH_OR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;
    case BUILT_IN_ATOMIC_TEST_AND_SET:
      return expand_builtin_atomic_test_and_set (exp, target);

    case BUILT_IN_ATOMIC_CLEAR:
      return expand_builtin_atomic_clear (exp);

    case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
      return expand_builtin_atomic_always_lock_free (exp);

    case BUILT_IN_ATOMIC_IS_LOCK_FREE:
      target = expand_builtin_atomic_is_lock_free (exp);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_THREAD_FENCE:
      expand_builtin_atomic_thread_fence (exp);
      return const0_rtx;

    case BUILT_IN_ATOMIC_SIGNAL_FENCE:
      expand_builtin_atomic_signal_fence (exp);
      return const0_rtx;
    case BUILT_IN_OBJECT_SIZE:
      return expand_builtin_object_size (exp);

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      target = expand_builtin_memory_chk (exp, target, mode, fcode);
      if (target)
	return target;
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
    case BUILT_IN_STRCAT_CHK:
    case BUILT_IN_STRNCAT_CHK:
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      maybe_emit_chk_warning (exp, fcode);
      break;

    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      maybe_emit_sprintf_chk_warning (exp, fcode);
      break;

    case BUILT_IN_FREE:
      if (warn_free_nonheap_object)
	maybe_emit_free_warning (exp);
      break;
    case BUILT_IN_THREAD_POINTER:
      return expand_builtin_thread_pointer (exp, target);

    case BUILT_IN_SET_THREAD_POINTER:
      expand_builtin_set_thread_pointer (exp);
      return const0_rtx;

    case BUILT_IN_ACC_ON_DEVICE:
      /* Do library call, if we failed to expand the builtin when
	 folding.  */
      break;

    case BUILT_IN_GOACC_PARLEVEL_ID:
    case BUILT_IN_GOACC_PARLEVEL_SIZE:
      return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);

    case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
      return expand_speculation_safe_value (VOIDmode, exp, target, ignore);

    case BUILT_IN_SPECULATION_SAFE_VALUE_1:
    case BUILT_IN_SPECULATION_SAFE_VALUE_2:
    case BUILT_IN_SPECULATION_SAFE_VALUE_4:
    case BUILT_IN_SPECULATION_SAFE_VALUE_8:
    case BUILT_IN_SPECULATION_SAFE_VALUE_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
      return expand_speculation_safe_value (mode, exp, target, ignore);
    default:	/* just do library call, if unknown builtin */
      break;
    }

  /* The switch statement above can drop through to cause the function
     to be called normally.  */
  return expand_call (exp, target, ignore);
}

/* Determine whether a tree node represents a call to a built-in
   function.  If the tree T is a call to a built-in function with
   the right number of arguments of the appropriate types, return
   the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
   Otherwise the return value is END_BUILTINS.  */

enum built_in_function
builtin_mathfn_code (const_tree t)
{
  const_tree fndecl, arg, parmlist;
  const_tree argtype, parmtype;
  const_call_expr_arg_iterator iter;

  if (TREE_CODE (t) != CALL_EXPR)
    return END_BUILTINS;

  fndecl = get_callee_fndecl (t);
  if (fndecl == NULL_TREE || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
    return END_BUILTINS;

  parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  init_const_call_expr_arg_iterator (t, &iter);
  for (; parmlist; parmlist = TREE_CHAIN (parmlist))
    {
      /* If a function doesn't take a variable number of arguments,
	 the last element in the list will have type `void'.  */
      parmtype = TREE_VALUE (parmlist);
      if (VOID_TYPE_P (parmtype))
	{
	  if (more_const_call_expr_args_p (&iter))
	    return END_BUILTINS;
	  return DECL_FUNCTION_CODE (fndecl);
	}

      if (! more_const_call_expr_args_p (&iter))
	return END_BUILTINS;

      arg = next_const_call_expr_arg (&iter);
      argtype = TREE_TYPE (arg);

      if (SCALAR_FLOAT_TYPE_P (parmtype))
	{
	  if (! SCALAR_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (COMPLEX_FLOAT_TYPE_P (parmtype))
	{
	  if (! COMPLEX_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (POINTER_TYPE_P (parmtype))
	{
	  if (! POINTER_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (INTEGRAL_TYPE_P (parmtype))
	{
	  if (! INTEGRAL_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else
	return END_BUILTINS;
    }

  /* Variable-length argument list.  */
  return DECL_FUNCTION_CODE (fndecl);
}
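
/* Illustrative example (a sketch, not part of GCC): given a CALL_EXPR T
   for sqrt (2.0), builtin_mathfn_code (T) returns BUILT_IN_SQRT, since
   the single argument is of SCALAR_FLOAT type as the parameter list
   requires; a call with a mismatched argument type, or to anything that
   is not a BUILT_IN_NORMAL function, yields END_BUILTINS.  */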

/* Fold a call to __builtin_constant_p, if we know its argument ARG will
   evaluate to a constant.  */

static tree
fold_builtin_constant_p (tree arg)
{
  /* We return 1 for a numeric type that's known to be a constant
     value at compile-time or for an aggregate type that's a
     literal constant.  */
  STRIP_NOPS (arg);

  /* If we know this is a constant, emit the constant of one.  */
  if (CONSTANT_CLASS_P (arg)
      || (TREE_CODE (arg) == CONSTRUCTOR
	  && TREE_CONSTANT (arg)))
    return integer_one_node;
  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (arg, 0);
      if (TREE_CODE (op) == STRING_CST
	  || (TREE_CODE (op) == ARRAY_REF
	      && integer_zerop (TREE_OPERAND (op, 1))
	      && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
	return integer_one_node;
    }

  /* If this expression has side effects, show we don't know it to be a
     constant.  Likewise if it's a pointer or aggregate type since in
     those cases we only want literals, since those are only optimized
     when generating RTL, not later.
     And finally, if we are compiling an initializer, not code, we
     need to return a definite result now; there's not going to be any
     more optimization done.  */
  if (TREE_SIDE_EFFECTS (arg)
      || AGGREGATE_TYPE_P (TREE_TYPE (arg))
      || POINTER_TYPE_P (TREE_TYPE (arg))
      || cfun == 0
      || folding_initializer
      || force_folding_builtin_constant_p)
    return integer_zero_node;

  return NULL_TREE;
}
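
/* Illustrative examples (a sketch, not part of GCC) of the logic above:

       __builtin_constant_p (42)      folds to 1  (CONSTANT_CLASS_P)
       __builtin_constant_p ("abc")   folds to 1  (address of a STRING_CST)
       __builtin_constant_p (i++)     folds to 0  (side effects)
       __builtin_constant_p (i)       stays unfolded (NULL_TREE) so that
				      later optimization may resolve it.  */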

/* Create builtin_expect or builtin_expect_with_probability
   with PRED and EXPECTED as its arguments and return it as a truthvalue.
   Fortran FE can also produce builtin_expect with PREDICTOR as third argument.
   builtin_expect_with_probability instead uses third argument as PROBABILITY
   value.  */

static tree
build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
				tree predictor, tree probability)
{
  tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;

  fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
			      : BUILT_IN_EXPECT_WITH_PROBABILITY);
  arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
  ret_type = TREE_TYPE (TREE_TYPE (fn));
  pred_type = TREE_VALUE (arg_types);
  expected_type = TREE_VALUE (TREE_CHAIN (arg_types));

  pred = fold_convert_loc (loc, pred_type, pred);
  expected = fold_convert_loc (loc, expected_type, expected);

  if (probability)
    call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
  else
    call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
				     predictor);

  return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
		 build_int_cst (ret_type, 0));
}

/* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3.  Return
   NULL_TREE if no simplification is possible.  */

static tree
fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
		     tree arg3)
{
  tree inner, fndecl, inner_arg0;
  enum tree_code code;

  /* Distribute the expected value over short-circuiting operators.
     See through the cast from truthvalue_type_node to long.  */
  inner_arg0 = arg0;
  while (CONVERT_EXPR_P (inner_arg0)
	 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
    inner_arg0 = TREE_OPERAND (inner_arg0, 0);

  /* If this is a builtin_expect within a builtin_expect keep the
     inner one.  See through a comparison against a constant.  It
     might have been added to create a truthvalue.  */
  inner = inner_arg0;

  if (COMPARISON_CLASS_P (inner)
      && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
    inner = TREE_OPERAND (inner, 0);

  if (TREE_CODE (inner) == CALL_EXPR
      && (fndecl = get_callee_fndecl (inner))
      && (fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
	  || fndecl_built_in_p (fndecl, BUILT_IN_EXPECT_WITH_PROBABILITY)))
    return arg0;

  inner = inner_arg0;
  code = TREE_CODE (inner);
  if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
    {
      tree op0 = TREE_OPERAND (inner, 0);
      tree op1 = TREE_OPERAND (inner, 1);
      arg1 = save_expr (arg1);

      op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
      op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
      inner = build2 (code, TREE_TYPE (inner), op0, op1);

      return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
    }

  /* If the argument isn't invariant then there's nothing else we can do.  */
  if (!TREE_CONSTANT (inner_arg0))
    return NULL_TREE;

  /* If we expect that a comparison against the argument will fold to
     a constant return the constant.  In practice, this means a true
     constant or the address of a non-weak symbol.  */
  inner = inner_arg0;
  STRIP_NOPS (inner);
  if (TREE_CODE (inner) == ADDR_EXPR)
    {
      do
	inner = TREE_OPERAND (inner, 0);
      while (TREE_CODE (inner) == COMPONENT_REF
	     || TREE_CODE (inner) == ARRAY_REF);
      if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
	return NULL_TREE;
    }

  /* Otherwise, ARG0 already has the proper type for the return value.  */
  return arg0;
}
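
/* Illustrative example (a sketch, not part of GCC): the distribution
   over short-circuit operators above turns

       __builtin_expect (a && b, 1)

   into the equivalent of

       __builtin_expect (a, 1) && __builtin_expect (b, 1)

   so that each half of the condition carries the branch prediction.  */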

/* Fold a call to __builtin_classify_type with argument ARG.  */

static tree
fold_builtin_classify_type (tree arg)
{
  if (arg == 0)
    return build_int_cst (integer_type_node, no_type_class);

  return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
}

/* Fold a call to __builtin_strlen with argument ARG.  */

static tree
fold_builtin_strlen (location_t loc, tree type, tree arg)
{
  if (!validate_arg (arg, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      c_strlen_data data;
      memset (&data, 0, sizeof (c_strlen_data));
      tree len = c_strlen (arg, 0, &data);

      if (len)
	return fold_convert_loc (loc, type, len);

      if (!data.decl)
	c_strlen (arg, 1, &data);

      if (data.decl)
	{
	  /* Use EXPR_LOCATION of ARG if available.  */
	  if (EXPR_HAS_LOCATION (arg))
	    loc = EXPR_LOCATION (arg);
	  else if (loc == UNKNOWN_LOCATION)
	    loc = input_location;
	  warn_string_no_nul (loc, "strlen", arg, data.decl);
	}

      return NULL_TREE;
    }
}
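
/* Illustrative example (a sketch, not part of GCC): c_strlen resolves
   strlen ("abcd") to the constant 4, which is converted to TYPE above;
   for an array known not to contain a terminating nul, the second
   c_strlen call fills in DATA.DECL so the missing-nul warning can
   point at the offending declaration.  */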

/* Fold a call to __builtin_inf or __builtin_huge_val.  */

static tree
fold_builtin_inf (location_t loc, tree type, int warn)
{
  REAL_VALUE_TYPE real;

  /* __builtin_inff is intended to be usable to define INFINITY on all
     targets.  If an infinity is not available, INFINITY expands "to a
     positive constant of type float that overflows at translation
     time", footnote "In this case, using INFINITY will violate the
     constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
     Thus we pedwarn to ensure this constraint violation is
     diagnosed.  */
  if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
    pedwarn (loc, 0, "target format does not support infinity");

  real_inf (&real);
  return build_real (type, real);
}

/* Fold function call to builtin sincos, sincosf, or sincosl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_sincos (location_t loc,
		     tree arg0, tree arg1, tree arg2)
{
  tree type;
  tree fndecl, call = NULL_TREE;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  type = TREE_TYPE (arg0);

  /* Calculate the result when the argument is a constant.  */
  built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
  if (fn == END_BUILTINS)
    return NULL_TREE;

  /* Canonicalize sincos to cexpi.  */
  if (TREE_CODE (arg0) == REAL_CST)
    {
      tree complex_type = build_complex_type (type);
      call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
    }
  if (!call)
    {
      if (!targetm.libc_has_function (function_c99_math_complex)
	  || !builtin_decl_implicit_p (fn))
	return NULL_TREE;
      fndecl = builtin_decl_explicit (fn);
      call = build_call_expr_loc (loc, fndecl, 1, arg0);
      call = builtin_save_expr (call);
    }

  tree ptype = build_pointer_type (type);
  arg1 = fold_convert (ptype, arg1);
  arg2 = fold_convert (ptype, arg2);
  return build2 (COMPOUND_EXPR, void_type_node,
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg1),
			 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg2),
			 fold_build1_loc (loc, REALPART_EXPR, type, call)));
}
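
/* Illustrative example (a sketch, not part of GCC): when the cexpi
   builtin is usable, the canonicalization above rewrites

       sincos (x, &s, &c);

   as the equivalent of

       _Complex double t = cexpi (x);
       s = __imag__ t;
       c = __real__ t;

   exposing the computation to complex-arithmetic folding.  */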

/* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
{
  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
				  arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg1)));
      tree ind2
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
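
/* Illustrative examples (a sketch, not part of GCC) of the folds above:

       memcmp (p, q, 0)   ->  0, still evaluating p and q
       memcmp (p, p, n)   ->  0, still evaluating n
       memcmp (p, q, 1)   ->  *(const unsigned char *) p
			      - *(const unsigned char *) q  */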

/* Fold a call to builtin isascii with argument ARG.  */

static tree
fold_builtin_isascii (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      /* Transform isascii(c) -> ((c & ~0x7f) == 0).  */
      arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
			 build_int_cst (integer_type_node,
					~ (unsigned HOST_WIDE_INT) 0x7f));
      return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
			      arg, integer_zero_node);
    }
}

/* Fold a call to builtin toascii with argument ARG.  */

static tree
fold_builtin_toascii (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Transform toascii(c) -> (c & 0x7f).  */
  return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
			  build_int_cst (integer_type_node, 0x7f));
}

/* Fold a call to builtin isdigit with argument ARG.  */

static tree
fold_builtin_isdigit (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9.  */
      /* According to the C standard, isdigit is unaffected by locale.
	 However, it definitely is affected by the target character set.  */
      unsigned HOST_WIDE_INT target_digit0
	= lang_hooks.to_target_charset ('0');

      if (target_digit0 == 0)
	return NULL_TREE;

      arg = fold_convert_loc (loc, unsigned_type_node, arg);
      arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
			 build_int_cst (unsigned_type_node, target_digit0));
      return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
			      build_int_cst (unsigned_type_node, 9));
    }
}
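
/* Illustrative arithmetic (not part of GCC) for the fold above, with
   ASCII target_digit0 == 48: isdigit (c) becomes
   (unsigned) c - 48 <= 9.  For c == '5' (53) the subtraction yields 5,
   within range; for c == '/' (47) it wraps to a huge unsigned value,
   so one unsigned comparison performs both range checks.  */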

/* Fold a call to fabs, fabsf or fabsl with argument ARG.  */

static tree
fold_builtin_fabs (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  arg = fold_convert_loc (loc, type, arg);
  return fold_build1_loc (loc, ABS_EXPR, type, arg);
}

/* Fold a call to abs, labs, llabs or imaxabs with argument ARG.  */

static tree
fold_builtin_abs (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  arg = fold_convert_loc (loc, type, arg);
  return fold_build1_loc (loc, ABS_EXPR, type, arg);
}

/* Fold a call to builtin carg(a+bi) -> atan2(b,a).  */

static tree
fold_builtin_carg (location_t loc, tree arg, tree type)
{
  if (validate_arg (arg, COMPLEX_TYPE)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
    {
      tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);

      if (atan2_fn)
	{
	  tree new_arg = builtin_save_expr (arg);
	  tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
	  tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
	  return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
	}
    }

  return NULL_TREE;
}
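
/* Illustrative example (a sketch, not part of GCC): the fold above
   rewrites carg (z) as atan2 (__imag__ z, __real__ z), with z saved
   first so it is evaluated only once.  */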

/* Fold a call to builtin frexp, we can assume the base is 2.  */

static tree
fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      tree frac, exp;

      switch (value->cl)
	{
	case rvc_zero:
	  /* For +-0, return (*exp = 0, +-0).  */
	  exp = integer_zero_node;
	  frac = arg0;
	  break;
	case rvc_nan:
	case rvc_inf:
	  /* For +-NaN or +-Inf, *exp is unspecified, return arg0.  */
	  return omit_one_operand_loc (loc, rettype, arg0, arg1);
	case rvc_normal:
	  {
	    /* Since the frexp function always expects base 2, and in
	       GCC normalized significands are already in the range
	       [0.5, 1.0), we have exactly what frexp wants.  */
	    REAL_VALUE_TYPE frac_rvt = *value;
	    SET_REAL_EXP (&frac_rvt, 0);
	    frac = build_real (rettype, frac_rvt);
	    exp = build_int_cst (integer_type_node, REAL_EXP (value));
	  }
	  break;
	default:
	  gcc_unreachable ();
	}

      /* Create the COMPOUND_EXPR (*arg1 = exp, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
    }

  return NULL_TREE;
}
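
/* Illustrative example (a sketch, not part of GCC): for a constant
   argument the code above evaluates frexp at compile time, so

       double f = frexp (8.0, &e);

   folds to the equivalent of (e = 4, 0.5): 8.0 == 0.5 * 2**4, and
   GCC's normalized significands already lie in [0.5, 1.0).  */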

/* Fold a call to builtin modf.  */

static tree
fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      REAL_VALUE_TYPE trunc, frac;

      switch (value->cl)
	{
	case rvc_nan:
	case rvc_zero:
	  /* For +-NaN or +-0, return (*arg1 = arg0, arg0).  */
	  trunc = frac = *value;
	  break;
	case rvc_inf:
	  /* For +-Inf, return (*arg1 = arg0, +-0).  */
	  frac = dconst0;
	  frac.sign = value->sign;
	  trunc = *value;
	  break;
	case rvc_normal:
	  /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)).  */
	  real_trunc (&trunc, VOIDmode, value);
	  real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
	  /* If the original number was negative and already
	     integral, then the fractional part is -0.0.  */
	  if (value->sign && frac.cl == rvc_zero)
	    frac.sign = value->sign;
	  break;
	}

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
			      build_real (rettype, trunc));
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
			      build_real (rettype, frac));
    }

  return NULL_TREE;
}

/* Given a location LOC, an interclass builtin function decl FNDECL
   and its single argument ARG, return a folded expression computing
   the same, or NULL_TREE if we either couldn't or didn't want to fold
   (the latter happens if there's an RTL instruction available).  */

static tree
fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
{
  machine_mode mode;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
    return NULL_TREE;

  mode = TYPE_MODE (TREE_TYPE (arg));

  bool is_ibm_extended = MODE_COMPOSITE_P (mode);

  /* If there is no optab, try generic code.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
      tree result;

    CASE_FLT_FN (BUILT_IN_ISINF):
      {
	/* isinf(x) -> isgreater(fabs(x),DBL_MAX).  */
	tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
	tree type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	if (is_ibm_extended)
	  {
	    /* NaN and Inf are encoded in the high-order double value
	       only.  The low-order value is not significant.  */
	    type = double_type_node;
	    mode = DFmode;
	    arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
	  }
	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&r, buf);
	result = build_call_expr (isgr_fn, 2,
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	return result;
      }
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_ISFINITE:
      {
	/* isfinite(x) -> islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
	tree type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	if (is_ibm_extended)
	  {
	    /* NaN and Inf are encoded in the high-order double value
	       only.  The low-order value is not significant.  */
	    type = double_type_node;
	    mode = DFmode;
	    arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
	  }
	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&r, buf);
	result = build_call_expr (isle_fn, 2,
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	/*result = fold_build2_loc (loc, UNGT_EXPR,
				  TREE_TYPE (TREE_TYPE (fndecl)),
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
				  TREE_TYPE (TREE_TYPE (fndecl)),
				  result);*/
	return result;
      }
    case BUILT_IN_ISNORMAL:
      {
	/* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
	   islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
	tree type = TREE_TYPE (arg);
	tree orig_arg, max_exp, min_exp;
	machine_mode orig_mode = mode;
	REAL_VALUE_TYPE rmax, rmin;
	char buf[128];

	orig_arg = arg = builtin_save_expr (arg);
	if (is_ibm_extended)
	  {
	    /* Use double to test the normal range of IBM extended
	       precision.  Emin for IBM extended precision is
	       different to emin for IEEE double, being 53 higher
	       since the low double exponent is at least 53 lower
	       than the high double exponent.  */
	    type = double_type_node;
	    mode = DFmode;
	    arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
	  }
	arg = fold_build1_loc (loc, ABS_EXPR, type, arg);

	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&rmax, buf);
	sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
	real_from_string (&rmin, buf);
	max_exp = build_real (type, rmax);
	min_exp = build_real (type, rmin);

	max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
	if (is_ibm_extended)
	  {
	    /* Testing the high end of the range is done just using
	       the high double, using the same test as isfinite().
	       For the subnormal end of the range we first test the
	       high double, then if its magnitude is equal to the
	       limit of 0x1p-969, we test whether the low double is
	       non-zero and opposite sign to the high double.  */
	    tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
	    tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
	    tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
	    tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
				       arg, min_exp);
	    tree as_complex = build1 (VIEW_CONVERT_EXPR,
				      complex_double_type_node, orig_arg);
	    tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
	    tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
	    tree zero = build_real (type, dconst0);
	    tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
	    tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
	    tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
	    tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
				      fold_build3 (COND_EXPR,
						   integer_type_node,
						   hilt, logt, lolt));
	    eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
				  eq_min, ok_lo);
	    min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
				   gt_min, eq_min);
	  }
	else
	  {
	    tree const isge_fn
	      = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
	    min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
	  }
	result = fold_build2 (BIT_AND_EXPR, integer_type_node,
			      max_exp, min_exp);
	return result;
      }
    default:
      break;
    }

  return NULL_TREE;
}

/* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
   ARG is the argument for the call.  */

static tree
fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  switch (builtin_index)
    {
    case BUILT_IN_ISINF:
      if (!HONOR_INFINITIES (arg))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      return NULL_TREE;

    case BUILT_IN_ISINF_SIGN:
      {
	/* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
	/* In a boolean context, GCC will fold the inner COND_EXPR to
	   1.  So e.g. "if (isinf_sign(x))" would be folded to just
	   "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
	tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
	tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
	tree tmp = NULL_TREE;

	arg = builtin_save_expr (arg);

	if (signbit_fn && isinf_fn)
	  {
	    tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
	    tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);

	    signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
					    signbit_call, integer_zero_node);
	    isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
					  isinf_call, integer_zero_node);

	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
				   integer_minus_one_node, integer_one_node);
	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
				   isinf_call, tmp,
				   integer_zero_node);
	  }

	return tmp;
      }

    case BUILT_IN_ISFINITE:
      if (!HONOR_NANS (arg)
	  && !HONOR_INFINITIES (arg))
	return omit_one_operand_loc (loc, type, integer_one_node, arg);

      return NULL_TREE;

    case BUILT_IN_ISNAN:
      if (!HONOR_NANS (arg))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      {
	bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
	if (is_ibm_extended)
	  {
	    /* NaN and Inf are encoded in the high-order double value
	       only.  The low-order value is not significant.  */
	    arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
	  }
      }
      arg = builtin_save_expr (arg);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);

    default:
      gcc_unreachable ();
    }
}

/* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
   This builtin will generate code to return the appropriate floating
   point classification depending on the value of the floating point
   number passed in.  The possible return values must be supplied as
   int arguments to the call in the following order: FP_NAN, FP_INFINITE,
   FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipsis is for exactly
   one floating point argument which is "type generic".  */

static tree
fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
{
  tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
    arg, type, res, tmp;
  machine_mode mode;
  REAL_VALUE_TYPE r;
  char buf[128];

  /* Verify the required arguments in the original call.  */
  if (nargs != 6
      || !validate_arg (args[0], INTEGER_TYPE)
      || !validate_arg (args[1], INTEGER_TYPE)
      || !validate_arg (args[2], INTEGER_TYPE)
      || !validate_arg (args[3], INTEGER_TYPE)
      || !validate_arg (args[4], INTEGER_TYPE)
      || !validate_arg (args[5], REAL_TYPE))
    return NULL_TREE;

  fp_nan = args[0];
  fp_infinite = args[1];
  fp_normal = args[2];
  fp_subnormal = args[3];
  fp_zero = args[4];
  arg = args[5];
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (type);
  arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));

  /* fpclassify(x) ->
       isnan(x) ? FP_NAN :
	 (fabs(x) == Inf ? FP_INFINITE :
	   (fabs(x) >= DBL_MIN ? FP_NORMAL :
	     (x == 0 ? FP_ZERO : FP_SUBNORMAL))).  */

  tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
			 build_real (type, dconst0));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
			 tmp, fp_zero, fp_subnormal);

  sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
  real_from_string (&r, buf);
  tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
			 arg, build_real (type, r));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);

  if (HONOR_INFINITIES (mode))
    {
      real_inf (&r);
      tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
			     build_real (type, r));
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
			     fp_infinite, res);
    }

  if (HONOR_NANS (mode))
    {
      tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
    }

  return res;
}

/* Fold a call to an unordered comparison function such as
   __builtin_isgreater().  FNDECL is the FUNCTION_DECL for the function
   being called and ARG0 and ARG1 are the arguments for the call.
   UNORDERED_CODE and ORDERED_CODE are comparison codes that give
   the opposite of the desired result.  UNORDERED_CODE is used
   for modes that can hold NaNs and ORDERED_CODE is used for
   the rest.  */

static tree
fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
			    enum tree_code unordered_code,
			    enum tree_code ordered_code)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum tree_code code;
  tree type0, type1;
  enum tree_code code0, code1;
  tree cmp_type = NULL_TREE;

  type0 = TREE_TYPE (arg0);
  type1 = TREE_TYPE (arg1);

  code0 = TREE_CODE (type0);
  code1 = TREE_CODE (type1);

  if (code0 == REAL_TYPE && code1 == REAL_TYPE)
    /* Choose the wider of two real types.  */
    cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
      ? type0 : type1;
  else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
    cmp_type = type0;
  else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
    cmp_type = type1;

  arg0 = fold_convert_loc (loc, cmp_type, arg0);
  arg1 = fold_convert_loc (loc, cmp_type, arg1);

  if (unordered_code == UNORDERED_EXPR)
    {
      if (!HONOR_NANS (arg0))
	return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
    }

  code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
  return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
			  fold_build2_loc (loc, code, type, arg0, arg1));
}
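
/* Illustrative example (a sketch, not part of GCC): for isgreater the
   caller passes UNLE_EXPR/LE_EXPR, so the fold above produces

       isgreater (x, y)  ->  !(x unle y)   when NaNs are honored
       isgreater (x, y)  ->  !(x <= y)	   otherwise

   keeping the quiet (non-trapping) semantics the builtin requires.  */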

/* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
   arithmetics if it can never overflow, or into internal functions that
   return both result of arithmetics and overflowed boolean flag in
   a complex integer result, or some other check for overflow.
   Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
   checking part of that.  */

static tree
fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
			     tree arg0, tree arg1, tree arg2)
{
  enum internal_fn ifn = IFN_LAST;
  /* The code of the expression corresponding to the type-generic
     built-in, or ERROR_MARK for the type-specific ones.  */
  enum tree_code opcode = ERROR_MARK;
  bool ovf_only = false;

  switch (fcode)
    {
    case BUILT_IN_ADD_OVERFLOW_P:
      ovf_only = true;
      /* FALLTHRU */
    case BUILT_IN_ADD_OVERFLOW:
      opcode = PLUS_EXPR;
      /* FALLTHRU */
    case BUILT_IN_SADD_OVERFLOW:
    case BUILT_IN_SADDL_OVERFLOW:
    case BUILT_IN_SADDLL_OVERFLOW:
    case BUILT_IN_UADD_OVERFLOW:
    case BUILT_IN_UADDL_OVERFLOW:
    case BUILT_IN_UADDLL_OVERFLOW:
      ifn = IFN_ADD_OVERFLOW;
      break;
    case BUILT_IN_SUB_OVERFLOW_P:
      ovf_only = true;
      /* FALLTHRU */
    case BUILT_IN_SUB_OVERFLOW:
      opcode = MINUS_EXPR;
      /* FALLTHRU */
    case BUILT_IN_SSUB_OVERFLOW:
    case BUILT_IN_SSUBL_OVERFLOW:
    case BUILT_IN_SSUBLL_OVERFLOW:
    case BUILT_IN_USUB_OVERFLOW:
    case BUILT_IN_USUBL_OVERFLOW:
    case BUILT_IN_USUBLL_OVERFLOW:
      ifn = IFN_SUB_OVERFLOW;
      break;
    case BUILT_IN_MUL_OVERFLOW_P:
      ovf_only = true;
      /* FALLTHRU */
    case BUILT_IN_MUL_OVERFLOW:
      opcode = MULT_EXPR;
      /* FALLTHRU */
    case BUILT_IN_SMUL_OVERFLOW:
    case BUILT_IN_SMULL_OVERFLOW:
    case BUILT_IN_SMULLL_OVERFLOW:
    case BUILT_IN_UMUL_OVERFLOW:
    case BUILT_IN_UMULL_OVERFLOW:
    case BUILT_IN_UMULLL_OVERFLOW:
      ifn = IFN_MUL_OVERFLOW;
      break;
    default:
      gcc_unreachable ();
    }

  /* For the "generic" overloads, the first two arguments can have different
     types and the last argument determines the target type to use to check
     for overflow.  The arguments of the other overloads all have the same
     type.  */
  tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));

  /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
     arguments are constant, attempt to fold the built-in call into a constant
     expression indicating whether or not it detected an overflow.  */
  if (ovf_only
      && TREE_CODE (arg0) == INTEGER_CST
      && TREE_CODE (arg1) == INTEGER_CST)
    /* Perform the computation in the target type and check for overflow.  */
    return omit_one_operand_loc (loc, boolean_type_node,
				 arith_overflowed_p (opcode, type, arg0, arg1)
				 ? boolean_true_node : boolean_false_node,
				 arg2);

  tree ctype = build_complex_type (type);
  tree call = build_call_expr_internal_loc (loc, ifn, ctype,
					    2, arg0, arg1);
  tree tgt = save_expr (call);
  tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
  tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
  ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);

  if (ovf_only)
    return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);

  tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
  tree store
    = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
  return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
}
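
/* Illustrative example (a sketch, not part of GCC): with constant
   operands the _p form above folds outright, e.g.
   __builtin_add_overflow_p (__INT_MAX__, 1, 0) becomes true.  The
   general form

       _Bool ovf = __builtin_add_overflow (a, b, &r);

   lowers to the equivalent of

       t = .ADD_OVERFLOW (a, b);
       r = __real__ t;
       ovf = (_Bool) __imag__ t;

   one internal-function call yielding both the wrapped result and the
   overflow flag.  */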

/* Fold a call to __builtin_FILE to a constant string.  */

static inline tree
fold_builtin_FILE (location_t loc)
{
  if (const char *fname = LOCATION_FILE (loc))
    {
      /* The documentation says this builtin is equivalent to the preprocessor
	 __FILE__ macro so it appears appropriate to use the same file prefix
	 mappings.  */
      fname = remap_macro_filename (fname);
      return build_string_literal (strlen (fname) + 1, fname);
    }

  return build_string_literal (1, "");
}

/* Fold a call to __builtin_FUNCTION to a constant string.  */

static inline tree
fold_builtin_FUNCTION ()
{
  const char *name = "";

  if (current_function_decl)
    name = lang_hooks.decl_printable_name (current_function_decl, 0);

  return build_string_literal (strlen (name) + 1, name);
}

/* Fold a call to __builtin_LINE to an integer constant.  */

static inline tree
fold_builtin_LINE (location_t loc, tree type)
{
  return build_int_cst (type, LOCATION_LINE (loc));
}

/* Fold a call to built-in function FNDECL with 0 arguments.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_0 (location_t loc, tree fndecl)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  switch (fcode)
    {
    case BUILT_IN_FILE:
      return fold_builtin_FILE (loc);

    case BUILT_IN_FUNCTION:
      return fold_builtin_FUNCTION ();

    case BUILT_IN_LINE:
      return fold_builtin_LINE (loc, type);

    CASE_FLT_FN (BUILT_IN_INF):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
    case BUILT_IN_INFD32:
    case BUILT_IN_INFD64:
    case BUILT_IN_INFD128:
      return fold_builtin_inf (loc, type, true);

    CASE_FLT_FN (BUILT_IN_HUGE_VAL):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
      return fold_builtin_inf (loc, type, false);

    case BUILT_IN_CLASSIFY_TYPE:
      return fold_builtin_classify_type (NULL_TREE);

    default:
      break;
    }
  return NULL_TREE;
}

/* Fold a call to built-in function FNDECL with 1 argument, ARG0.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (TREE_CODE (arg0) == ERROR_MARK)
    return NULL_TREE;

  if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
    return ret;

  switch (fcode)
    {
    case BUILT_IN_CONSTANT_P:
      {
	tree val = fold_builtin_constant_p (arg0);

	/* Gimplification will pull the CALL_EXPR for the builtin out of
	   an if condition.  When not optimizing, we'll not CSE it back.
	   To avoid link error types of regressions, return false now.  */
	if (!val && !optimize)
	  val = integer_zero_node;

	return val;
      }

    case BUILT_IN_CLASSIFY_TYPE:
      return fold_builtin_classify_type (arg0);

    case BUILT_IN_STRLEN:
      return fold_builtin_strlen (loc, type, arg0);

    CASE_FLT_FN (BUILT_IN_FABS):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
    case BUILT_IN_FABSD32:
    case BUILT_IN_FABSD64:
    case BUILT_IN_FABSD128:
      return fold_builtin_fabs (loc, arg0, type);

    case BUILT_IN_ABS:
    case BUILT_IN_LABS:
    case BUILT_IN_LLABS:
    case BUILT_IN_IMAXABS:
      return fold_builtin_abs (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_CONJ):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
      break;

    CASE_FLT_FN (BUILT_IN_CREAL):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
      break;

    CASE_FLT_FN (BUILT_IN_CIMAG):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
      break;

    CASE_FLT_FN (BUILT_IN_CARG):
      return fold_builtin_carg (loc, arg0, type);

    case BUILT_IN_ISASCII:
      return fold_builtin_isascii (loc, arg0);

    case BUILT_IN_TOASCII:
      return fold_builtin_toascii (loc, arg0);

    case BUILT_IN_ISDIGIT:
      return fold_builtin_isdigit (loc, arg0);

    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISFINITE:
      {
	tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
	if (ret)
	  return ret;
	return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
      }

    CASE_FLT_FN (BUILT_IN_ISINF):
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      {
	tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
	if (ret)
	  return ret;
	return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
      }

    case BUILT_IN_ISNORMAL:
      return fold_builtin_interclass_mathfn (loc, fndecl, arg0);

    case BUILT_IN_ISINF_SIGN:
      return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);

    CASE_FLT_FN (BUILT_IN_ISNAN):
    case BUILT_IN_ISNAND32:
    case BUILT_IN_ISNAND64:
    case BUILT_IN_ISNAND128:
      return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);

    case BUILT_IN_FREE:
      if (integer_zerop (arg0))
	return build_empty_stmt (loc);
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (TREE_CODE (arg0) == ERROR_MARK
      || TREE_CODE (arg1) == ERROR_MARK)
    return NULL_TREE;

  if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
    return ret;

  switch (fcode)
    {
    CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
    CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, POINTER_TYPE))
	return do_mpfr_lgamma_r (arg0, arg1, type);
      break;

    CASE_FLT_FN (BUILT_IN_FREXP):
      return fold_builtin_frexp (loc, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_MODF):
      return fold_builtin_modf (loc, arg0, arg1, type);

    case BUILT_IN_STRSPN:
      return fold_builtin_strspn (loc, arg0, arg1);

    case BUILT_IN_STRCSPN:
      return fold_builtin_strcspn (loc, arg0, arg1);

    case BUILT_IN_STRPBRK:
      return fold_builtin_strpbrk (loc, arg0, arg1, type);

    case BUILT_IN_EXPECT:
      return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);

    case BUILT_IN_ISGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNLE_EXPR, LE_EXPR);
    case BUILT_IN_ISGREATEREQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNLT_EXPR, LT_EXPR);
    case BUILT_IN_ISLESS:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNGE_EXPR, GE_EXPR);
    case BUILT_IN_ISLESSEQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNGT_EXPR, GT_EXPR);
    case BUILT_IN_ISLESSGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
    case BUILT_IN_ISUNORDERED:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNORDERED_EXPR,
					 NOP_EXPR);

      /* We do the folding for va_start in the expander.  */
    case BUILT_IN_VA_START:
      break;

    case BUILT_IN_OBJECT_SIZE:
      return fold_builtin_object_size (arg0, arg1);

    case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
      return fold_builtin_atomic_always_lock_free (arg0, arg1);

    case BUILT_IN_ATOMIC_IS_LOCK_FREE:
      return fold_builtin_atomic_is_lock_free (arg0, arg1);

    default:
      break;
    }
  return NULL_TREE;
}
/* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
   and ARG2.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_3 (location_t loc, tree fndecl,
                tree arg0, tree arg1, tree arg2)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (TREE_CODE (arg0) == ERROR_MARK
      || TREE_CODE (arg1) == ERROR_MARK
      || TREE_CODE (arg2) == ERROR_MARK)
    return NULL_TREE;

  if (tree ret = fold_const_call (as_combined_fn (fcode), type,
                                  arg0, arg1, arg2))
    return ret;

  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_SINCOS):
      return fold_builtin_sincos (loc, arg0, arg1, arg2);

    CASE_FLT_FN (BUILT_IN_REMQUO):
      if (validate_arg (arg0, REAL_TYPE)
          && validate_arg (arg1, REAL_TYPE)
          && validate_arg (arg2, POINTER_TYPE))
        return do_mpfr_remquo (arg0, arg1, arg2);
      break;

    case BUILT_IN_MEMCMP:
      return fold_builtin_memcmp (loc, arg0, arg1, arg2);

    case BUILT_IN_EXPECT:
      return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);

    case BUILT_IN_EXPECT_WITH_PROBABILITY:
      return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg2);

    case BUILT_IN_ADD_OVERFLOW:
    case BUILT_IN_SUB_OVERFLOW:
    case BUILT_IN_MUL_OVERFLOW:
    case BUILT_IN_ADD_OVERFLOW_P:
    case BUILT_IN_SUB_OVERFLOW_P:
    case BUILT_IN_MUL_OVERFLOW_P:
    case BUILT_IN_SADD_OVERFLOW:
    case BUILT_IN_SADDL_OVERFLOW:
    case BUILT_IN_SADDLL_OVERFLOW:
    case BUILT_IN_SSUB_OVERFLOW:
    case BUILT_IN_SSUBL_OVERFLOW:
    case BUILT_IN_SSUBLL_OVERFLOW:
    case BUILT_IN_SMUL_OVERFLOW:
    case BUILT_IN_SMULL_OVERFLOW:
    case BUILT_IN_SMULLL_OVERFLOW:
    case BUILT_IN_UADD_OVERFLOW:
    case BUILT_IN_UADDL_OVERFLOW:
    case BUILT_IN_UADDLL_OVERFLOW:
    case BUILT_IN_USUB_OVERFLOW:
    case BUILT_IN_USUBL_OVERFLOW:
    case BUILT_IN_USUBLL_OVERFLOW:
    case BUILT_IN_UMUL_OVERFLOW:
    case BUILT_IN_UMULL_OVERFLOW:
    case BUILT_IN_UMULLL_OVERFLOW:
      return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);

    default:
      break;
    }
  return NULL_TREE;
}
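/* Illustration (hypothetical example, not part of the original code): with
   all-constant operands fold_builtin_arith_overflow can reduce the call at
   compile time, e.g. __builtin_add_overflow_p (__INT_MAX__, 1, 0) folds to
   a nonzero constant; otherwise the call is rewritten in terms of the
   matching internal function (IFN_ADD_OVERFLOW and friends) and expanded
   later.  */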
/* Fold a call to built-in function FNDECL.  ARGS is an array of NARGS
   arguments.  IGNORE is true if the result of the
   function call is ignored.  This function returns NULL_TREE if no
   simplification was possible.  */

static tree
fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
{
  tree ret = NULL_TREE;

  switch (nargs)
    {
    case 0:
      ret = fold_builtin_0 (loc, fndecl);
      break;
    case 1:
      ret = fold_builtin_1 (loc, fndecl, args[0]);
      break;
    case 2:
      ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
      break;
    case 3:
      ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
      break;
    default:
      ret = fold_builtin_varargs (loc, fndecl, args, nargs);
      break;
    }
  if (ret)
    {
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      return ret;
    }
  return NULL_TREE;
}
/* Construct a new CALL_EXPR to FNDECL using the tail of the argument
   list ARGS along with N new arguments in NEWARGS.  SKIP is the number
   of arguments in ARGS to be omitted.  OLDNARGS is the number of
   elements in ARGS.  */

static tree
rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
                          int skip, tree fndecl, int n, va_list newargs)
{
  int nargs = oldnargs - skip + n;
  tree *buffer;

  if (n > 0)
    {
      int i, j;

      buffer = XALLOCAVEC (tree, nargs);
      for (i = 0; i < n; i++)
        buffer[i] = va_arg (newargs, tree);
      for (j = skip; j < oldnargs; j++, i++)
        buffer[i] = args[j];
    }
  else
    buffer = args + skip;

  return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
}
/* Return true if FNDECL shouldn't be folded right now.
   If a built-in function has an inline attribute always_inline
   wrapper, defer folding it after always_inline functions have
   been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
   might not be performed.  */

static bool
avoid_folding_inline_builtin (tree fndecl)
{
  return (DECL_DECLARED_INLINE_P (fndecl)
          && DECL_DISREGARD_INLINE_LIMITS (fndecl)
          && cfun
          && !cfun->always_inline_functions_inlined
          && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
}
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_expr (location_t loc, tree exp, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = get_callee_fndecl (exp);
  if (fndecl && fndecl_built_in_p (fndecl)
      /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
         yet.  Defer folding until we see all the arguments
         (after inlining).  */
      && !CALL_EXPR_VA_ARG_PACK (exp))
    {
      int nargs = call_expr_nargs (exp);

      /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
         instead last argument is __builtin_va_arg_pack ().  Defer folding
         even in that case, until arguments are finalized.  */
      if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
        {
          tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
          if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
            return NULL_TREE;
        }

      if (avoid_folding_inline_builtin (fndecl))
        return NULL_TREE;

      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
        return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
                                     CALL_EXPR_ARGP (exp), ignore);
      else
        {
          tree *args = CALL_EXPR_ARGP (exp);
          ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
          if (ret)
            return ret;
        }
    }
  return NULL_TREE;
}
/* Fold a CALL_EXPR with type TYPE with FN as the function expression.
   N arguments are passed in the array ARGARRAY.  Return a folded
   expression or NULL_TREE if no simplification was possible.  */

tree
fold_builtin_call_array (location_t loc, tree,
                         tree fn, int n, tree *argarray)
{
  if (TREE_CODE (fn) != ADDR_EXPR)
    return NULL_TREE;

  tree fndecl = TREE_OPERAND (fn, 0);
  if (TREE_CODE (fndecl) == FUNCTION_DECL
      && fndecl_built_in_p (fndecl))
    {
      /* If last argument is __builtin_va_arg_pack (), arguments to this
         function are not finalized yet.  Defer folding until they are.  */
      if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
        {
          tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
          if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
            return NULL_TREE;
        }
      if (avoid_folding_inline_builtin (fndecl))
        return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
        return targetm.fold_builtin (fndecl, n, argarray, false);
      else
        return fold_builtin_n (loc, fndecl, argarray, n, false);
    }

  return NULL_TREE;
}
/* Construct a new CALL_EXPR using the tail of the argument list of EXP
   along with N new arguments specified as the "..." parameters.  SKIP
   is the number of arguments in EXP to be omitted.  This function is used
   to do varargs-to-varargs transformations.  */

static tree
rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
{
  va_list ap;
  tree t;

  va_start (ap, n);
  t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
                                CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
  va_end (ap);

  return t;
}
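/* Illustration (hypothetical arguments, not part of the original code):
   a varargs-to-varargs transformation that replaces the callee and keeps
   all but the first two arguments of EXP could be written as

     rewrite_call_expr (loc, exp, 2, other_fndecl, 1, new_arg);

   producing other_fndecl (new_arg, CALL_EXPR_ARG (exp, 2), ...): the N
   new arguments come first, followed by the arguments of EXP past SKIP.  */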
/* Validate a single argument ARG against a tree code CODE representing
   a type.  Return true when argument is valid.  */

static bool
validate_arg (const_tree arg, enum tree_code code)
{
  if (!arg)
    return false;
  else if (code == POINTER_TYPE)
    return POINTER_TYPE_P (TREE_TYPE (arg));
  else if (code == INTEGER_TYPE)
    return INTEGRAL_TYPE_P (TREE_TYPE (arg));
  return code == TREE_CODE (TREE_TYPE (arg));
}
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.

   This is the GIMPLE version of validate_arglist.  Eventually we want to
   completely convert builtins.c to work from GIMPLEs and the tree based
   validate_arglist will then be removed.  */

bool
validate_gimple_arglist (const gcall *call, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_tree arg;
  size_t i;

  va_start (ap, call);
  i = 0;

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
        {
        case 0:
          /* This signifies an ellipsis, any further arguments are all ok.  */
          res = true;
          goto end;
        case VOID_TYPE:
          /* This signifies an endlink, if no arguments remain, return
             true, otherwise return false.  */
          res = (i == gimple_call_num_args (call));
          goto end;
        default:
          /* If no parameters remain or the parameter's code does not
             match the specified code, return false.  Otherwise continue
             checking any remaining arguments.  */
          arg = gimple_call_arg (call, i++);
          if (!validate_arg (arg, code))
            goto end;
          break;
        }
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}
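/* Illustration (hypothetical example, not part of the original code):
   a caller validating a (pointer, integer) builtin call would write

     if (!validate_gimple_arglist (call, POINTER_TYPE, INTEGER_TYPE,
                                   VOID_TYPE))
       return false;

   where the trailing VOID_TYPE is the endlink; passing 0 instead would
   accept arbitrary further arguments.  */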
/* Default target-specific builtin expander that does nothing.  */

rtx
default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
                        rtx target ATTRIBUTE_UNUSED,
                        rtx subtarget ATTRIBUTE_UNUSED,
                        machine_mode mode ATTRIBUTE_UNUSED,
                        int ignore ATTRIBUTE_UNUSED)
{
  return NULL_RTX;
}
/* Returns true if EXP represents data that would potentially reside
   in a readonly section.  */

bool
readonly_data_expr (tree exp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) != ADDR_EXPR)
    return false;

  exp = get_base_address (TREE_OPERAND (exp, 0));
  if (!exp)
    return false;

  /* Make sure we call decl_readonly_section only for trees it
     can handle (since it returns true for everything it doesn't
     understand).  */
  if (TREE_CODE (exp) == STRING_CST
      || TREE_CODE (exp) == CONSTRUCTOR
      || (VAR_P (exp) && TREE_STATIC (exp)))
    return decl_readonly_section (exp, 0);
  else
    return false;
}
/* Simplify a call to the strpbrk builtin.  S1 and S2 are the arguments
   to the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1, *p2;

      p2 = c_getstr (s2);
      if (p2 == NULL)
        return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
        {
          const char *r = strpbrk (p1, p2);
          tree tem;

          if (r == NULL)
            return build_int_cst (TREE_TYPE (s1), 0);

          /* Return an offset into the constant string argument.  */
          tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
          return fold_convert_loc (loc, type, tem);
        }

      if (p2[0] == '\0')
        /* strpbrk(x, "") == NULL.
           Evaluate and ignore s1 in case it had side-effects.  */
        return omit_one_operand_loc (loc, type, integer_zero_node, s1);

      if (p2[1] != '\0')
        return NULL_TREE;  /* Really call strpbrk.  */

      fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (!fn)
        return NULL_TREE;

      /* New argument list transforming strpbrk(s1, s2) to
         strchr(s1, s2[0]).  */
      return build_call_expr_loc (loc, fn, 2, s1,
                                  build_int_cst (integer_type_node, p2[0]));
    }
}
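/* Illustration (examples only, not part of the original code) of the
   strpbrk folds above:

     strpbrk (s, "")   ==>  ((void) s, (char *) 0)
     strpbrk (s, "c")  ==>  strchr (s, 'c')

   and with both arguments constant the result is computed here as an
   offset into S1 at compile time.  */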
/* Simplify a call to the strspn builtin.  S1 and S2 are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strspn (location_t loc, tree s1, tree s2)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);

      /* If either argument is "", the result is zero.  */
      if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
        /* Evaluate and ignore both arguments in case either one has
           side-effects.  */
        return omit_two_operands_loc (loc, size_type_node, size_zero_node,
                                      s1, s2);
      return NULL_TREE;
    }
}
/* Simplify a call to the strcspn builtin.  S1 and S2 are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strcspn (location_t loc, tree s1, tree s2)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;

  /* If the first argument is "", the result is zero.  */
  const char *p1 = c_getstr (s1);
  if (p1 && *p1 == '\0')
    {
      /* Evaluate and ignore argument s2 in case it has
         side-effects.  */
      return omit_one_operand_loc (loc, size_type_node,
                                   size_zero_node, s2);
    }

  /* If the second argument is "", return __builtin_strlen(s1).  */
  const char *p2 = c_getstr (s2);
  if (p2 && *p2 == '\0')
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);

      /* If the replacement _DECL isn't initialized, don't do the
         transformation.  */
      if (!fn)
        return NULL_TREE;

      return build_call_expr_loc (loc, fn, 1, s1);
    }
  return NULL_TREE;
}
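/* Illustration (examples only, not part of the original code) of the
   strspn/strcspn folds above:

     strspn (s, "")   ==>  ((void) s, (size_t) 0)
     strcspn ("", s)  ==>  ((void) s, (size_t) 0)
     strcspn (s, "")  ==>  strlen (s)

   The omitted arguments are still evaluated for their side effects via
   omit_one_operand_loc/omit_two_operands_loc.  */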
/* Fold the next_arg or va_start call EXP.  Returns true if there was an error
   produced.  False otherwise.  This is done so that we don't output the error
   or warning twice or three times.  */

bool
fold_builtin_next_arg (tree exp, bool va_start_p)
{
  tree fntype = TREE_TYPE (current_function_decl);
  int nargs = call_expr_nargs (exp);
  tree arg;
  /* There is good chance the current input_location points inside the
     definition of the va_start macro (perhaps on the token for
     builtin) in a system header, so warnings will not be emitted.
     Use the location in real source code.  */
  location_t current_location =
    linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
                                              NULL);

  if (!stdarg_p (fntype))
    {
      error ("%<va_start%> used in function with fixed args");
      return true;
    }

  if (va_start_p)
    {
      if (va_start_p && (nargs != 2))
        {
          error ("wrong number of arguments to function %<va_start%>");
          return true;
        }
      arg = CALL_EXPR_ARG (exp, 1);
    }
  /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
     when we checked the arguments and if needed issued a warning.  */
  else
    {
      if (nargs == 0)
        {
          /* Evidently an out of date version of <stdarg.h>; can't validate
             va_start's second argument, but can still work as intended.  */
          warning_at (current_location,
                      OPT_Wvarargs,
                      "%<__builtin_next_arg%> called without an argument");
          return true;
        }
      else if (nargs > 1)
        {
          error ("wrong number of arguments to function %<__builtin_next_arg%>");
          return true;
        }
      arg = CALL_EXPR_ARG (exp, 0);
    }

  if (TREE_CODE (arg) == SSA_NAME)
    arg = SSA_NAME_VAR (arg);

  /* We destructively modify the call to be __builtin_va_start (ap, 0)
     or __builtin_next_arg (0) the first time we see it, after checking
     the arguments and if needed issuing a warning.  */
  if (!integer_zerop (arg))
    {
      tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));

      /* Strip off all nops for the sake of the comparison.  This
         is not quite the same as STRIP_NOPS.  It does more.
         We must also strip off INDIRECT_EXPR for C++ reference
         parameters.  */
      while (CONVERT_EXPR_P (arg)
             || TREE_CODE (arg) == INDIRECT_REF)
        arg = TREE_OPERAND (arg, 0);
      if (arg != last_parm)
        {
          /* FIXME: Sometimes the tree optimizers leave us with something
             other than the last argument even though the user used the
             last argument.  We just warn and set the arg to be the last
             argument so that we will get wrong-code because of it.  */
          warning_at (current_location,
                      OPT_Wvarargs,
                      "second parameter of %<va_start%> not last named argument");
        }

      /* Undefined by C99 7.15.1.4p4 (va_start):
         "If the parameter parmN is declared with the register storage
         class, with a function or array type, or with a type that is
         not compatible with the type that results after application of
         the default argument promotions, the behavior is undefined."  */
      else if (DECL_REGISTER (arg))
        {
          warning_at (current_location,
                      OPT_Wvarargs,
                      "undefined behavior when second parameter of "
                      "%<va_start%> is declared with %<register%> storage");
        }

      /* We want to verify the second parameter just once before the tree
         optimizers are run and then avoid keeping it in the tree,
         as otherwise we could warn even for correct code like:
         void foo (int i, ...)
         { va_list ap; i++; va_start (ap, i); va_end (ap); } */
      if (va_start_p)
        CALL_EXPR_ARG (exp, 1) = integer_zero_node;
      else
        CALL_EXPR_ARG (exp, 0) = integer_zero_node;
    }

  return false;
}
/* Expand a call EXP to __builtin_object_size.  */

static rtx
expand_builtin_object_size (tree exp)
{
  tree ost;
  int object_size_type;
  tree fndecl = get_callee_fndecl (exp);

  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      error ("%Kfirst argument of %qD must be a pointer, second integer constant",
             exp, fndecl);
      expand_builtin_trap ();
      return const0_rtx;
    }

  ost = CALL_EXPR_ARG (exp, 1);
  STRIP_NOPS (ost);

  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    {
      error ("%Klast argument of %qD is not integer constant between 0 and 3",
             exp, fndecl);
      expand_builtin_trap ();
      return const0_rtx;
    }

  object_size_type = tree_to_shwi (ost);

  return object_size_type < 2 ? constm1_rtx : const0_rtx;
}
/* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   FCODE is the BUILT_IN_* to use.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
                           enum built_in_function fcode)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE,
                         fcode == BUILT_IN_MEMSET_CHK
                         ? INTEGER_TYPE : POINTER_TYPE,
                         INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);
  tree size = CALL_EXPR_ARG (exp, 3);

  bool sizes_ok = check_access (exp, dest, src, len, /*maxread=*/NULL_TREE,
                                /*str=*/NULL_TREE, size);

  if (!tree_fits_uhwi_p (size))
    return NULL_RTX;

  if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
    {
      /* Avoid transforming the checking call to an ordinary one when
         an overflow has been detected or when the call couldn't be
         validated because the size is not constant.  */
      if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
        return NULL_RTX;

      tree fn = NULL_TREE;
      /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
         mem{cpy,pcpy,move,set} is available.  */
      switch (fcode)
        {
        case BUILT_IN_MEMCPY_CHK:
          fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
          break;
        case BUILT_IN_MEMPCPY_CHK:
          fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
          break;
        case BUILT_IN_MEMMOVE_CHK:
          fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
          break;
        case BUILT_IN_MEMSET_CHK:
          fn = builtin_decl_explicit (BUILT_IN_MEMSET);
          break;
        default:
          break;
        }

      if (!fn)
        return NULL_RTX;

      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_expr (fn, target, mode, EXPAND_NORMAL);
    }
  else if (fcode == BUILT_IN_MEMSET_CHK)
    return NULL_RTX;
  else
    {
      unsigned int dest_align = get_pointer_alignment (dest);

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
        return NULL_RTX;

      /* If SRC and DEST are the same (and not volatile), do nothing.  */
      if (operand_equal_p (src, dest, 0))
        {
          tree expr;

          if (fcode != BUILT_IN_MEMPCPY_CHK)
            {
              /* Evaluate and ignore LEN in case it has side-effects.  */
              expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
              return expand_expr (dest, target, mode, EXPAND_NORMAL);
            }

          expr = fold_build_pointer_plus (dest, len);
          return expand_expr (expr, target, mode, EXPAND_NORMAL);
        }

      /* __memmove_chk special case.  */
      if (fcode == BUILT_IN_MEMMOVE_CHK)
        {
          unsigned int src_align = get_pointer_alignment (src);

          if (src_align == 0)
            return NULL_RTX;

          /* If src is categorized for a readonly section we can use
             normal __memcpy_chk.  */
          if (readonly_data_expr (src))
            {
              tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
              if (!fn)
                return NULL_RTX;
              fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
                                          dest, src, len, size);
              gcc_assert (TREE_CODE (fn) == CALL_EXPR);
              CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
              return expand_expr (fn, target, mode, EXPAND_NORMAL);
            }
        }
      return NULL_RTX;
    }
}
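/* Illustration (example only, not part of the original code): given

     char buf[8];
     __builtin___memcpy_chk (buf, src, n, __builtin_object_size (buf, 0));

   a constant N no larger than 8 lets the checking call be expanded as a
   plain memcpy (buf, src, n); if N exceeds the object size the rewrite is
   refused here so the runtime check (and the overflow warning) survive.  */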
/* Emit warning if a buffer overflow is detected at compile time.  */

static void
maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
{
  /* The source string.  */
  tree srcstr = NULL_TREE;
  /* The size of the destination object.  */
  tree objsize = NULL_TREE;
  /* The string that is being concatenated with (as in __strcat_chk)
     or null if it isn't.  */
  tree catstr = NULL_TREE;
  /* The maximum length of the source sequence in a bounded operation
     (such as __strncat_chk) or null if the operation isn't bounded
     (such as __strcat_chk).  */
  tree maxread = NULL_TREE;
  /* The exact size of the access (such as in __strncpy_chk).  */
  tree size = NULL_TREE;

  switch (fcode)
    {
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      srcstr = CALL_EXPR_ARG (exp, 1);
      objsize = CALL_EXPR_ARG (exp, 2);
      break;

    case BUILT_IN_STRCAT_CHK:
      /* For __strcat_chk the warning will be emitted only if overflowing
         by at least strlen (dest) + 1 bytes.  */
      catstr = CALL_EXPR_ARG (exp, 0);
      srcstr = CALL_EXPR_ARG (exp, 1);
      objsize = CALL_EXPR_ARG (exp, 2);
      break;

    case BUILT_IN_STRNCAT_CHK:
      catstr = CALL_EXPR_ARG (exp, 0);
      srcstr = CALL_EXPR_ARG (exp, 1);
      maxread = CALL_EXPR_ARG (exp, 2);
      objsize = CALL_EXPR_ARG (exp, 3);
      break;

    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      srcstr = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 2);
      objsize = CALL_EXPR_ARG (exp, 3);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      maxread = CALL_EXPR_ARG (exp, 1);
      objsize = CALL_EXPR_ARG (exp, 3);
      break;
    default:
      gcc_unreachable ();
    }

  if (catstr && maxread)
    {
      /* Check __strncat_chk.  There is no way to determine the length
         of the string to which the source string is being appended so
         just warn when the length of the source string is not known.  */
      check_strncat_sizes (exp, objsize);
      return;
    }

  /* The destination argument is the first one for all built-ins above.  */
  tree dst = CALL_EXPR_ARG (exp, 0);

  check_access (exp, dst, srcstr, size, maxread, srcstr, objsize);
}
/* Emit warning if a buffer overflow is detected at compile time
   in __sprintf_chk/__vsprintf_chk calls.  */

static void
maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
{
  tree size, len, fmt;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return;
  size = CALL_EXPR_ARG (exp, 2);
  fmt = CALL_EXPR_ARG (exp, 3);

  if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
    return;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return;

  if (!init_target_chars ())
    return;

  /* If the format doesn't contain % args or %%, we know its size.  */
  if (strchr (fmt_str, target_percent) == 0)
    len = build_int_cstu (size_type_node, strlen (fmt_str));
  /* If the format is "%s" and first ... argument is a string literal,
     we know the size too.  */
  else if (fcode == BUILT_IN_SPRINTF_CHK
           && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree arg;

      if (nargs < 5)
        return;
      arg = CALL_EXPR_ARG (exp, 4);
      if (! POINTER_TYPE_P (TREE_TYPE (arg)))
        return;

      len = c_strlen (arg, 1);
      if (!len || ! tree_fits_uhwi_p (len))
        return;
    }
  else
    return;

  /* Add one for the terminating nul.  */
  len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);

  check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, /*size=*/NULL_TREE,
                /*maxread=*/NULL_TREE, len, size);
}
/* Emit warning if a free is called with address of a variable.  */

static void
maybe_emit_free_warning (tree exp)
{
  tree arg = CALL_EXPR_ARG (exp, 0);

  STRIP_NOPS (arg);
  if (TREE_CODE (arg) != ADDR_EXPR)
    return;

  arg = get_base_address (TREE_OPERAND (arg, 0));
  if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
    return;

  if (SSA_VAR_P (arg))
    warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
                "%Kattempt to free a non-heap object %qD", exp, arg);
  else
    warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
                "%Kattempt to free a non-heap object", exp);
}
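/* Illustration (example only, not part of the original code):

     int i;
     free (&i);    ==>  warning: attempt to free a non-heap object 'i'

   diagnosed under -Wfree-nonheap-object.  */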
/* Fold a call to __builtin_object_size with arguments PTR and OST,
   if possible.  */

static tree
fold_builtin_object_size (tree ptr, tree ost)
{
  unsigned HOST_WIDE_INT bytes;
  int object_size_type;

  if (!validate_arg (ptr, POINTER_TYPE)
      || !validate_arg (ost, INTEGER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (ost);

  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    return NULL_TREE;

  object_size_type = tree_to_shwi (ost);

  /* __builtin_object_size doesn't evaluate side-effects in its arguments;
     if there are any side-effects, it returns (size_t) -1 for types 0 and 1
     and (size_t) 0 for types 2 and 3.  */
  if (TREE_SIDE_EFFECTS (ptr))
    return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      compute_builtin_object_size (ptr, object_size_type, &bytes);
      if (wi::fits_to_tree_p (bytes, size_type_node))
        return build_int_cstu (size_type_node, bytes);
    }
  else if (TREE_CODE (ptr) == SSA_NAME)
    {
      /* If object size is not known yet, delay folding until
         later.  Maybe subsequent passes will help determining
         it.  */
      if (compute_builtin_object_size (ptr, object_size_type, &bytes)
          && wi::fits_to_tree_p (bytes, size_type_node))
        return build_int_cstu (size_type_node, bytes);
    }

  return NULL_TREE;
}
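/* Illustration (examples only, not part of the original code):

     char buf[8];
     __builtin_object_size (buf, 0)      ==>  8
     __builtin_object_size (buf + 2, 0)  ==>  6
     __builtin_object_size (f (), 0)     ==>  (size_t) -1   [side-effects]
     __builtin_object_size (f (), 2)     ==>  (size_t) 0    [side-effects]

   Addresses fold right away; SSA names are retried in later passes once
   tree-object-size has more information.  */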
/* Builtins with folding operations that operate on "..." arguments
   need special handling; we need to store the arguments in a convenient
   data structure before attempting any folding.  Fortunately there are
   only a few builtins that fall into this category.  FNDECL is the
   function, EXP is the CALL_EXPR for the call.  */

static tree
fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  tree ret = NULL_TREE;

  switch (fcode)
    {
    case BUILT_IN_FPCLASSIFY:
      ret = fold_builtin_fpclassify (loc, args, nargs);
      break;

    default:
      break;
    }
  if (ret)
    {
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}
/* Initialize format string characters in the target charset.  */

bool
init_target_chars (void)
{
  static bool init;
  if (!init)
    {
      target_newline = lang_hooks.to_target_charset ('\n');
      target_percent = lang_hooks.to_target_charset ('%');
      target_c = lang_hooks.to_target_charset ('c');
      target_s = lang_hooks.to_target_charset ('s');
      if (target_newline == 0 || target_percent == 0 || target_c == 0
          || target_s == 0)
        return false;

      target_percent_c[0] = target_percent;
      target_percent_c[1] = target_c;
      target_percent_c[2] = '\0';

      target_percent_s[0] = target_percent;
      target_percent_s[1] = target_s;
      target_percent_s[2] = '\0';

      target_percent_s_newline[0] = target_percent;
      target_percent_s_newline[1] = target_s;
      target_percent_s_newline[2] = target_newline;
      target_percent_s_newline[3] = '\0';

      init = true;
    }
  return true;
}
/* Helper function for do_mpfr_arg*().  Ensure M is a normal number
   and no overflow/underflow occurred.  INEXACT is true if M was not
   exactly calculated.  TYPE is the tree type for the result.  This
   function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail.  */

static tree
do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
      && (!flag_rounding_math || !inexact))
    {
      REAL_VALUE_TYPE rr;

      real_from_mpfr (&rr, m, type, GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
         check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
         but the mpfr_t is not, then we underflowed in the
         conversion.  */
      if (real_isfinite (&rr)
          && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
        {
          REAL_VALUE_TYPE rmode;

          real_convert (&rmode, TYPE_MODE (type), &rr);
          /* Proceed iff the specified mode can hold the value.  */
          if (real_identical (&rmode, &rr))
            return build_real (type, rmode);
        }
    }
  return NULL_TREE;
}
/* Helper function for do_mpc_arg*().  Ensure M is a normal complex
   number and no overflow/underflow occurred.  INEXACT is true if M
   was not exactly calculated.  TYPE is the tree type for the result.
   This function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail; if
   FORCE_CONVERT is true, bypass the checks.  */

static tree
do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (force_convert
      || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
          && !mpfr_overflow_p () && !mpfr_underflow_p ()
          && (!flag_rounding_math || !inexact)))
    {
      REAL_VALUE_TYPE re, im;

      real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
      real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
         check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
         but the mpfr_t is not, then we underflowed in the
         conversion.  */
      if (force_convert
          || (real_isfinite (&re) && real_isfinite (&im)
              && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
              && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
        {
          REAL_VALUE_TYPE re_mode, im_mode;

          real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
          real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
          /* Proceed iff the specified mode can hold the value.  */
          if (force_convert
              || (real_identical (&re_mode, &re)
                  && real_identical (&im_mode, &im)))
            return build_complex (type, build_real (TREE_TYPE (type), re_mode),
                                  build_real (TREE_TYPE (type), im_mode));
        }
    }
  return NULL_TREE;
}
/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          tree result_rem;
          long integer_quo;
          mpfr_t m0, m1;

          mpfr_inits2 (prec, m0, m1, NULL);
          mpfr_from_real (m0, ra0, GMP_RNDN);
          mpfr_from_real (m1, ra1, GMP_RNDN);
          mpfr_clear_flags ();
          mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
          /* Remquo is independent of the rounding mode, so pass
             inexact=0 to do_mpfr_ckconv().  */
          result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
          mpfr_clears (m0, m1, NULL);
          if (result_rem)
            {
              /* MPFR calculates quo in the host's long so it may
                 return more bits in quo than the target int can hold
                 if sizeof(host long) > sizeof(target int).  This can
                 happen even for native compilers in LP64 mode.  In
                 these cases, modulo the quo value with the largest
                 number that the target int can hold while leaving one
                 bit for the sign.  */
              if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
                integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

              /* Dereference the quo pointer argument.  */
              arg_quo = build_fold_indirect_ref (arg_quo);
              /* Proceed iff a valid pointer type was passed in.  */
              if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
                {
                  /* Set the value.  */
                  tree result_quo
                    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
                                   build_int_cst (TREE_TYPE (arg_quo),
                                                  integer_quo));
                  TREE_SIDE_EFFECTS (result_quo) = 1;
                  /* Combine the quo assignment with the rem.  */
                  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                    result_quo, result_rem));
                }
            }
        }
    }
  return result;
}
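/* Illustration (example only, not part of the original code): with both
   operands constant,

     int q;
     double r = __builtin_remquo (5.0, 3.0, &q);

   folds into a COMPOUND_EXPR that stores the MPFR-computed quotient bits
   into q and yields the remainder, eliminating the libm call.  */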
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1, 1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
         negative integer.  */
      if (real_isfinite (ra)
          && ra->cl != rvc_zero
          && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          int inexact, sg;
          mpfr_t m;
          tree result_lg;

          mpfr_init2 (m, prec);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = mpfr_lgamma (m, &sg, m, rnd);
          result_lg = do_mpfr_ckconv (m, type, inexact);
          mpfr_clear (m);
          if (result_lg)
            {
              tree result_sg;

              /* Dereference the arg_sg pointer argument.  */
              arg_sg = build_fold_indirect_ref (arg_sg);
              /* Assign the signgam value into *arg_sg.  */
              result_sg = fold_build2 (MODIFY_EXPR,
                                       TREE_TYPE (arg_sg), arg_sg,
                                       build_int_cst (TREE_TYPE (arg_sg), sg));
              TREE_SIDE_EFFECTS (result_sg) = 1;
              /* Combine the signgam assignment with the lgamma result.  */
              result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                result_sg, result_lg));
            }
        }
    }
  return result;
}
/* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
   mpc function FUNC on it and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

static tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
             int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
          || (real_isfinite (re0) && real_isfinite (im0)
              && real_isfinite (re1) && real_isfinite (im1)))
        {
          const struct real_format *const fmt =
            REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
          int inexact;
          mpc_t m0, m1;

          mpc_init2 (m0, prec);
          mpc_init2 (m1, prec);
          mpfr_from_real (mpc_realref (m0), re0, rnd);
          mpfr_from_real (mpc_imagref (m0), im0, rnd);
          mpfr_from_real (mpc_realref (m1), re1, rnd);
          mpfr_from_real (mpc_imagref (m1), im1, rnd);
          mpfr_clear_flags ();
          inexact = func (m0, m0, m1, crnd);
          result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
          mpc_clear (m0);
          mpc_clear (m1);
        }
    }

  return result;
}
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl && fndecl_built_in_p (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
                    ? gimple_call_arg_ptr (stmt, 0)
                    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
        return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
        {
          return targetm.fold_builtin (fndecl, nargs, args, ignore);
        }
      else
        {
          ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
          if (ret)
            {
              /* Propagate location information from original call to
                 expansion of builtin.  Otherwise things like
                 maybe_emit_chk_warning, that operate on the expansion
                 of a builtin, will use the wrong location information.  */
              if (gimple_has_location (stmt))
                {
                  tree realret = ret;
                  if (TREE_CODE (ret) == NOP_EXPR)
                    realret = TREE_OPERAND (ret, 0);
                  if (CAN_HAVE_LOCATION_P (realret)
                      && !EXPR_HAS_LOCATION (realret))
                    SET_EXPR_LOCATION (realret, loc);
                  return realret;
                }
              return ret;
            }
        }
    }
  return NULL_TREE;
}
/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  gcc_assert (fndecl_built_in_p (decl, BUILT_IN_NORMAL)
              && asmspec != 0);

  tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);

  if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
      && INT_TYPE_SIZE < BITS_PER_WORD)
    {
      scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
      set_user_assembler_libfunc ("ffs", asmspec);
      set_optab_libfunc (ffs_optab, mode, "ffs");
    }
}
/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
    switch (DECL_FUNCTION_CODE (decl))
      {
        /* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
        /* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
        /* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
        return true;

      default:
        return false;
      }

  return false;
}
/* Return true if DECL is a builtin that is not expensive, i.e., one that is
   most probably expanded inline into reasonably simple code.  This is a
   superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      CASE_BUILT_IN_ALLOCA:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_ACC_ON_DEVICE:
        return true;

      default:
        return is_simple_builtin (decl);
      }

  return false;
}
/* Return true if T is a constant and the value cast to a target char
   can be represented by a host char.
   Store the casted char constant in *P if so.  */

static bool
target_char_cst_p (tree t, char *p)
{
  if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
    return false;

  *p = (char)tree_to_uhwi (t);
  return true;
}

/* Return the maximum object size.  */

tree
max_object_size (void)
{
  /* To do: Make this a configurable parameter.  */
  return TYPE_MAX_VALUE (ptrdiff_type_node);
}