/* Expand builtin functions.
   Copyright (C) 1988-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */
#include "coretypes.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "gimple-ssa-warn-restrict.h"
#include "stor-layout.h"
#include "tree-object-size.h"
#include "typeclass.h"
#include "langhooks.h"
#include "value-prof.h"
#include "stringpool.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimple-fold.h"
#include "file-prefix-map.h" /* remap_macro_filename()  */
#include "gomp-constants.h"
#include "omp-general.h"
struct target_builtins default_target_builtins;
struct target_builtins *this_target_builtins = &default_target_builtins;
/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
/* Set up an array of builtin_info_type, make sure each element decl is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];
/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;
static rtx c_readstr (const char *, scalar_int_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memchr (tree, rtx);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
					    rtx target, tree exp, int endp);
static rtx expand_builtin_memmove (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, int);
static rtx expand_builtin_strcat (tree, rtx);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_stpncpy (tree, rtx);
static rtx expand_builtin_strncat (tree, rtx);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode,
				       tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree,
					enum tree_code, enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);
/* Return true if NAME starts with __builtin_, __sync_ or __atomic_.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  return false;
}
/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}
/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}
/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and N
   in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */
static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  poly_int64 bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned int ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = least_bit_hwi (ptr_bitmask);
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      unsigned int talign;
      if (!addr_p && !known_alignment
	  && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
	  && talign > align)
	align = talign;
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  /* Account for the alignment of runtime coefficients, so that the constant
     bitpos is guaranteed to be accurate.  */
  unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
  if (alt_align != 0 && alt_align < align)
    {
      align = alt_align;
      known_alignment = false;
    }

  *alignp = align;
  *bitposp = bitpos.coeffs[0] & (align - 1);
  return known_alignment;
}
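
/* Worked example of the M/N contract above (editorial illustration):
   for an access known to sit 3 bytes past a 16-byte-aligned address,
   this stores M = 16 * BITS_PER_UNIT = 128 in *ALIGNP and
   N = 3 * BITS_PER_UNIT = 24 in *BITPOSP; the address minus 24 bits is
   divisible by 128 bits, and 24 < 128 as required.  */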
/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}
/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);
  return align;
}
/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	  if (*alignp == 0)
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}
/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);

  return align;
}
/* Return the number of non-zero elements in the sequence
   [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
   ELTSIZE must be a power of 2 less than 8.  Used by c_strlen.  */

static unsigned
string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
{
  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  unsigned n;

  if (eltsize == 1)
    {
      /* Optimize the common case of plain char.  */
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n;
	  if (!*elt)
	    break;
	}
    }
  else
    {
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n * eltsize;
	  if (!memcmp (elt, "\0\0\0\0", eltsize))
	    break;
	}
    }
  return n;
}
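
/* Example (editorial illustration): string_length ("ab\0cd", 1, 5)
   returns 2, the number of elements before the first zero element, while
   string_length ("abcd", 1, 3) returns 3 because no terminator is found
   within the first MAXELTS elements.  */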
/* Compute the length of a null-terminated character string or wide
   character string handling character sizes of 1, 2, and 4 bytes.
   TREE_STRING_LENGTH is not the right way because it evaluates to
   the size of the character array in bytes (as opposed to characters)
   and because it can contain a zero byte in the middle.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */
tree
c_strlen (tree src, int only_value)
{
  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  location_t loc = EXPR_LOC_OR_LOC (src, input_location);

  /* Offset from the beginning of the string in bytes.  */
  tree byteoff;
  src = string_constant (src, &byteoff);
  if (src == 0)
    return NULL_TREE;

  /* Determine the size of the string element.  */
  unsigned eltsize
    = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src))));

  /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
     length of SRC.  */
  unsigned maxelts = TREE_STRING_LENGTH (src) / eltsize - 1;

  /* PTR can point to the byte representation of any string type, including
     char* and wchar_t*.  */
  const char *ptr = TREE_STRING_POINTER (src);

  if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      if (string_length (ptr, eltsize, maxelts) < maxelts)
	/* Return when an embedded null character is found.  */
	return NULL_TREE;

      if (!maxelts)
	return ssize_int (0);

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (maxelts * eltsize), byteoff);
    }

  /* Offset from the beginning of the string in elements.  */
  HOST_WIDE_INT eltoff;

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (byteoff == 0)
    eltoff = 0;
  else if (! tree_fits_shwi_p (byteoff))
    eltoff = -1;
  else
    eltoff = tree_to_shwi (byteoff) / eltsize;

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (eltoff < 0 || eltoff > maxelts)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, OPT_Warray_bounds,
		      "offset %qwi outside bounds of constant string",
		      eltoff);
	  TREE_NO_WARNING (src) = 1;
	}

      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since ELTOFF is our starting index into the string, no further
     calculation is needed.  */
  unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
				maxelts - eltoff);

  return ssize_int (len);
}
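
/* Usage sketch (editorial illustration): for a STRING_CST "hello" with a
   zero or absent offset, this folds to ssize_int (5); for "foo\0bar" with
   a non-constant byte offset, NULL_TREE is returned because the embedded
   null makes the length depend on the unknown starting offset.  */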
/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, scalar_int_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
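
/* Example (editorial illustration): on a little-endian target with 8-bit
   units and 32-bit SImode, c_readstr ("abcd", SImode) yields the constant
   0x64636261, i.e. 'a' (0x61) ends up in the least significant byte,
   matching what a target load from the string would produce.  */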
/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (VAR_P (exp) && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.   */
  cfun->has_nonlocal_label = 1;
}
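
/* Resulting buffer layout (summary of the stores above): word 0 holds the
   frame value from targetm.builtin_setjmp_frame_value (), word 1 holds the
   address of RECEIVER_LABEL, and the bytes from offset
   2 * GET_MODE_SIZE (Pmode) onward hold the machine-dependent stack save
   area.  */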
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = rtx_for_static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
  if (! targetm.have_nonlocal_goto ())
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 TARGET_STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[]
	= ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
/* Return true if more arguments remain in the const call expression
   argument iterator ITER.  */

static bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  /* Get a bitmap of pointer argument numbers declared attribute nonnull.  */
  tree fn = CALL_EXPR_FN (callexpr);
  bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));

  for (unsigned argno = 1; ; ++argno)
    {
      code = (enum tree_code) va_arg (ap, int);

      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	case POINTER_TYPE:
	  /* The actual argument must be nonnull when either the whole
	     called function has been declared nonnull, or when the formal
	     argument corresponding to the actual argument has been.  */
	  if (argmap
	      && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
	    {
	      arg = next_const_call_expr_arg (&iter);
	      if (!validate_arg (arg, code) || integer_zerop (arg))
		goto end;
	      break;
	    }
	  /* FALLTHRU */
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  BITMAP_FREE (argmap);

  return res;
}
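
/* Usage sketch (editorial illustration): callers in this file invoke this
   as, e.g., validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
   to require exactly two pointer arguments, or
   validate_arglist (exp, POINTER_TYPE, 0) to require a leading pointer
   followed by any further arguments.  */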
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.   */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
      if (regnum != INVALID_REGNUM && fixed_regs[regnum])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}
/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  buf_addr = convert_memory_address (Pmode, buf_addr);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
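
/* Source-level example (editorial illustration): a call such as
   __builtin_prefetch (p, 1, 3) reaches this function with argument 1 == 1
   (prefetch for write) and argument 2 == 3 (maximum temporal locality);
   on a target without a prefetch pattern, only the side effects of the
   address expression are expanded.  */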
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}
/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)
/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
    }
  return size;
}
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  fixed_size_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  fixed_size_mode mode;
  rtx struct_incoming_value
    = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
				      crtl->args.pretend_args_size),
		       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    rtx_insn *seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}

/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  fixed_size_mode mode;
  rtx incoming_args, result, reg, dest, src;
  rtx_call_insn *call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value
    = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
  if (!STACK_GROWS_DOWNWARD)
    incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
					 incoming_args, 0, OPTAB_LIB_WIDEN);

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
  else
    emit_stack_save (SAVE_BLOCK, &old_stack_level);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
  if (!STACK_GROWS_DOWNWARD)
    {
      if (CONST_INT_P (argsize))
	dest = plus_constant (Pmode, dest, -INTVAL (argsize));
      else
	dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
    }
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
  if (targetm.have_untyped_call ())
    {
      rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
      emit_call_insn (targetm.gen_untyped_call (mem, result,
						result_vector (1, result)));
    }
  else if (targetm.have_call_value ())
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    gcc_assert (!valreg); /* have_untyped_call required.  */

	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_insn (targetm.gen_call_value (valreg,
					 gen_rtx_MEM (FUNCTION_MODE, function),
					 const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
  else
    emit_stack_restore (SAVE_BLOCK, old_stack_level);
  fixup_args_size_notes (call_insn, get_last_insn (), 0);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
/* Perform an untyped return.  */

static void
expand_builtin_return (rtx result)
{
  int size, align, regno;
  fixed_size_mode mode;
  rtx reg;
  rtx_insn *call_fusage = 0;

  result = convert_memory_address (Pmode, result);

  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

  if (targetm.have_untyped_return ())
    {
      rtx vector = result_vector (0, result);
      emit_jump_insn (targetm.gen_untyped_return (result, vector));
      emit_barrier ();
      return;
    }

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
	emit_move_insn (reg, adjust_address (result, mode, size));

	push_to_sequence (call_fusage);
	emit_use (reg);
	call_fusage = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insn (call_fusage);

  /* Return whatever value was restored by jumping directly to the end
     of the function.  */
  expand_naked_return ();
}
/* Used by expand_builtin_classify_type and fold_builtin_classify_type.  */

static enum type_class
type_to_class (tree type)
{
  switch (TREE_CODE (type))
    {
    case VOID_TYPE:	   return void_type_class;
    case INTEGER_TYPE:	   return integer_type_class;
    case ENUMERAL_TYPE:	   return enumeral_type_class;
    case BOOLEAN_TYPE:	   return boolean_type_class;
    case POINTER_TYPE:	   return pointer_type_class;
    case REFERENCE_TYPE:   return reference_type_class;
    case OFFSET_TYPE:	   return offset_type_class;
    case REAL_TYPE:	   return real_type_class;
    case COMPLEX_TYPE:	   return complex_type_class;
    case FUNCTION_TYPE:	   return function_type_class;
    case METHOD_TYPE:	   return method_type_class;
    case RECORD_TYPE:	   return record_type_class;
    case UNION_TYPE:
    case QUAL_UNION_TYPE:  return union_type_class;
    case ARRAY_TYPE:	   return (TYPE_STRING_FLAG (type)
				   ? string_type_class : array_type_class);
    case LANG_TYPE:	   return lang_type_class;
    default:		   return no_type_class;
    }
}
1815 /* Expand a call EXP to __builtin_classify_type. */
1818 expand_builtin_classify_type (tree exp
)
1820 if (call_expr_nargs (exp
))
1821 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp
, 0))));
1822 return GEN_INT (no_type_class
);
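
/* Illustrative example (editor's addition, not part of the original
   source): with the mapping above,

       __builtin_classify_type (42)    folds to integer_type_class,
       __builtin_classify_type (3.14)  folds to real_type_class,

   which is what <tgmath.h>-style dispatch macros rely on to select an
   implementation at compile time.  */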
/* This helper macro, meant to be used in mathfn_built_in below, determines
   which among a set of builtin math functions is appropriate for a given type
   mode.  The `F' (float) and `L' (long double) are automatically generated
   from the 'double' case.  If a function supports the _Float<N> and _Float<N>X
   types, there are additional types that are considered with 'F32', 'F64',
   'F128', etc. suffixes.  */
#define CASE_MATHFN(MATHFN) \
  CASE_CFN_##MATHFN: \
  fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F; \
  fcodel = BUILT_IN_##MATHFN##L; break;
/* Similar to the above, but also add support for the _Float<N> and _Float<N>X
   types.  */
#define CASE_MATHFN_FLOATN(MATHFN) \
  CASE_CFN_##MATHFN: \
  fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F; \
  fcodel = BUILT_IN_##MATHFN##L; fcodef16 = BUILT_IN_##MATHFN##F16; \
  fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64; \
  fcodef128 = BUILT_IN_##MATHFN##F128; fcodef32x = BUILT_IN_##MATHFN##F32X; \
  fcodef64x = BUILT_IN_##MATHFN##F64X; fcodef128x = BUILT_IN_##MATHFN##F128X; \
  break;
/* Similar to above, but appends _R after any F/L suffix.  */
#define CASE_MATHFN_REENT(MATHFN) \
  case CFN_BUILT_IN_##MATHFN##_R: \
  case CFN_BUILT_IN_##MATHFN##F_R: \
  case CFN_BUILT_IN_##MATHFN##L_R: \
  fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R; \
  fcodel = BUILT_IN_##MATHFN##L_R; break;
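
/* Illustrative example (editor's addition, not part of the original
   source): CASE_MATHFN (NEXTAFTER) expands to

       CASE_CFN_NEXTAFTER:
         fcode = BUILT_IN_NEXTAFTER; fcodef = BUILT_IN_NEXTAFTERF;
         fcodel = BUILT_IN_NEXTAFTERL; break;

   so a single table entry covers the double, float and long double
   variants of the function.  */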
/* Return a function equivalent to FN but operating on floating-point
   values of type TYPE, or END_BUILTINS if no such function exists.
   This is purely an operation on function codes; it does not guarantee
   that the target actually has an implementation of the function.  */

static built_in_function
mathfn_built_in_2 (tree type, combined_fn fn)
{
  tree mtype;
  built_in_function fcode, fcodef, fcodel;
  built_in_function fcodef16 = END_BUILTINS;
  built_in_function fcodef32 = END_BUILTINS;
  built_in_function fcodef64 = END_BUILTINS;
  built_in_function fcodef128 = END_BUILTINS;
  built_in_function fcodef32x = END_BUILTINS;
  built_in_function fcodef64x = END_BUILTINS;
  built_in_function fcodef128x = END_BUILTINS;

  switch (fn)
    {
    CASE_MATHFN_FLOATN (CEIL)
    CASE_MATHFN_FLOATN (COPYSIGN)
    CASE_MATHFN_FLOATN (FLOOR)
    CASE_MATHFN_FLOATN (FMA)
    CASE_MATHFN_FLOATN (FMAX)
    CASE_MATHFN_FLOATN (FMIN)
    CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
    CASE_MATHFN (HUGE_VAL)
    CASE_MATHFN (IFLOOR)
    CASE_MATHFN (IROUND)
    CASE_MATHFN (LFLOOR)
    CASE_MATHFN (LGAMMA)
    CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
    CASE_MATHFN (LLCEIL)
    CASE_MATHFN (LLFLOOR)
    CASE_MATHFN (LLRINT)
    CASE_MATHFN (LLROUND)
    CASE_MATHFN (LROUND)
    CASE_MATHFN_FLOATN (NEARBYINT)
    CASE_MATHFN (NEXTAFTER)
    CASE_MATHFN (NEXTTOWARD)
    CASE_MATHFN (REMAINDER)
    CASE_MATHFN (REMQUO)
    CASE_MATHFN_FLOATN (RINT)
    CASE_MATHFN_FLOATN (ROUND)
    CASE_MATHFN (SCALBLN)
    CASE_MATHFN (SCALBN)
    CASE_MATHFN (SIGNBIT)
    CASE_MATHFN (SIGNIFICAND)
    CASE_MATHFN (SINCOS)
    CASE_MATHFN_FLOATN (SQRT)
    CASE_MATHFN (TGAMMA)
    CASE_MATHFN_FLOATN (TRUNC)

    default:
      return END_BUILTINS;
    }

  mtype = TYPE_MAIN_VARIANT (type);
  if (mtype == double_type_node)
    return fcode;
  else if (mtype == float_type_node)
    return fcodef;
  else if (mtype == long_double_type_node)
    return fcodel;
  else if (mtype == float16_type_node)
    return fcodef16;
  else if (mtype == float32_type_node)
    return fcodef32;
  else if (mtype == float64_type_node)
    return fcodef64;
  else if (mtype == float128_type_node)
    return fcodef128;
  else if (mtype == float32x_type_node)
    return fcodef32x;
  else if (mtype == float64x_type_node)
    return fcodef64x;
  else if (mtype == float128x_type_node)
    return fcodef128x;
  else
    return END_BUILTINS;
}
/* Return the mathematical function equivalent to FN but operating directly on
   TYPE, if available.  If IMPLICIT_P is true use the implicit builtin
   declaration, otherwise use the explicit declaration.  If we can't do the
   conversion, return null.  */

static tree
mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
{
  built_in_function fcode2 = mathfn_built_in_2 (type, fn);
  if (fcode2 == END_BUILTINS)
    return NULL_TREE;

  if (implicit_p && !builtin_decl_implicit_p (fcode2))
    return NULL_TREE;

  return builtin_decl_explicit (fcode2);
}

/* Like mathfn_built_in_1, but always use the implicit array.  */

tree
mathfn_built_in (tree type, combined_fn fn)
{
  return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
}

/* Like mathfn_built_in_1, but take a built_in_function and
   always use the implicit array.  */

tree
mathfn_built_in (tree type, enum built_in_function fn)
{
  return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
}
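
/* Illustrative example (editor's addition, not part of the original
   source): a typical use is

       tree fn = mathfn_built_in (float_type_node, BUILT_IN_SQRT);

   which returns the declaration of BUILT_IN_SQRTF when the target's C
   library is assumed to provide sqrtf implicitly, and NULL_TREE
   otherwise.  */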
/* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
   return its code, otherwise return IFN_LAST.  Note that this function
   only tests whether the function is defined in internals.def, not whether
   it is actually available on the target.  */

internal_fn
associated_internal_fn (tree fndecl)
{
  gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
  tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
  switch (DECL_FUNCTION_CODE (fndecl))
    {
#define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
    CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
#define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
    CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
#define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
    CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
#include "internal-fn.def"

    CASE_FLT_FN (BUILT_IN_POW10):
      return IFN_EXP10;

    CASE_FLT_FN (BUILT_IN_DREM):
      return IFN_REMAINDER;

    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
        return IFN_LDEXP;
      return IFN_LAST;

    default:
      return IFN_LAST;
    }
}
/* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
   on the current target by a call to an internal function, return the
   code of that internal function, otherwise return IFN_LAST.  The caller
   is responsible for ensuring that any side-effects of the built-in
   call are dealt with correctly.  E.g. if CALL sets errno, the caller
   must decide that the errno result isn't needed or make it available
   in some other way.  */

internal_fn
replacement_internal_fn (gcall *call)
{
  if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
    {
      internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
      if (ifn != IFN_LAST)
        {
          tree_pair types = direct_internal_fn_types (ifn, call);
          optimization_type opt_type = bb_optimization_type (gimple_bb (call));
          if (direct_internal_fn_supported_p (ifn, types, opt_type))
            return ifn;
        }
    }
  return IFN_LAST;
}
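
/* Illustrative example (editor's addition, not part of the original
   source): for a statement like "y = __builtin_sqrt (x)" on a target
   whose sqrt optab is supported for the argument's mode,
   replacement_internal_fn returns IFN_SQRT; the caller may then rewrite
   the gcall to the internal function, provided it has dealt with the
   errno side effect as described above.  */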
/* Expand a call to the builtin ternary math functions (fma).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, op2, result;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1, arg2;
  machine_mode mode;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  arg2 = CALL_EXPR_ARG (exp, 2);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_FMA):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
      builtin_optab = fma_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  result = gen_reg_rtx (mode);

  /* Always stabilize the argument list.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
  CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);
  op2 = expand_normal (arg2);

  start_sequence ();

  /* Compute into RESULT.
     Set RESULT to wherever the result comes back.  */
  result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
                              result, 0);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (result == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return result;
}
/* Expand a call to the builtin sin and cos math functions.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      builtin_optab = sincos_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Check if sincos insn is available, otherwise fallback
     to sin or cos insn.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_FLT_FN (BUILT_IN_SIN):
        builtin_optab = sin_optab; break;
      CASE_FLT_FN (BUILT_IN_COS):
        builtin_optab = cos_optab; break;
      default:
        gcc_unreachable ();
      }

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
         need to expand the argument again.  This way, we will not perform
         side effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into RESULT.
         Set RESULT to wherever the result comes back.  */
      if (builtin_optab == sincos_optab)
        {
          int ok;

          switch (DECL_FUNCTION_CODE (fndecl))
            {
            CASE_FLT_FN (BUILT_IN_SIN):
              ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
              break;
            CASE_FLT_FN (BUILT_IN_COS):
              ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
              break;
            default:
              gcc_unreachable ();
            }
          gcc_assert (ok);
        }
      else
        result = expand_unop (mode, builtin_optab, op0, result, 0);

      if (result != 0)
        {
          /* Output the entire sequence.  */
          insns = get_insns ();
          end_sequence ();
          emit_insn (insns);
          return result;
        }

      /* If we were unable to expand via the builtin, stop the sequence
         (without outputting the insns) and call to the library function
         with the stabilized argument list.  */
      end_sequence ();
    }

  return expand_call (exp, target, target == const0_rtx);
}
/* Given an interclass math builtin decl FNDECL and its argument ARG
   return an RTL instruction code that implements the functionality.
   If that isn't possible or available return CODE_FOR_nothing.  */

static enum insn_code
interclass_mathfn_icode (tree arg, tree fndecl)
{
  bool errno_set = false;
  optab builtin_optab = unknown_optab;
  machine_mode mode;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ILOGB):
      errno_set = true; builtin_optab = ilogb_optab; break;
    CASE_FLT_FN (BUILT_IN_ISINF):
      builtin_optab = isinf_optab; break;
    case BUILT_IN_ISNORMAL:
    case BUILT_IN_ISFINITE:
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      /* These builtins have no optabs (yet).  */
      break;
    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && errno_set)
    return CODE_FOR_nothing;

  /* Optab mode depends on the mode of the input argument.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (builtin_optab)
    return optab_handler (builtin_optab, mode);
  return CODE_FOR_nothing;
}
/* Expand a call to one of the builtin math functions that operate on
   floating point argument and output an integer result (ilogb, isinf,
   isnan, etc).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_interclass_mathfn (tree exp, rtx target)
{
  enum insn_code icode = CODE_FOR_nothing;
  rtx op0;
  tree fndecl = get_callee_fndecl (exp);
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  icode = interclass_mathfn_icode (arg, fndecl);
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[1];
      rtx_insn *last = get_last_insn ();
      tree orig_arg = arg;

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
         need to expand the argument again.  This way, we will not perform
         side effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      if (mode != GET_MODE (op0))
        op0 = convert_to_mode (mode, op0, 0);

      create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_legitimize_operands (icode, 0, 1, ops)
          && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
        return ops[0].value;

      delete_insns_since (last);
      CALL_EXPR_ARG (exp, 0) = orig_arg;
    }

  return NULL_RTX;
}
/* Expand a call to the builtin sincos math function.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function.  */

static rtx
expand_builtin_sincos (tree exp)
{
  rtx op0, op1, op2, target1, target2;
  machine_mode mode;
  tree arg, sinp, cosp;
  int result;
  location_t loc = EXPR_LOCATION (exp);
  tree alias_type, alias_off;

  if (!validate_arglist (exp, REAL_TYPE,
                         POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  sinp = CALL_EXPR_ARG (exp, 1);
  cosp = CALL_EXPR_ARG (exp, 2);

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Check if sincos insn is available, otherwise emit the call.  */
  if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  target1 = gen_reg_rtx (mode);
  target2 = gen_reg_rtx (mode);

  op0 = expand_normal (arg);
  alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
  alias_off = build_int_cst (alias_type, 0);
  op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
                                        sinp, alias_off));
  op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
                                        cosp, alias_off));

  /* Compute into target1 and target2.
     Set TARGET to wherever the result comes back.  */
  result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
  gcc_assert (result);

  /* Move target1 and target2 to the memory locations indicated
     by op1 and op2.  */
  emit_move_insn (op1, target1);
  emit_move_insn (op2, target2);

  return const0_rtx;
}
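
/* Illustrative example (editor's addition, not part of the original
   source): for "sincos (x, &s, &c)" on a target implementing the sincos
   optab, the expansion above computes both values in one operation and
   stores them through the two pointer arguments; without the optab the
   builtin simply falls back to a normal library call.  */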
/* Expand a call to the internal cexpi builtin to the sincos math function.
   EXP is the expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  */

static rtx
expand_builtin_cexpi (tree exp, rtx target)
{
  tree fndecl = get_callee_fndecl (exp);
  tree arg, type;
  machine_mode mode;
  rtx op0, op1, op2;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Try expanding via a sincos optab, fall back to emitting a libcall
     to sincos or cexp.  We are sure we have sincos or cexp because cexpi
     is only generated from sincos, cexp or if we have either of them.  */
  if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
    {
      op1 = gen_reg_rtx (mode);
      op2 = gen_reg_rtx (mode);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      /* Compute into op1 and op2.  */
      expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
    }
  else if (targetm.libc_has_function (function_sincos))
    {
      tree call, fn = NULL_TREE;
      tree top1, top2;
      rtx op1a, op2a;

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
        fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
        fn = builtin_decl_explicit (BUILT_IN_SINCOS);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
        fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
      else
        gcc_unreachable ();

      op1 = assign_temp (TREE_TYPE (arg), 1, 1);
      op2 = assign_temp (TREE_TYPE (arg), 1, 1);
      op1a = copy_addr_to_reg (XEXP (op1, 0));
      op2a = copy_addr_to_reg (XEXP (op2, 0));
      top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
      top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);

      /* Make sure not to fold the sincos call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
                                      call, 3, arg, top1, top2));
    }
  else
    {
      tree call, fn = NULL_TREE, narg;
      tree ctype = build_complex_type (type);

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
        fn = builtin_decl_explicit (BUILT_IN_CEXPF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
        fn = builtin_decl_explicit (BUILT_IN_CEXP);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
        fn = builtin_decl_explicit (BUILT_IN_CEXPL);
      else
        gcc_unreachable ();

      /* If we don't have a decl for cexp create one.  This is the
         friendliest fallback if the user calls __builtin_cexpi
         without full target C99 function support.  */
      if (fn == NULL_TREE)
        {
          tree fntype;
          const char *name = NULL;

          if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
            name = "cexpf";
          else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
            name = "cexp";
          else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
            name = "cexpl";

          fntype = build_function_type_list (ctype, ctype, NULL_TREE);
          fn = build_fn_decl (name, fntype);
        }

      narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
                              build_real (type, dconst0), arg);

      /* Make sure not to fold the cexp call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      return expand_expr (build_call_nary (ctype, call, 1, narg),
                          target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now build the proper return type.  */
  return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
                              make_tree (TREE_TYPE (arg), op2),
                              make_tree (TREE_TYPE (arg), op1)),
                      target, VOIDmode, EXPAND_NORMAL);
}
/* Conveniently construct a function call expression.  FNDECL names the
   function to be called, N is the number of arguments, and the "..."
   parameters are the argument expressions.  Unlike build_call_expr
   this doesn't fold the call, hence it will always return a CALL_EXPR.  */

static tree
build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
{
  va_list ap;
  tree fntype = TREE_TYPE (fndecl);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);

  va_start (ap, n);
  fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
  va_end (ap);
  SET_EXPR_LOCATION (fn, loc);
  return fn;
}
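
/* Illustrative example (editor's addition, not part of the original
   source): a typical use appears later in this file,

       tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
       tree result = build_call_nofold_loc (loc, fn, 2, dst, src);

   which builds a plain CALL_EXPR to strcpy without folding it back
   into a builtin.  */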
/* Expand a call to one of the builtin rounding functions gcc defines
   as an extension (lfloor and lceil).  As these are gcc extensions we
   do not need to worry about setting errno to EDOM.
   If expanding via optab fails, lower expression to (int)(floor(x)).
   EXP is the expression that is a call to the builtin function;
   if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, tmp;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fallback_fn;
  tree fallback_fndecl;
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
      builtin_optab = lceil_optab;
      fallback_fn = BUILT_IN_CEIL;
      break;

    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      builtin_optab = lfloor_optab;
      fallback_fn = BUILT_IN_FLOOR;
      break;

    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side effects more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  /* Compute into TARGET.  */
  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns).  */
  end_sequence ();

  /* Fall back to floating point rounding optab.  */
  fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);

  /* For non-C99 targets we may end up without a fallback fndecl here
     if the user called __builtin_lfloor directly.  In this case emit
     a call to the floor/ceil variants nevertheless.  This should result
     in the best user experience for targets lacking full C99 support.  */
  if (fallback_fndecl == NULL_TREE)
    {
      tree fntype;
      const char *name = NULL;

      switch (DECL_FUNCTION_CODE (fndecl))
        {
        case BUILT_IN_ICEIL:
        case BUILT_IN_LCEIL:
        case BUILT_IN_LLCEIL:
          name = "ceil";
          break;
        case BUILT_IN_ICEILF:
        case BUILT_IN_LCEILF:
        case BUILT_IN_LLCEILF:
          name = "ceilf";
          break;
        case BUILT_IN_ICEILL:
        case BUILT_IN_LCEILL:
        case BUILT_IN_LLCEILL:
          name = "ceill";
          break;
        case BUILT_IN_IFLOOR:
        case BUILT_IN_LFLOOR:
        case BUILT_IN_LLFLOOR:
          name = "floor";
          break;
        case BUILT_IN_IFLOORF:
        case BUILT_IN_LFLOORF:
        case BUILT_IN_LLFLOORF:
          name = "floorf";
          break;
        case BUILT_IN_IFLOORL:
        case BUILT_IN_LFLOORL:
        case BUILT_IN_LLFLOORL:
          name = "floorl";
          break;
        default:
          gcc_unreachable ();
        }

      fntype = build_function_type_list (TREE_TYPE (arg),
                                         TREE_TYPE (arg), NULL_TREE);
      fallback_fndecl = build_fn_decl (name, fntype);
    }

  exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);

  tmp = expand_normal (exp);
  tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));

  /* Truncate the result of floating point optab to integer
     via expand_fix ().  */
  target = gen_reg_rtx (mode);
  expand_fix (target, tmp, 0);

  return target;
}
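
/* Illustrative example (editor's addition, not part of the original
   source): on a target without an lceil pattern, "long l =
   __builtin_lceil (d)" is lowered by the fallback path above roughly as

       double tmp = ceil (d);
       long l = (long) tmp;

   with the final truncation performed by expand_fix, matching the
   (int)(floor(x)) lowering described in the function comment.  */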
/* Expand a call to one of the builtin math functions doing integer
   conversion (lrint).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn_2 (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg;
  machine_mode mode;
  enum built_in_function fallback_fn = BUILT_IN_NONE;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_IRINT):
      fallback_fn = BUILT_IN_LRINT;
      gcc_fallthrough ();
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      builtin_optab = lrint_optab;
      break;

    CASE_FLT_FN (BUILT_IN_IROUND):
      fallback_fn = BUILT_IN_LROUND;
      gcc_fallthrough ();
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      builtin_optab = lround_optab;
      break;

    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
    return NULL_RTX;

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (!flag_errno_math)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
         need to expand the argument again.  This way, we will not perform
         side effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      if (expand_sfix_optab (result, op0, builtin_optab))
        {
          /* Output the entire sequence.  */
          insns = get_insns ();
          end_sequence ();
          emit_insn (insns);
          return result;
        }

      /* If we were unable to expand via the builtin, stop the sequence
         (without outputting the insns) and call to the library function
         with the stabilized argument list.  */
      end_sequence ();
    }

  if (fallback_fn != BUILT_IN_NONE)
    {
      /* Fall back to rounding to long int.  Use implicit_p 0 - for non-C99
         targets, (int) round (x) should never be transformed into
         BUILT_IN_IROUND and if __builtin_iround is called directly, emit
         a call to lround in the hope that the target provides at least some
         C99 functions.  This should result in the best user experience for
         targets lacking full C99 support.  */
      tree fallback_fndecl = mathfn_built_in_1
        (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);

      exp = build_call_nofold_loc (EXPR_LOCATION (exp),
                                   fallback_fndecl, 1, arg);

      target = expand_call (exp, NULL_RTX, target == const0_rtx);
      target = maybe_emit_group_store (target, TREE_TYPE (exp));
      return convert_to_mode (mode, target, 0);
    }

  return expand_call (exp, target, target == const0_rtx);
}
/* Expand a call to the powi built-in mathematical function.  Return NULL_RTX if
   a normal call should be emitted rather than expanding the function
   in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_powi (tree exp, rtx target)
{
  tree arg0, arg1;
  rtx op0, op1;
  machine_mode mode;
  machine_mode mode2;

  if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Emit a libcall to libgcc.  */

  /* Mode of the 2nd argument must match that of an int.  */
  mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();

  if (target == NULL_RTX)
    target = gen_reg_rtx (mode);

  op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != mode)
    op0 = convert_to_mode (mode, op0, 0);
  op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
  if (GET_MODE (op1) != mode2)
    op1 = convert_to_mode (mode2, op1, 0);

  target = emit_library_call_value (optab_libfunc (powi_optab, mode),
                                    target, LCT_CONST, mode,
                                    op0, mode, op1, mode2);

  return target;
}
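
/* Illustrative example (editor's addition; the libgcc routine name is
   the usual one but should be treated as an assumption): for DFmode,
   optab_libfunc (powi_optab, mode) resolves to the libgcc function
   __powidf2, so "__builtin_powi (x, n)" becomes the libcall
   __powidf2 (x, n) with N first converted to the mode of an int.  */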
/* Expand expression EXP which is a call to the strlen builtin.  Return
   NULL_RTX if we failed and the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strlen (tree exp, rtx target,
                       machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  struct expand_operand ops[4];
  rtx pat;
  tree len;
  tree src = CALL_EXPR_ARG (exp, 0);
  rtx src_reg;
  rtx_insn *before_strlen;
  machine_mode insn_mode;
  enum insn_code icode = CODE_FOR_nothing;
  unsigned int align;

  /* If the length can be computed at compile-time, return it.  */
  len = c_strlen (src, 0);
  if (len)
    return expand_expr (len, target, target_mode, EXPAND_NORMAL);

  /* If the length can be computed at compile-time and is constant
     integer, but there are side-effects in src, evaluate
     src for side-effects, then return len.
     E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
     can be optimized into: i++; x = 3;  */
  len = c_strlen (src, 1);
  if (len && TREE_CODE (len) == INTEGER_CST)
    {
      expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (len, target, target_mode, EXPAND_NORMAL);
    }

  align = get_pointer_alignment (src) / BITS_PER_UNIT;

  /* If SRC is not a pointer type, don't do this operation inline.  */
  if (align == 0)
    return NULL_RTX;

  /* Bail out if we can't compute strlen in the right mode.  */
  FOR_EACH_MODE_FROM (insn_mode, target_mode)
    {
      icode = optab_handler (strlen_optab, insn_mode);
      if (icode != CODE_FOR_nothing)
        break;
    }
  if (insn_mode == VOIDmode)
    return NULL_RTX;

  /* Make a place to hold the source address.  We will not expand
     the actual source until we are sure that the expansion will
     not fail -- there are trees that cannot be expanded twice.  */
  src_reg = gen_reg_rtx (Pmode);

  /* Mark the beginning of the strlen sequence so we can emit the
     source operand later.  */
  before_strlen = get_last_insn ();

  create_output_operand (&ops[0], target, insn_mode);
  create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
  create_integer_operand (&ops[2], 0);
  create_integer_operand (&ops[3], align);
  if (!maybe_expand_insn (icode, 4, ops))
    return NULL_RTX;

  /* Check to see if the argument was declared attribute nonstring
     and if so, issue a warning since at this point it's not known
     to be nul-terminated.  */
  maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);

  /* Now that we are assured of success, expand the source.  */
  start_sequence ();
  pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
  if (pat != src_reg)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      if (GET_MODE (pat) != Pmode)
        pat = convert_to_mode (Pmode, pat,
                               POINTERS_EXTEND_UNSIGNED);
#endif
      emit_move_insn (src_reg, pat);
    }
  pat = get_insns ();
  end_sequence ();

  if (before_strlen)
    emit_insn_after (pat, before_strlen);
  else
    emit_insn_before (pat, get_insns ());

  /* Return the value in the proper mode for this function.  */
  if (GET_MODE (ops[0].value) == target_mode)
    target = ops[0].value;
  else if (target != 0)
    convert_move (target, ops[0].value, 0);
  else
    target = convert_to_mode (target_mode, ops[0].value, 0);

  return target;
}
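
/* Illustrative example (editor's addition, not part of the original
   source): the fast paths above mean that

       size_t n = strlen ("abc");                      n = 3, no call
       size_t m = strlen (i++ ? "xfoo" + 1 : "bar");   i++; m = 3;

   while a non-constant argument uses the target's strlen insn pattern
   if one exists, and otherwise a normal library call.  */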
/* Expand call EXP to the strnlen built-in, returning the result
   and setting it in TARGET.  Otherwise return NULL_RTX on failure.  */

static rtx
expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree src = CALL_EXPR_ARG (exp, 0);
  tree bound = CALL_EXPR_ARG (exp, 1);

  if (!bound)
    return NULL_RTX;

  location_t loc = UNKNOWN_LOCATION;
  if (EXPR_HAS_LOCATION (exp))
    loc = EXPR_LOCATION (exp);

  tree maxobjsize = max_object_size ();
  tree func = get_callee_fndecl (exp);

  tree len = c_strlen (src, 0);

  if (TREE_CODE (bound) == INTEGER_CST)
    {
      if (!TREE_NO_WARNING (exp)
          && tree_int_cst_lt (maxobjsize, bound)
          && warning_at (loc, OPT_Wstringop_overflow_,
                         "%K%qD specified bound %E "
                         "exceeds maximum object size %E",
                         exp, func, bound, maxobjsize))
        TREE_NO_WARNING (exp) = true;

      if (!len || TREE_CODE (len) != INTEGER_CST)
        return NULL_RTX;

      len = fold_convert_loc (loc, size_type_node, len);
      len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
      return expand_expr (len, target, target_mode, EXPAND_NORMAL);
    }

  if (TREE_CODE (bound) != SSA_NAME)
    return NULL_RTX;

  wide_int min, max;
  enum value_range_type rng = get_range_info (bound, &min, &max);
  if (rng != VR_RANGE)
    return NULL_RTX;

  if (!TREE_NO_WARNING (exp)
      && wi::ltu_p (wi::to_wide (maxobjsize), min)
      && warning_at (loc, OPT_Wstringop_overflow_,
                     "%K%qD specified bound [%wu, %wu] "
                     "exceeds maximum object size %E",
                     exp, func, min.to_uhwi (), max.to_uhwi (), maxobjsize))
    TREE_NO_WARNING (exp) = true;

  if (!len || TREE_CODE (len) != INTEGER_CST)
    return NULL_RTX;

  if (wi::gtu_p (min, wi::to_wide (len)))
    return expand_expr (len, target, target_mode, EXPAND_NORMAL);

  len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
  return expand_expr (len, target, target_mode, EXPAND_NORMAL);
}
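
/* Illustrative example (editor's addition, not part of the original
   source): for "strnlen (s, n)" where S is known to point to "hello"
   and N has the value range [8, 16], the code above proves that the
   bound cannot truncate the result and expands the call to the
   constant 5 without emitting any call.  */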
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

static rtx
builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
                         scalar_int_mode mode)
{
  const char *str = (const char *) data;

  gcc_assert (offset >= 0
              && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
                  <= strlen (str) + 1));

  return c_readstr (str + offset, mode);
}
/* LEN specifies the length of the block of the memcpy/memset operation.
   Figure out its range and put it into MIN_SIZE/MAX_SIZE.
   In some cases we can make very likely guess on max size, then we
   set it into PROBABLE_MAX_SIZE.  */

static void
determine_block_size (tree len, rtx len_rtx,
                      unsigned HOST_WIDE_INT *min_size,
                      unsigned HOST_WIDE_INT *max_size,
                      unsigned HOST_WIDE_INT *probable_max_size)
{
  if (CONST_INT_P (len_rtx))
    {
      *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
      return;
    }
  else
    {
      wide_int min, max;
      enum value_range_type range_type = VR_UNDEFINED;

      /* Determine bounds from the type.  */
      if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
        *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
      else
        *min_size = 0;
      if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
        *probable_max_size = *max_size
          = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
      else
        *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));

      if (TREE_CODE (len) == SSA_NAME)
        range_type = get_range_info (len, &min, &max);
      if (range_type == VR_RANGE)
        {
          if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
            *min_size = min.to_uhwi ();
          if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
            *probable_max_size = *max_size = max.to_uhwi ();
        }
      else if (range_type == VR_ANTI_RANGE)
        {
          /* An anti range 0...N lets us determine a minimal size of N+1.  */
          if (min == 0)
            {
              if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
                *min_size = max.to_uhwi () + 1;
            }
          /* Code like

             if (n < 100)
               memcpy (a, b, n)

             produces an anti range allowing negative values of N.  We still
             can use the information and make a guess that N is not negative.
             */
          else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
            *probable_max_size = min.to_uhwi () - 1;
        }
    }
  gcc_checking_assert (*max_size <=
                       (unsigned HOST_WIDE_INT)
                       GET_MODE_MASK (GET_MODE (len_rtx)));
}
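
/* Illustrative example (editor's addition, not part of the original
   source): for

       if (n < 100)
         memcpy (a, b, n);

   with signed N converted to size_t, the recorded anti range makes the
   code above set *probable_max_size to 99, while *max_size keeps the
   conservative mode-mask bound.  */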
/* Try to verify that the sizes and lengths of the arguments to a string
   manipulation function given by EXP are within valid bounds and that
   the operation does not lead to buffer overflow or read past the end.
   Arguments other than EXP may be null.  When non-null, the arguments
   have the following meaning:
   DST is the destination of a copy call or NULL otherwise.
   SRC is the source of a copy call or NULL otherwise.
   DSTWRITE is the number of bytes written into the destination obtained
   from the user-supplied size argument to the function (such as in
   memcpy (DST, SRC, DSTWRITE) or strncpy (DST, SRC, DSTWRITE)).
   MAXREAD is the user-supplied bound on the length of the source sequence
   (such as in strncat (D, S, N)).  It specifies the upper limit on the number
   of bytes to write.  If NULL, it's taken to be the same as DSTWRITE.
   SRCSTR is the source string (such as in strcpy (DST, SRC)) when the
   expression EXP is a string function call (as opposed to a memory call
   like memcpy).  As an exception, SRCSTR can also be an integer denoting
   the precomputed size of the source string or object (for functions like
   memcpy).
   DSTSIZE is the size of the destination object specified by the last
   argument to the _chk builtins, typically resulting from the expansion
   of __builtin_object_size (such as in __builtin___strcpy_chk (DST, SRC,
   DSTSIZE)).

   When DSTWRITE is null LEN is checked to verify that it doesn't exceed
   the maximum object size.

   If the call is successfully verified as safe return true, otherwise
   return false.  */

static bool
check_access (tree exp, tree, tree, tree dstwrite,
              tree maxread, tree srcstr, tree dstsize)
{
  int opt = OPT_Wstringop_overflow_;
3114 int opt
= OPT_Wstringop_overflow_
;
3116 /* The size of the largest object is half the address space, or
3117 PTRDIFF_MAX. (This is way too permissive.) */
3118 tree maxobjsize
= max_object_size ();
3120 /* Either the length of the source string for string functions or
3121 the size of the source object for raw memory functions. */
3122 tree slen
= NULL_TREE
;
3124 tree range
[2] = { NULL_TREE
, NULL_TREE
};
3126 /* Set to true when the exact number of bytes written by a string
3127 function like strcpy is not known and the only thing that is
3128 known is that it must be at least one (for the terminating nul). */
3129 bool at_least_one
= false;
3132 /* SRCSTR is normally a pointer to string but as a special case
3133 it can be an integer denoting the length of a string. */
3134 if (POINTER_TYPE_P (TREE_TYPE (srcstr
)))
3136 /* Try to determine the range of lengths the source string
3137 refers to. If it can be determined and is less than
3138 the upper bound given by MAXREAD add one to it for
3139 the terminating nul. Otherwise, set it to one for
3140 the same reason, or to MAXREAD as appropriate. */
3141 get_range_strlen (srcstr
, range
);
3142 if (range
[0] && (!maxread
|| TREE_CODE (maxread
) == INTEGER_CST
))
3144 if (maxread
&& tree_int_cst_le (maxread
, range
[0]))
3145 range
[0] = range
[1] = maxread
;
3147 range
[0] = fold_build2 (PLUS_EXPR
, size_type_node
,
3148 range
[0], size_one_node
);
3150 if (maxread
&& tree_int_cst_le (maxread
, range
[1]))
3152 else if (!integer_all_onesp (range
[1]))
3153 range
[1] = fold_build2 (PLUS_EXPR
, size_type_node
,
3154 range
[1], size_one_node
);
3160 at_least_one
= true;
3161 slen
= size_one_node
;
3168 if (!dstwrite
&& !maxread
)
3170 /* When the only available piece of data is the object size
3171 there is nothing to do. */
3175 /* Otherwise, when the length of the source sequence is known
3176 (as with strlen), set DSTWRITE to it. */
3182 dstsize
= maxobjsize
;
3185 get_size_range (dstwrite
, range
);
3187 tree func
= get_callee_fndecl (exp
);
3189 /* First check the number of bytes to be written against the maximum
3191 if (range
[0] && tree_int_cst_lt (maxobjsize
, range
[0]))
3193 if (TREE_NO_WARNING (exp
))
3196 location_t loc
= tree_nonartificial_location (exp
);
3197 loc
= expansion_point_location_if_in_system_header (loc
);
3200 if (range
[0] == range
[1])
3201 warned
= warning_at (loc
, opt
,
3202 "%K%qD specified size %E "
3203 "exceeds maximum object size %E",
3204 exp
, func
, range
[0], maxobjsize
);
3206 warned
= warning_at (loc
, opt
,
3207 "%K%qD specified size between %E and %E "
3208 "exceeds maximum object size %E",
3210 range
[0], range
[1], maxobjsize
);
3212 TREE_NO_WARNING (exp
) = true;
3217 /* The number of bytes to write is "exact" if DSTWRITE is non-null,
3218 constant, and in range of unsigned HOST_WIDE_INT. */
3219 bool exactwrite
= dstwrite
&& tree_fits_uhwi_p (dstwrite
);
3221 /* Next check the number of bytes to be written against the destination
3223 if (range
[0] || !exactwrite
|| integer_all_onesp (dstwrite
))
3226 && ((tree_fits_uhwi_p (dstsize
)
3227 && tree_int_cst_lt (dstsize
, range
[0]))
3228 || (tree_fits_uhwi_p (dstwrite
)
3229 && tree_int_cst_lt (dstwrite
, range
[0]))))
3231 if (TREE_NO_WARNING (exp
))
3234 location_t loc
= tree_nonartificial_location (exp
);
3235 loc
= expansion_point_location_if_in_system_header (loc
);
3237 if (dstwrite
== slen
&& at_least_one
)
3239 /* This is a call to strcpy with a destination of 0 size
3240 and a source of unknown length. The call will write
3241 at least one byte past the end of the destination. */
3242 warning_at (loc
, opt
,
3243 "%K%qD writing %E or more bytes into a region "
3244 "of size %E overflows the destination",
3245 exp
, func
, range
[0], dstsize
);
3247 else if (tree_int_cst_equal (range
[0], range
[1]))
3248 warning_n (loc
, opt
, tree_to_uhwi (range
[0]),
3249 "%K%qD writing %E byte into a region "
3250 "of size %E overflows the destination",
3251 "%K%qD writing %E bytes into a region "
3252 "of size %E overflows the destination",
3253 exp
, func
, range
[0], dstsize
);
3254 else if (tree_int_cst_sign_bit (range
[1]))
3256 /* Avoid printing the upper bound if it's invalid. */
3257 warning_at (loc
, opt
,
3258 "%K%qD writing %E or more bytes into a region "
3259 "of size %E overflows the destination",
3260 exp
, func
, range
[0], dstsize
);
3263 warning_at (loc
, opt
,
3264 "%K%qD writing between %E and %E bytes into "
3265 "a region of size %E overflows the destination",
3266 exp
, func
, range
[0], range
[1],
3269 /* Return error when an overflow has been detected. */
3274 /* Check the maximum length of the source sequence against the size
3275 of the destination object if known, or against the maximum size
3279 get_size_range (maxread
, range
);
3281 /* Use the lower end for MAXREAD from now on. */
3285 if (range
[0] && dstsize
&& tree_fits_uhwi_p (dstsize
))
3287 location_t loc
= tree_nonartificial_location (exp
);
3288 loc
= expansion_point_location_if_in_system_header (loc
);
3290 if (tree_int_cst_lt (maxobjsize
, range
[0]))
3292 if (TREE_NO_WARNING (exp
))
3295 /* Warn about crazy big sizes first since that's more
3296 likely to be meaningful than saying that the bound
3297 is greater than the object size if both are big. */
3298 if (range
[0] == range
[1])
3299 warning_at (loc
, opt
,
3300 "%K%qD specified bound %E "
3301 "exceeds maximum object size %E",
3303 range
[0], maxobjsize
);
3305 warning_at (loc
, opt
,
3306 "%K%qD specified bound between %E and %E "
3307 "exceeds maximum object size %E",
3309 range
[0], range
[1], maxobjsize
);
3314 if (dstsize
!= maxobjsize
&& tree_int_cst_lt (dstsize
, range
[0]))
3316 if (TREE_NO_WARNING (exp
))
3319 if (tree_int_cst_equal (range
[0], range
[1]))
3320 warning_at (loc
, opt
,
3321 "%K%qD specified bound %E "
3322 "exceeds destination size %E",
3326 warning_at (loc
, opt
,
3327 "%K%qD specified bound between %E and %E "
3328 "exceeds destination size %E",
3330 range
[0], range
[1], dstsize
);
3336 /* Check for reading past the end of SRC. */
3339 && dstwrite
&& range
[0]
3340 && tree_int_cst_lt (slen
, range
[0]))
3342 if (TREE_NO_WARNING (exp
))
3345 location_t loc
= tree_nonartificial_location (exp
);
3347 if (tree_int_cst_equal (range
[0], range
[1]))
3348 warning_n (loc
, opt
, tree_to_uhwi (range
[0]),
3349 "%K%qD reading %E byte from a region of size %E",
3350 "%K%qD reading %E bytes from a region of size %E",
3351 exp
, func
, range
[0], slen
);
3352 else if (tree_int_cst_sign_bit (range
[1]))
3354 /* Avoid printing the upper bound if it's invalid. */
3355 warning_at (loc
, opt
,
3356 "%K%qD reading %E or more bytes from a region "
3358 exp
, func
, range
[0], slen
);
3361 warning_at (loc
, opt
,
3362 "%K%qD reading between %E and %E bytes from a region "
3364 exp
, func
, range
[0], range
[1], slen
);
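
/* Illustrative example (editor's addition, not part of the original
   source): for

       char d[4];
       strcpy (d, "hello");

   get_range_strlen yields [5, 5] for the source, one byte is added for
   the terminating nul, and the 6-byte write is diagnosed by the code
   above as overflowing the 4-byte destination.  */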
/* Helper to compute the size of the object referenced by the DEST
   expression which must have pointer type, using Object Size type
   OSTYPE (only the least significant 2 bits are used).  Return
   an estimate of the size of the object if successful or NULL when
   the size cannot be determined.  When the referenced object involves
   a non-constant offset in some range the returned value represents
   the largest size given the smallest non-negative offset in the
   range.  The function is intended for diagnostics and should not
   be used to influence code generation or optimization.  */

tree
compute_objsize (tree dest, int ostype)
{
  unsigned HOST_WIDE_INT size;

  /* Only the two least significant bits are meaningful.  */
  ostype &= 3;

  if (compute_builtin_object_size (dest, ostype, &size))
    return build_int_cst (sizetype, size);

  if (TREE_CODE (dest) == SSA_NAME)
    {
      gimple *stmt = SSA_NAME_DEF_STMT (dest);
      if (!is_gimple_assign (stmt))
        return NULL_TREE;

      dest = gimple_assign_rhs1 (stmt);

      tree_code code = gimple_assign_rhs_code (stmt);
      if (code == POINTER_PLUS_EXPR)
        {
          /* compute_builtin_object_size fails for addresses with
             non-constant offsets.  Try to determine the range of
             such an offset here and use it to adjust the constant
             size.  */
          tree off = gimple_assign_rhs2 (stmt);
          if (TREE_CODE (off) == INTEGER_CST)
            {
              if (tree size = compute_objsize (dest, ostype))
                {
                  wide_int wioff = wi::to_wide (off);
                  wide_int wisiz = wi::to_wide (size);

                  /* Ignore negative offsets for now.  For others,
                     use the lower bound as the most optimistic
                     estimate of the (remaining) size.  */
                  if (wi::sign_mask (wioff))
                    ;
                  else if (wi::ltu_p (wioff, wisiz))
                    return wide_int_to_tree (TREE_TYPE (size),
                                             wi::sub (wisiz, wioff));
                  else
                    return size_zero_node;
                }
            }
          else if (TREE_CODE (off) == SSA_NAME
                   && INTEGRAL_TYPE_P (TREE_TYPE (off)))
            {
              wide_int min, max;
              enum value_range_type rng = get_range_info (off, &min, &max);

              if (rng == VR_RANGE)
                {
                  if (tree size = compute_objsize (dest, ostype))
                    {
                      wide_int wisiz = wi::to_wide (size);

                      /* Ignore negative offsets for now.  For others,
                         use the lower bound as the most optimistic
                         estimate of the (remaining) size.  */
                      if (wi::sign_mask (min))
                        ;
                      else if (wi::ltu_p (min, wisiz))
                        return wide_int_to_tree (TREE_TYPE (size),
                                                 wi::sub (wisiz, min));
                      else
                        return size_zero_node;
                    }
                }
            }
        }
      else if (code != ADDR_EXPR)
        return NULL_TREE;
    }

  /* Unless computing the largest size (for memcpy and other raw memory
     functions), try to determine the size of the object from its type.  */
  if (!ostype)
    return NULL_TREE;

  if (TREE_CODE (dest) != ADDR_EXPR)
    return NULL_TREE;

  tree type = TREE_TYPE (dest);
  if (TREE_CODE (type) == POINTER_TYPE)
    type = TREE_TYPE (type);

  type = TYPE_MAIN_VARIANT (type);

  if (TREE_CODE (type) == ARRAY_TYPE
      && !array_at_struct_end_p (TREE_OPERAND (dest, 0)))
    {
      /* Return the constant size unless it's zero (that's a zero-length
         array likely at the end of a struct).  */
      tree size = TYPE_SIZE_UNIT (type);
      if (size && TREE_CODE (size) == INTEGER_CST
          && !integer_zerop (size))
        return size;
    }

  return NULL_TREE;
}
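
/* Illustrative example (editor's addition, not part of the original
   source): given

       char a[8];
       char *p = a + i;        (i known to be in the range [2, 4])

   compute_objsize (p, 1) returns 6: the object size less the smallest
   offset in the range, i.e. the most optimistic remaining size used by
   the diagnostics above.  */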
/* Helper to determine and check the sizes of the source and the destination
   of calls to __builtin_{bzero,memcpy,mempcpy,memset} calls.  EXP is the
   call expression, DEST is the destination argument, SRC is the source
   argument or null, and LEN is the number of bytes.  Use Object Size type-0
   regardless of the OPT_Wstringop_overflow_ setting.  Return true on success
   (no overflow or invalid sizes), false otherwise.  */

static bool
check_memop_access (tree exp, tree dest, tree src, tree size)
{
  /* For functions like memset and memcpy that operate on raw memory
     try to determine the size of the largest source and destination
     object using type-0 Object Size regardless of the object size
     type specified by the option.  */
  tree srcsize = src ? compute_objsize (src, 0) : NULL_TREE;
  tree dstsize = compute_objsize (dest, 0);

  return check_access (exp, dest, src, size, /*maxread=*/NULL_TREE,
                       srcsize, dstsize);
}
/* Validate memchr arguments without performing any expansion.
   Return NULL_RTX.  */

static rtx
expand_builtin_memchr (tree exp, rtx)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree len = CALL_EXPR_ARG (exp, 2);

  /* Diagnose calls where the specified length exceeds the size
     of the object.  */
  if (warn_stringop_overflow)
    {
      tree size = compute_objsize (arg1, 0);
      check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
                    /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE);
    }

  return NULL_RTX;
}
/* Expand a call EXP to the memcpy builtin.
   Return NULL_RTX if we failed, the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memcpy (tree exp, rtx target)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);

  check_memop_access (exp, dest, src, len);

  return expand_builtin_memory_copy_args (dest, src, len, target, exp,
                                          /*endp=*/ 0);
}
/* Check a call EXP to the memmove built-in for validity.
   Return NULL_RTX on both success and failure.  */

static rtx
expand_builtin_memmove (tree exp, rtx)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);

  check_memop_access (exp, dest, src, len);

  return NULL_RTX;
}
/* Expand a call EXP to the mempcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  If ENDP is 0 return the
   destination pointer, if ENDP is 1 return the end pointer ala
   mempcpy, and if ENDP is 2 return the end pointer minus one ala
   stpcpy.  */

static rtx
expand_builtin_mempcpy (tree exp, rtx target)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);

  /* Policy does not generally allow using compute_objsize (which
     is used internally by check_memop_access) to change code generation
     or drive optimization decisions.

     In this instance it is safe because the code we generate has
     the same semantics regardless of the return value of
     check_memop_access.  Exactly the same amount of data is copied
     and the return value is exactly the same in both cases.

     Furthermore, check_memop_access always uses mode 0 for the call to
     compute_objsize, so the imprecise nature of compute_objsize is
     avoided.  */

  /* Avoid expanding mempcpy into memcpy when the call is determined
     to overflow the buffer.  This also prevents the same overflow
     from being diagnosed again when expanding memcpy.  */
  if (!check_memop_access (exp, dest, src, len))
    return NULL_RTX;

  return expand_builtin_mempcpy_args (dest, src, len,
                                      target, exp, /*endp=*/ 1);
}
/* Helper function to do the actual work for expand of memory copy family
   functions (memcpy, mempcpy, stpcpy).  The expansion should assign LEN bytes
   of memory from SRC to DEST and assign to TARGET if convenient.
   If ENDP is 0 return the
   destination pointer, if ENDP is 1 return the end pointer ala
   mempcpy, and if ENDP is 2 return the end pointer minus one ala
   stpcpy.  */

static rtx
expand_builtin_memory_copy_args (tree dest, tree src, tree len,
                                 rtx target, tree exp, int endp)
{
  const char *src_str;
  unsigned int src_align = get_pointer_alignment (src);
  unsigned int dest_align = get_pointer_alignment (dest);
  rtx dest_mem, src_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;
  unsigned HOST_WIDE_INT min_size;
  unsigned HOST_WIDE_INT max_size;
  unsigned HOST_WIDE_INT probable_max_size;

  /* If DEST is not a pointer type, call the normal function.  */
  if (dest_align == 0)
    return NULL_RTX;

  /* If SRC is not a pointer type, don't do this
     operation in-line.  */
  if (src_align == 0)
    return NULL_RTX;

  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
                            &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;
  dest_mem = get_memory_rtx (dest, len);
  set_mem_align (dest_mem, dest_align);
  len_rtx = expand_normal (len);
  determine_block_size (len, len_rtx, &min_size, &max_size,
                        &probable_max_size);
  src_str = c_getstr (src);

  /* If SRC is a string constant and block move would be done
     by pieces, we can avoid loading the string from memory
     and only store the computed constants.  */
  if (src_str
      && CONST_INT_P (len_rtx)
      && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
      && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
                              CONST_CAST (char *, src_str),
                              dest_align, false))
    {
      dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
                                  builtin_memcpy_read_str,
                                  CONST_CAST (char *, src_str),
                                  dest_align, false, endp);
      dest_mem = force_operand (XEXP (dest_mem, 0), target);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  src_mem = get_memory_rtx (src, len);
  set_mem_align (src_mem, src_align);

  /* Copy word part most expediently.  */
  enum block_op_methods method = BLOCK_OP_NORMAL;
  if (CALL_EXPR_TAILCALL (exp) && (endp == 0 || target == const0_rtx))
    method = BLOCK_OP_TAILCALL;
  if (endp == 1 && target != const0_rtx)
    method = BLOCK_OP_NO_LIBCALL_RET;
  dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
                                     expected_align, expected_size,
                                     min_size, max_size, probable_max_size);
  if (dest_addr == pc_rtx)
    return NULL_RTX;

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), target);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  if (endp && target != const0_rtx)
    {
      dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
      /* stpcpy pointer to last byte.  */
      if (endp == 2)
        dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
    }

  return dest_addr;
}
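
/* Illustrative note (editor's addition, not part of the original
   source): the ENDP convention above means that for
   "mempcpy (d, s, n)" the expansion returns d + n, while the
   stpcpy-style ENDP == 2 case returns d + n - 1, the address of the
   copied nul terminator.  */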
static rtx
expand_builtin_mempcpy_args (tree dest, tree src, tree len,
                             rtx target, tree orig_exp, int endp)
{
  return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
                                          endp);
}
/* Expand into a movstr instruction, if one is available.  Return NULL_RTX if
   we failed, the caller should emit a normal call, otherwise try to
   get the result in TARGET, if convenient.  If ENDP is 0 return the
   destination pointer, if ENDP is 1 return the end pointer ala
   mempcpy, and if ENDP is 2 return the end pointer minus one ala
   stpcpy.  */

static rtx
expand_movstr (tree dest, tree src, rtx target, int endp)
{
  struct expand_operand ops[3];
  rtx dest_mem;
  rtx src_mem;

  if (!targetm.have_movstr ())
    return NULL_RTX;

  dest_mem = get_memory_rtx (dest, NULL);
  src_mem = get_memory_rtx (src, NULL);
  if (!endp)
    {
      target = force_reg (Pmode, XEXP (dest_mem, 0));
      dest_mem = replace_equiv_address (dest_mem, target);
    }

  create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
  create_fixed_operand (&ops[1], dest_mem);
  create_fixed_operand (&ops[2], src_mem);
  if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
    return NULL_RTX;

  if (endp && target != const0_rtx)
    {
      target = ops[0].value;
      /* movstr is supposed to set end to the address of the NUL
         terminator.  If the caller requested a mempcpy-like return value,
         adjust it.  */
      if (endp == 1)
        {
          rtx tem = plus_constant (GET_MODE (target),
                                   gen_lowpart (GET_MODE (target), target), 1);
          emit_move_insn (target, force_operand (tem, NULL_RTX));
        }
    }
  return target;
}
/* Do some very basic size validation of a call to the strcat builtin
   given by EXP.  Return NULL_RTX to have the built-in expand to a call
   to the library function.  */

static rtx
expand_builtin_strcat (tree exp, rtx)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
      || !warn_stringop_overflow)
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);

  /* There is no way here to determine the length of the string in
     the destination to which the SRC string is being appended so
     just diagnose cases when the source string is longer than
     the destination object.  */

  tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);

  check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, src,
                destsize);

  return NULL_RTX;
}
/* Expand expression EXP, which is a call to the strcpy builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
   try to get the result in TARGET, if convenient (and in mode MODE if that's
   convenient).  */

static rtx
expand_builtin_strcpy (tree exp, rtx target)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);

  if (warn_stringop_overflow)
    {
      tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
      check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
                    src, destsize);
    }

  if (rtx ret = expand_builtin_strcpy_args (dest, src, target))
    {
      /* Check to see if the argument was declared attribute nonstring
         and if so, issue a warning since at this point it's not known
         to be nul-terminated.  */
      tree fndecl = get_callee_fndecl (exp);
      maybe_warn_nonstring_arg (fndecl, exp);
      return ret;
    }

  return NULL_RTX;
}
/* Helper function to do the actual work for expand_builtin_strcpy.  The
   arguments to the builtin_strcpy call DEST and SRC are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_strcpy.  */

static rtx
expand_builtin_strcpy_args (tree dest, tree src, rtx target)
{
  return expand_movstr (dest, src, target, /*endp=*/0);
}
/* Expand a call EXP to the stpcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call.
   Otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
{
  tree dst, src;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  if (warn_stringop_overflow)
    {
      tree destsize = compute_objsize (dst, warn_stringop_overflow - 1);
      check_access (exp, dst, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
                    src, destsize);
    }

  /* If return value is ignored, transform stpcpy into strcpy.  */
  if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      tree len, lenp1;
      rtx ret;

      /* Ensure we get an actual string whose length can be evaluated at
         compile-time, not an expression containing a string.  This is
         because the latter will potentially produce pessimized code
         when used to produce the return value.  */
      if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
        return expand_movstr (dst, src, target, /*endp=*/2);

      lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
      ret = expand_builtin_mempcpy_args (dst, src, lenp1,
                                         target, exp, /*endp=*/2);
      if (ret)
        return ret;

      if (TREE_CODE (len) == INTEGER_CST)
        {
          rtx len_rtx = expand_normal (len);

          if (CONST_INT_P (len_rtx))
            {
              ret = expand_builtin_strcpy_args (dst, src, target);

              if (ret)
                {
                  if (! target)
                    {
                      if (mode != VOIDmode)
                        target = gen_reg_rtx (mode);
                      else
                        target = gen_reg_rtx (GET_MODE (ret));
                    }
                  if (GET_MODE (target) != GET_MODE (ret))
                    ret = gen_lowpart (GET_MODE (target), ret);

                  ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
                  ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
                  gcc_assert (ret);

                  return target;
                }
            }
        }

      return expand_movstr (dst, src, target, /*endp=*/2);
    }
}
/* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
   arguments while being careful to avoid duplicate warnings (which could
   be issued if the expander were to expand the call, resulting in it
   being emitted in expand_call()).  */

static rtx
expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
{
  if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
    {
      /* The call has been successfully expanded.  Check for nonstring
         arguments and issue warnings as appropriate.  */
      maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
      return ret;
    }

  return NULL_RTX;
}
/* Check a call EXP to the stpncpy built-in for validity.
   Return NULL_RTX on both success and failure.  */

static rtx
expand_builtin_stpncpy (tree exp, rtx)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
      || !warn_stringop_overflow)
    return NULL_RTX;

  /* The source and destination of the call.  */
  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);

  /* The exact number of bytes to write (not the maximum).  */
  tree len = CALL_EXPR_ARG (exp, 2);

  /* The size of the destination object.  */
  tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);

  check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src, destsize);

  return NULL_RTX;
}
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

static rtx
builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
                          scalar_int_mode mode)
{
  const char *str = (const char *) data;

  if ((unsigned HOST_WIDE_INT) offset > strlen (str))
    return const0_rtx;

  return c_readstr (str + offset, mode);
}
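/* Editorial illustration (not from the original sources): when expanding
   strncpy (d, "ab", 8) by pieces, offsets within the string read its
   bytes via c_readstr (which zero-fills past the NUL), while offsets
   beyond strlen return const0_rtx; together these supply the trailing
   zero padding that strncpy requires.  */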
/* Helper to check the sizes of sequences and the destination of calls
   to __builtin_strncat and __builtin___strncat_chk.  Returns true on
   success (no overflow or invalid sizes), false otherwise.  */

static bool
check_strncat_sizes (tree exp, tree objsize)
{
  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  tree maxread = CALL_EXPR_ARG (exp, 2);

  /* Try to determine the range of lengths that the source expression
     refers to.  */
  tree lenrange[2];
  get_range_strlen (src, lenrange);

  /* Try to verify that the destination is big enough for the shortest
     string.  */

  if (!objsize && warn_stringop_overflow)
    {
      /* If it hasn't been provided by __strncat_chk, try to determine
         the size of the destination object into which the source is
         being copied.  */
      objsize = compute_objsize (dest, warn_stringop_overflow - 1);
    }

  /* Add one for the terminating nul.  */
  tree srclen = (lenrange[0]
                 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
                                size_one_node)
                 : NULL_TREE);

  /* The strncat function copies at most MAXREAD bytes and always appends
     the terminating nul so the specified upper bound should never be equal
     to (or greater than) the size of the destination.  */
  if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
      && tree_int_cst_equal (objsize, maxread))
    {
      location_t loc = tree_nonartificial_location (exp);
      loc = expansion_point_location_if_in_system_header (loc);

      warning_at (loc, OPT_Wstringop_overflow_,
                  "%K%qD specified bound %E equals destination size",
                  exp, get_callee_fndecl (exp), maxread);

      return false;
    }

  if (!srclen
      || (maxread && tree_fits_uhwi_p (maxread)
          && tree_fits_uhwi_p (srclen)
          && tree_int_cst_lt (maxread, srclen)))
    srclen = maxread;

  /* The number of bytes to write is LEN but check_access will also
     check SRCLEN if LEN's value isn't known.  */
  return check_access (exp, dest, src, /*size=*/NULL_TREE, maxread, srclen,
                       objsize);
}
/* Similar to expand_builtin_strcat, do some very basic size validation
   of a call to the strncat builtin given by EXP.  Return NULL_RTX to have
   the built-in expand to a call to the library function.  */

static rtx
expand_builtin_strncat (tree exp, rtx)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
      || !warn_stringop_overflow)
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  /* The upper bound on the number of bytes to write.  */
  tree maxread = CALL_EXPR_ARG (exp, 2);
  /* The length of the source sequence.  */
  tree slen = c_strlen (src, 1);

  /* Try to determine the range of lengths that the source expression
     refers to.  */
  tree lenrange[2];
  if (slen)
    lenrange[0] = lenrange[1] = slen;
  else
    get_range_strlen (src, lenrange);

  /* Try to verify that the destination is big enough for the shortest
     string.  First try to determine the size of the destination object
     into which the source is being copied.  */
  tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);

  /* Add one for the terminating nul.  */
  tree srclen = (lenrange[0]
                 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
                                size_one_node)
                 : NULL_TREE);

  /* The strncat function copies at most MAXREAD bytes and always appends
     the terminating nul so the specified upper bound should never be equal
     to (or greater than) the size of the destination.  */
  if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize)
      && tree_int_cst_equal (destsize, maxread))
    {
      location_t loc = tree_nonartificial_location (exp);
      loc = expansion_point_location_if_in_system_header (loc);

      warning_at (loc, OPT_Wstringop_overflow_,
                  "%K%qD specified bound %E equals destination size",
                  exp, get_callee_fndecl (exp), maxread);

      return NULL_RTX;
    }

  if (!srclen
      || (maxread && tree_fits_uhwi_p (maxread)
          && tree_fits_uhwi_p (srclen)
          && tree_int_cst_lt (maxread, srclen)))
    srclen = maxread;

  /* The number of bytes to write is SRCLEN.  */
  check_access (exp, dest, src, NULL_TREE, maxread, srclen, destsize);

  return NULL_RTX;
}
/* Expand expression EXP, which is a call to the strncpy builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  */

static rtx
expand_builtin_strncpy (tree exp, rtx target)
{
  location_t loc = EXPR_LOCATION (exp);

  if (validate_arglist (exp,
                        POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      /* The number of bytes to write (not the maximum).  */
      tree len = CALL_EXPR_ARG (exp, 2);
      /* The length of the source sequence.  */
      tree slen = c_strlen (src, 1);

      if (warn_stringop_overflow)
        {
          tree destsize = compute_objsize (dest,
                                           warn_stringop_overflow - 1);

          /* The number of bytes to write is LEN but check_access will also
             check SLEN if LEN's value isn't known.  */
          check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src,
                        destsize);
        }

      /* We must be passed a constant len and src parameter.  */
      if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
        return NULL_RTX;

      slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

      /* We're required to pad with trailing zeros if the requested
         len is greater than strlen(s2)+1.  In that case try to
         use store_by_pieces, if it fails, punt.  */
      if (tree_int_cst_lt (slen, len))
        {
          unsigned int dest_align = get_pointer_alignment (dest);
          const char *p = c_getstr (src);
          rtx dest_mem;

          if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
              || !can_store_by_pieces (tree_to_uhwi (len),
                                       builtin_strncpy_read_str,
                                       CONST_CAST (char *, p),
                                       dest_align, false))
            return NULL_RTX;

          dest_mem = get_memory_rtx (dest, len);
          store_by_pieces (dest_mem, tree_to_uhwi (len),
                           builtin_strncpy_read_str,
                           CONST_CAST (char *, p), dest_align, false, 0);
          dest_mem = force_operand (XEXP (dest_mem, 0), target);
          dest_mem = convert_memory_address (ptr_mode, dest_mem);
          return dest_mem;
        }
    }
  return NULL_RTX;
}
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

static rtx
builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                         scalar_int_mode mode)
{
  const char *c = (const char *) data;
  char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));

  memset (p, *c, GET_MODE_SIZE (mode));

  return c_readstr (p, mode);
}
/* Callback routine for store_by_pieces.  Return the RTL of a register
   containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
   char value given in the RTL register data.  For example, if mode is
   4 bytes wide, return the RTL for 0x01010101*data.  */

static rtx
builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                        scalar_int_mode mode)
{
  rtx target, coeff;
  size_t size;
  char *p;

  size = GET_MODE_SIZE (mode);
  if (size == 1)
    return (rtx) data;

  p = XALLOCAVEC (char, size);
  memset (p, 1, size);
  coeff = c_readstr (p, mode);

  target = convert_to_mode (mode, (rtx) data, 1);
  target = expand_mult (mode, target, coeff, NULL_RTX, 1);
  return force_reg (mode, target);
}
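/* Editorial illustration (not from the original sources): for a 4-byte
   MODE, the buffer P holds { 1, 1, 1, 1 }, so COEFF is 0x01010101 and a
   byte value of, say, 0xAB is widened to 0xAB * 0x01010101 = 0xABABABAB,
   i.e. four consecutive copies of the byte, as the comment above states.  */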
/* Expand expression EXP, which is a call to the memset builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
   try to get the result in TARGET, if convenient (and in mode MODE if that's
   convenient).  */

static rtx
expand_builtin_memset (tree exp, rtx target, machine_mode mode)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree val = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);

  check_memop_access (exp, dest, NULL_TREE, len);

  return expand_builtin_memset_args (dest, val, len, target, mode, exp);
}
/* Helper function to do the actual work for expand_builtin_memset.  The
   arguments to the builtin_memset call DEST, VAL, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_memset.  */

static rtx
expand_builtin_memset_args (tree dest, tree val, tree len,
                            rtx target, machine_mode mode, tree orig_exp)
{
  tree fndecl, fn;
  enum built_in_function fcode;
  machine_mode val_mode;
  char c;
  unsigned int dest_align;
  rtx dest_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;
  unsigned HOST_WIDE_INT min_size;
  unsigned HOST_WIDE_INT max_size;
  unsigned HOST_WIDE_INT probable_max_size;

  dest_align = get_pointer_alignment (dest);

  /* If DEST is not a pointer type, don't do this operation in-line.  */
  if (dest_align == 0)
    return NULL_RTX;

  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
                            &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Evaluate and ignore VAL in case it has side-effects.  */
      expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (dest, target, mode, EXPAND_NORMAL);
    }

  /* Stabilize the arguments in case we fail.  */
  dest = builtin_save_expr (dest);
  val = builtin_save_expr (val);
  len = builtin_save_expr (len);

  len_rtx = expand_normal (len);
  determine_block_size (len, len_rtx, &min_size, &max_size,
                        &probable_max_size);
  dest_mem = get_memory_rtx (dest, len);
  val_mode = TYPE_MODE (unsigned_char_type_node);

  if (TREE_CODE (val) != INTEGER_CST)
    {
      rtx val_rtx;

      val_rtx = expand_normal (val);
      val_rtx = convert_to_mode (val_mode, val_rtx, 0);

      /* Assume that we can memset by pieces if we can store
       * the coefficients by pieces (in the required modes).
       * We can't pass builtin_memset_gen_str as that emits RTL.  */
      c = 1;
      if (tree_fits_uhwi_p (len)
          && can_store_by_pieces (tree_to_uhwi (len),
                                  builtin_memset_read_str, &c, dest_align,
                                  true))
        {
          val_rtx = force_reg (val_mode, val_rtx);
          store_by_pieces (dest_mem, tree_to_uhwi (len),
                           builtin_memset_gen_str, val_rtx, dest_align,
                           true, 0);
        }
      else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
                                        dest_align, expected_align,
                                        expected_size, min_size, max_size,
                                        probable_max_size))
        goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  if (target_char_cast (val, &c))
    goto do_libcall;

  if (c)
    {
      if (tree_fits_uhwi_p (len)
          && can_store_by_pieces (tree_to_uhwi (len),
                                  builtin_memset_read_str, &c, dest_align,
                                  true))
        store_by_pieces (dest_mem, tree_to_uhwi (len),
                         builtin_memset_read_str, &c, dest_align, true, 0);
      else if (!set_storage_via_setmem (dest_mem, len_rtx,
                                        gen_int_mode (c, val_mode),
                                        dest_align, expected_align,
                                        expected_size, min_size, max_size,
                                        probable_max_size))
        goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  set_mem_align (dest_mem, dest_align);
  dest_addr = clear_storage_hints (dest_mem, len_rtx,
                                   CALL_EXPR_TAILCALL (orig_exp)
                                   ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
                                   expected_align, expected_size,
                                   min_size, max_size,
                                   probable_max_size);

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  return dest_addr;

 do_libcall:
  fndecl = get_callee_fndecl (orig_exp);
  fcode = DECL_FUNCTION_CODE (fndecl);
  if (fcode == BUILT_IN_MEMSET)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
                                dest, val, len);
  else if (fcode == BUILT_IN_BZERO)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
                                dest, len);
  else
    gcc_unreachable ();
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
  return expand_call (fn, target, target == const0_rtx);
}
/* Expand expression EXP, which is a call to the bzero builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  */

static rtx
expand_builtin_bzero (tree exp)
{
  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree size = CALL_EXPR_ARG (exp, 1);

  check_memop_access (exp, dest, NULL_TREE, size);

  /* New argument list transforming bzero(ptr x, int y) to
     memset(ptr x, int 0, size_t y).  This is done this way
     so that if it isn't expanded inline, we fall back to
     calling bzero instead of memset.  */

  location_t loc = EXPR_LOCATION (exp);

  return expand_builtin_memset_args (dest, integer_zero_node,
                                     fold_convert_loc (loc,
                                                       size_type_node, size),
                                     const0_rtx, VOIDmode, exp);
}
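/* Editorial illustration (not from the original sources): the rewrite
   above means a call such as

       bzero (buf, n);

   is expanded as if it were

       memset (buf, 0, (size_t) n);

   while ORIG_EXP still names bzero, so a library fallback calls bzero.  */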
/* Try to expand cmpstr operation ICODE with the given operands.
   Return the result rtx on success, otherwise return null.  */

static rtx
expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
               HOST_WIDE_INT align)
{
  machine_mode insn_mode = insn_data[icode].operand[0].mode;

  if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
    target = NULL_RTX;

  struct expand_operand ops[4];
  create_output_operand (&ops[0], target, insn_mode);
  create_fixed_operand (&ops[1], arg1_rtx);
  create_fixed_operand (&ops[2], arg2_rtx);
  create_integer_operand (&ops[3], align);
  if (maybe_expand_insn (icode, 4, ops))
    return ops[0].value;
  return NULL_RTX;
}
/* Expand expression EXP, which is a call to the memcmp built-in function.
   Return NULL_RTX if we failed and the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient.
   RESULT_EQ is true if we can relax the returned value to be either zero
   or nonzero, without caring about the sign.  */

static rtx
expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree arg2 = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);

  /* Diagnose calls where the specified length exceeds the size of either
     object.  */
  if (warn_stringop_overflow)
    {
      tree size = compute_objsize (arg1, 0);
      if (check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
                        /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE))
        {
          size = compute_objsize (arg2, 0);
          check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
                        /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE);
        }
    }

  machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  location_t loc = EXPR_LOCATION (exp);

  unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
  unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

  /* If we don't have POINTER_TYPE, call the function.  */
  if (arg1_align == 0 || arg2_align == 0)
    return NULL_RTX;

  rtx arg1_rtx = get_memory_rtx (arg1, len);
  rtx arg2_rtx = get_memory_rtx (arg2, len);
  rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));

  /* Set MEM_SIZE as appropriate.  */
  if (CONST_INT_P (len_rtx))
    {
      set_mem_size (arg1_rtx, INTVAL (len_rtx));
      set_mem_size (arg2_rtx, INTVAL (len_rtx));
    }

  by_pieces_constfn constfn = NULL;

  const char *src_str = c_getstr (arg2);
  if (result_eq && src_str == NULL)
    {
      src_str = c_getstr (arg1);
      if (src_str != NULL)
        std::swap (arg1_rtx, arg2_rtx);
    }

  /* If SRC is a string constant and block move would be done
     by pieces, we can avoid loading the string from memory
     and only store the computed constants.  */
  if (src_str
      && CONST_INT_P (len_rtx)
      && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
    constfn = builtin_memcpy_read_str;

  rtx result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
                                     TREE_TYPE (len), target,
                                     result_eq, constfn,
                                     CONST_CAST (char *, src_str));

  if (result)
    {
      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (result) == mode)
        return result;

      if (target != 0)
        {
          convert_move (target, result, 0);
          return target;
        }

      return convert_to_mode (mode, result, 0);
    }

  return NULL_RTX;
}
/* Expand expression EXP, which is a call to the strcmp builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
  insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
  if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
    return NULL_RTX;

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree arg2 = CALL_EXPR_ARG (exp, 1);

  unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
  unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

  /* If we don't have POINTER_TYPE, call the function.  */
  if (arg1_align == 0 || arg2_align == 0)
    return NULL_RTX;

  /* Stabilize the arguments in case gen_cmpstr(n)si fail.  */
  arg1 = builtin_save_expr (arg1);
  arg2 = builtin_save_expr (arg2);

  rtx arg1_rtx = get_memory_rtx (arg1, NULL);
  rtx arg2_rtx = get_memory_rtx (arg2, NULL);

  rtx result = NULL_RTX;
  /* Try to call cmpstrsi.  */
  if (cmpstr_icode != CODE_FOR_nothing)
    result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
                            MIN (arg1_align, arg2_align));

  /* Try to determine at least one length and call cmpstrnsi.  */
  if (!result && cmpstrn_icode != CODE_FOR_nothing)
    {
      tree len;
      rtx arg3_rtx;

      tree len1 = c_strlen (arg1, 1);
      tree len2 = c_strlen (arg2, 1);

      if (len1)
        len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
      if (len2)
        len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

      /* If we don't have a constant length for the first, use the length
         of the second, if we know it.  We don't require a constant for
         this case; some cost analysis could be done if both are available
         but neither is constant.  For now, assume they're equally cheap,
         unless one has side effects.  If both strings have constant lengths,
         use the smaller.  */

      if (!len1)
        len = len2;
      else if (!len2)
        len = len1;
      else if (TREE_SIDE_EFFECTS (len1))
        len = len2;
      else if (TREE_SIDE_EFFECTS (len2))
        len = len1;
      else if (TREE_CODE (len1) != INTEGER_CST)
        len = len2;
      else if (TREE_CODE (len2) != INTEGER_CST)
        len = len1;
      else if (tree_int_cst_lt (len1, len2))
        len = len1;
      else
        len = len2;

      /* If both arguments have side effects, we cannot optimize.  */
      if (len && !TREE_SIDE_EFFECTS (len))
        {
          arg3_rtx = expand_normal (len);
          result = expand_cmpstrn_or_cmpmem
            (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
             arg3_rtx, MIN (arg1_align, arg2_align));
        }
    }

  tree fndecl = get_callee_fndecl (exp);
  if (result)
    {
      /* Check to see if the argument was declared attribute nonstring
         and if so, issue a warning since at this point it's not known
         to be nul-terminated.  */
      maybe_warn_nonstring_arg (fndecl, exp);

      /* Return the value in the proper mode for this function.  */
      machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
      if (GET_MODE (result) == mode)
        return result;
      if (target == 0)
        return convert_to_mode (mode, result, 0);
      convert_move (target, result, 0);
      return target;
    }

  /* Expand the library call ourselves using a stabilized argument
     list to avoid re-evaluating the function's arguments twice.  */
  tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
  return expand_call (fn, target, target == const0_rtx);
}
/* Expand expression EXP, which is a call to the strncmp builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
                        ATTRIBUTE_UNUSED machine_mode mode)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* If c_strlen can determine an expression for one of the string
     lengths, and it doesn't have side effects, then emit cmpstrnsi
     using length MIN(strlen(string)+1, arg3).  */
  insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
  if (cmpstrn_icode == CODE_FOR_nothing)
    return NULL_RTX;

  tree len;

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree arg2 = CALL_EXPR_ARG (exp, 1);
  tree arg3 = CALL_EXPR_ARG (exp, 2);

  unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
  unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

  tree len1 = c_strlen (arg1, 1);
  tree len2 = c_strlen (arg2, 1);

  location_t loc = EXPR_LOCATION (exp);

  if (len1)
    len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
  if (len2)
    len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);

  tree len3 = fold_convert_loc (loc, sizetype, arg3);

  /* If we don't have a constant length for the first, use the length
     of the second, if we know it.  If neither string is constant length,
     use the given length argument.  We don't require a constant for
     this case; some cost analysis could be done if both are available
     but neither is constant.  For now, assume they're equally cheap,
     unless one has side effects.  If both strings have constant lengths,
     use the smaller.  */

  if (!len1 && !len2)
    len = len3;
  else if (!len1)
    len = len2;
  else if (!len2)
    len = len1;
  else if (TREE_SIDE_EFFECTS (len1))
    len = len2;
  else if (TREE_SIDE_EFFECTS (len2))
    len = len1;
  else if (TREE_CODE (len1) != INTEGER_CST)
    len = len2;
  else if (TREE_CODE (len2) != INTEGER_CST)
    len = len1;
  else if (tree_int_cst_lt (len1, len2))
    len = len1;
  else
    len = len2;

  /* If we are not using the given length, we must incorporate it here.
     The actual new length parameter will be MIN(len,arg3) in this case.  */
  if (len != len3)
    len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
  rtx arg1_rtx = get_memory_rtx (arg1, len);
  rtx arg2_rtx = get_memory_rtx (arg2, len);
  rtx arg3_rtx = expand_normal (len);
  rtx result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
                                         arg2_rtx, TREE_TYPE (len), arg3_rtx,
                                         MIN (arg1_align, arg2_align));

  tree fndecl = get_callee_fndecl (exp);
  if (result)
    {
      /* Check to see if the argument was declared attribute nonstring
         and if so, issue a warning since at this point it's not known
         to be nul-terminated.  */
      maybe_warn_nonstring_arg (fndecl, exp);

      /* Return the value in the proper mode for this function.  */
      mode = TYPE_MODE (TREE_TYPE (exp));
      if (GET_MODE (result) == mode)
        return result;
      if (target == 0)
        return convert_to_mode (mode, result, 0);
      convert_move (target, result, 0);
      return target;
    }

  /* Expand the library call ourselves using a stabilized argument
     list to avoid re-evaluating the function's arguments twice.  */
  tree fn = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
  return expand_call (fn, target, target == const0_rtx);
}
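/* Editorial illustration (not from the original sources): for
   strncmp (s, "hello", n) the code above computes len2 = 6 (strlen plus
   one for the NUL) and compares MIN (6, n) bytes, which is safe because
   strncmp cannot differ beyond the NUL of a constant operand.  */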
/* Expand a call to __builtin_saveregs, generating the result in TARGET,
   if that's convenient.  */

static rtx
expand_builtin_saveregs (void)
{
  rtx val;
  rtx_insn *seq;

  /* Don't do __builtin_saveregs more than once in a function.
     Save the result of the first call and reuse it.  */
  if (saveregs_value != 0)
    return saveregs_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the call to the
     first insn of this function.  */

  start_sequence ();

  /* Do whatever the machine needs done in this case.  */
  val = targetm.calls.expand_builtin_saveregs ();

  seq = get_insns ();
  end_sequence ();

  saveregs_value = val;

  /* Put the insns after the NOTE that starts the function.  If this
     is inside a start_sequence, make the outer-level insn chain current, so
     the code is placed at the start of the function.  */
  push_topmost_sequence ();
  emit_insn_after (seq, entry_of_function ());
  pop_topmost_sequence ();

  return val;
}
/* Expand a call to __builtin_next_arg.  */

static rtx
expand_builtin_next_arg (void)
{
  /* Checking arguments is already done in fold_builtin_next_arg
     that must be called before this function.  */
  return expand_binop (ptr_mode, add_optab,
                       crtl->args.internal_arg_pointer,
                       crtl->args.arg_offset_rtx,
                       NULL_RTX, 0, OPTAB_LIB_WIDEN);
}
/* Make it easier for the backends by protecting the valist argument
   from multiple evaluations.  */

static tree
stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
{
  tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));

  /* The current way of determining the type of valist is completely
     bogus.  We should have the information on the va builtin instead.  */
  if (!vatype)
    vatype = targetm.fn_abi_va_list (cfun->decl);

  if (TREE_CODE (vatype) == ARRAY_TYPE)
    {
      if (TREE_SIDE_EFFECTS (valist))
        valist = save_expr (valist);

      /* For this case, the backends will be expecting a pointer to
         vatype, but it's possible we've actually been given an array
         (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
         So fix it.  */
      if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
        {
          tree p1 = build_pointer_type (TREE_TYPE (vatype));
          valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
        }
    }
  else
    {
      tree pt = build_pointer_type (vatype);

      if (! needs_lvalue)
        {
          if (! TREE_SIDE_EFFECTS (valist))
            return valist;

          valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
          TREE_SIDE_EFFECTS (valist) = 1;
        }

      if (TREE_SIDE_EFFECTS (valist))
        valist = save_expr (valist);
      valist = fold_build2_loc (loc, MEM_REF,
                                vatype, valist, build_int_cst (pt, 0));
    }

  return valist;
}
/* The "standard" definition of va_list is void*.  */

tree
std_build_builtin_va_list (void)
{
  return ptr_type_node;
}

/* The "standard" abi va_list is va_list_type_node.  */

tree
std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
{
  return va_list_type_node;
}

/* The "standard" type of va_list is va_list_type_node.  */

tree
std_canonical_va_list_type (tree type)
{
  tree wtype, htype;

  wtype = va_list_type_node;
  htype = type;

  if (TREE_CODE (wtype) == ARRAY_TYPE)
    {
      /* If va_list is an array type, the argument may have decayed
         to a pointer type, e.g. by being passed to another function.
         In that case, unwrap both types so that we can compare the
         underlying records.  */
      if (TREE_CODE (htype) == ARRAY_TYPE
          || POINTER_TYPE_P (htype))
        {
          wtype = TREE_TYPE (wtype);
          htype = TREE_TYPE (htype);
        }
    }
  if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
    return va_list_type_node;

  return NULL_TREE;
}
/* The "standard" implementation of va_start: just assign `nextarg' to
   the variable.  */

void
std_expand_builtin_va_start (tree valist, rtx nextarg)
{
  rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
  convert_move (va_r, nextarg, 0);
}
/* Expand EXP, a call to __builtin_va_start.  */

static rtx
expand_builtin_va_start (tree exp)
{
  rtx nextarg;
  tree valist;
  location_t loc = EXPR_LOCATION (exp);

  if (call_expr_nargs (exp) < 2)
    {
      error_at (loc, "too few arguments to function %<va_start%>");
      return const0_rtx;
    }

  if (fold_builtin_next_arg (exp, true))
    return const0_rtx;

  nextarg = expand_builtin_next_arg ();
  valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);

  if (targetm.expand_builtin_va_start)
    targetm.expand_builtin_va_start (valist, nextarg);
  else
    std_expand_builtin_va_start (valist, nextarg);

  return const0_rtx;
}
/* Expand EXP, a call to __builtin_va_end.  */

static rtx
expand_builtin_va_end (tree exp)
{
  tree valist = CALL_EXPR_ARG (exp, 0);

  /* Evaluate for side effects, if needed.  I hate macros that don't
     do that.  */
  if (TREE_SIDE_EFFECTS (valist))
    expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);

  return const0_rtx;
}
/* Expand EXP, a call to __builtin_va_copy.  We do this as a
   builtin rather than just as an assignment in stdarg.h because of the
   nastiness of array-type va_list types.  */

static rtx
expand_builtin_va_copy (tree exp)
{
  tree dst, src, t;
  location_t loc = EXPR_LOCATION (exp);

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  dst = stabilize_va_list_loc (loc, dst, 1);
  src = stabilize_va_list_loc (loc, src, 0);

  gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);

  if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
    {
      t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
  else
    {
      rtx dstb, srcb, size;

      /* Evaluate to pointers.  */
      dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
      srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
      size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
                          NULL_RTX, VOIDmode, EXPAND_NORMAL);

      dstb = convert_memory_address (Pmode, dstb);
      srcb = convert_memory_address (Pmode, srcb);

      /* "Dereference" to BLKmode memories.  */
      dstb = gen_rtx_MEM (BLKmode, dstb);
      set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
      set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
      srcb = gen_rtx_MEM (BLKmode, srcb);
      set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
      set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));

      /* Copy.  */
      emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
    }

  return const0_rtx;
}
/* Expand a call to one of the builtin functions __builtin_frame_address or
   __builtin_return_address.  */

static rtx
expand_builtin_frame_address (tree fndecl, tree exp)
{
  /* The argument must be a nonnegative integer constant.
     It counts the number of frames to scan up the stack.
     The value is either the frame pointer value or the return
     address saved in that frame.  */
  if (call_expr_nargs (exp) == 0)
    /* Warning about missing arg was already issued.  */
    return const0_rtx;
  else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
    {
      error ("invalid argument to %qD", fndecl);
      return const0_rtx;
    }
  else
    {
      /* Number of frames to scan up the stack.  */
      unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));

      rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);

      /* Some ports cannot access arbitrary stack frames.  */
      if (tem == NULL)
        {
          warning (0, "unsupported argument to %qD", fndecl);
          return const0_rtx;
        }

      if (count)
        {
          /* Warn since no effort is made to ensure that any frame
             beyond the current one exists or can be safely reached.  */
          warning (OPT_Wframe_address, "calling %qD with "
                   "a nonzero argument is unsafe", fndecl);
        }

      /* For __builtin_frame_address, return what we've got.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
        return tem;

      if (!REG_P (tem)
          && ! CONSTANT_P (tem))
        tem = copy_addr_to_reg (tem);
      return tem;
    }
}
/* Expand EXP, a call to the alloca builtin.  Return NULL_RTX if we
   failed and the caller should emit a normal call.  */

static rtx
expand_builtin_alloca (tree exp)
{
  rtx op0;
  rtx result;
  unsigned int align;
  tree fndecl = get_callee_fndecl (exp);
  HOST_WIDE_INT max_size;
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
  bool valid_arglist
    = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
       ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
                           VOID_TYPE)
       : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
         ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
         : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));

  if (!valid_arglist)
    return NULL_RTX;

  if ((alloca_for_var && !warn_vla_limit)
      || (!alloca_for_var && !warn_alloca_limit))
    {
      /* -Walloca-larger-than and -Wvla-larger-than settings override
         the more general -Walloc-size-larger-than so unless either of
         the former options is specified check the alloca arguments for
         overflow.  */
      tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
      int idx[] = { 0, -1 };
      maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
    }

  /* Compute the argument.  */
  op0 = expand_normal (CALL_EXPR_ARG (exp, 0));

  /* Compute the alignment.  */
  align = (fcode == BUILT_IN_ALLOCA
           ? BIGGEST_ALIGNMENT
           : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));

  /* Compute the maximum size.  */
  max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
              ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
              : -1);

  /* Allocate the desired space.  If the allocation stems from the declaration
     of a variable-sized object, it cannot accumulate.  */
  result
    = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
  result = convert_memory_address (ptr_mode, result);

  return result;
}
/* Emit a call to __asan_allocas_unpoison for EXP.  Add
   virtual_stack_dynamic_rtx - stack_pointer_rtx, which is the
   STACK_DYNAMIC_OFFSET value, to the second argument of the call.  See
   the motivation for this in the comment before the
   handle_builtin_stack_restore function.  */

static rtx
expand_asan_emit_allocas_unpoison (tree exp)
{
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);
  rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
                                 stack_pointer_rtx, NULL_RTX, 0,
                                 OPTAB_LIB_WIDEN);
  off = convert_modes (ptr_mode, Pmode, off, 0);
  bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
                             OPTAB_LIB_WIDEN);
  rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
  ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
                                 top, ptr_mode, bot, ptr_mode);
  return ret;
}
/* Expand a call to bswap builtin in EXP.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
                      rtx subtarget)
{
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  op0 = expand_expr (arg,
                     subtarget && GET_MODE (subtarget) == target_mode
                     ? subtarget : NULL_RTX,
                     target_mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != target_mode)
    op0 = convert_to_mode (target_mode, op0, 1);

  target = expand_unop (target_mode, bswap_optab, op0, target, 1);

  gcc_assert (target);

  return convert_to_mode (target_mode, target, 1);
}
/* Expand a call to a unary builtin in EXP.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
                     rtx subtarget, optab op_optab)
{
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Compute the argument.  */
  op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
                     (subtarget
                      && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
                          == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
                     VOIDmode, EXPAND_NORMAL);
  /* Compute op, into TARGET if possible.
     Set TARGET to wherever the result comes back.  */
  target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
                        op_optab, op0, target, op_optab != clrsb_optab);
  gcc_assert (target);

  return convert_to_mode (target_mode, target, 0);
}
/* Expand a call to __builtin_expect.  We just return our argument
   as the builtin_expect semantics should already have been handled by
   the tree branch-prediction pass.  */

static rtx
expand_builtin_expect (tree exp, rtx target)
{
  tree arg;

  if (call_expr_nargs (exp) < 2)
    return const0_rtx;
  arg = CALL_EXPR_ARG (exp, 0);

  target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
  /* When guessing was done, the hints should be already stripped away.  */
  gcc_assert (!flag_guess_branch_prob
              || optimize == 0 || seen_error ());
  return target;
}
/* Expand a call to __builtin_assume_aligned.  We just return our first
   argument as the builtin_assume_aligned semantic should've been already
   executed by CCP.  */

static rtx
expand_builtin_assume_aligned (tree exp, rtx target)
{
  if (call_expr_nargs (exp) < 2)
    return const0_rtx;
  target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
                        EXPAND_NORMAL);
  gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
              && (call_expr_nargs (exp) < 3
                  || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
  return target;
}
void
expand_builtin_trap (void)
{
  if (targetm.have_trap ())
    {
      rtx_insn *insn = emit_insn (targetm.gen_trap ());
      /* For trap insns when not accumulating outgoing args force
         REG_ARGS_SIZE note to prevent crossjumping of calls with
         different args sizes.  */
      if (!ACCUMULATE_OUTGOING_ARGS)
        add_args_size_note (insn, stack_pointer_delta);
    }
  else
    {
      tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
      tree call_expr = build_call_expr (fn, 0);
      expand_call (call_expr, NULL_RTX, false);
    }

  emit_barrier ();
}
/* Expand a call to __builtin_unreachable.  We do nothing except emit
   a barrier saying that control flow will not pass here.

   It is the responsibility of the program being compiled to ensure
   that control flow never reaches __builtin_unreachable.  */

static void
expand_builtin_unreachable (void)
{
  emit_barrier ();
}
/* Expand EXP, a call to fabs, fabsf or fabsl.
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function inline.  If convenient, the result should be placed
   in TARGET.  SUBTARGET may be used as the target for computing
   the operand.  */

static rtx
expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
{
  machine_mode mode;
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
  return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
}
/* Expand EXP, a call to copysign, copysignf, or copysignl.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function inline.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing the operand.  */

static rtx
expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
{
  rtx op0, op1;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

  arg = CALL_EXPR_ARG (exp, 1);
  op1 = expand_normal (arg);

  return expand_copysign (op0, op1, target);
}
/* Expand a call to __builtin___clear_cache.  */

static rtx
expand_builtin___clear_cache (tree exp)
{
  if (!targetm.code_for_clear_cache)
    {
#ifdef CLEAR_INSN_CACHE
      /* There is no "clear_cache" insn, and __clear_cache() in libgcc
         does something.  Just do the default expansion to a call to
         __clear_cache().  */
      return NULL_RTX;
#else
      /* There is no "clear_cache" insn, and __clear_cache() in libgcc
         does nothing.  There is no need to call it.  Do nothing.  */
      return const0_rtx;
#endif /* CLEAR_INSN_CACHE */
    }

  /* We have a "clear_cache" insn, and it will handle everything.  */
  tree begin, end;
  rtx begin_rtx, end_rtx;

  /* We must not expand to a library call.  If we did, any
     fallback library function in libgcc that might contain a call to
     __builtin___clear_cache() would recurse infinitely.  */
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    {
      error ("both arguments to %<__builtin___clear_cache%> must be pointers");
      return const0_rtx;
    }

  if (targetm.have_clear_cache ())
    {
      struct expand_operand ops[2];

      begin = CALL_EXPR_ARG (exp, 0);
      begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);

      end = CALL_EXPR_ARG (exp, 1);
      end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);

      create_address_operand (&ops[0], begin_rtx);
      create_address_operand (&ops[1], end_rtx);
      if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
        return const0_rtx;
    }
  return const0_rtx;
}
/* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT.  */

static rtx
round_trampoline_addr (rtx tramp)
{
  rtx temp, addend, mask;

  /* If we don't need too much alignment, we'll have been guaranteed
     proper alignment by get_trampoline_type.  */
  if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
    return tramp;

  /* Round address up to desired boundary.  */
  temp = gen_reg_rtx (Pmode);
  addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
  mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);

  temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
                              temp, 0, OPTAB_LIB_WIDEN);
  tramp = expand_simple_binop (Pmode, AND, temp, mask,
                               temp, 0, OPTAB_LIB_WIDEN);

  return tramp;
}
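/* Editorial illustration (not from the original sources): for a 16-byte
   TRAMPOLINE_ALIGNMENT the code above computes ADDEND = 15 and
   MASK = -16, so an address such as 0x1003 becomes
   (0x1003 + 15) & -16 = 0x1010: the usual round-up-to-boundary idiom.  */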
static rtx
expand_builtin_init_trampoline (tree exp, bool onstack)
{
  tree t_tramp, t_func, t_chain;
  rtx m_tramp, r_tramp, r_chain, tmp;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
                         POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_tramp = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  r_tramp = expand_normal (t_tramp);
  m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
  MEM_NOTRAP_P (m_tramp) = 1;

  /* If ONSTACK, the TRAMP argument should be the address of a field
     within the local function's FRAME decl.  Either way, let's see if
     we can fill in the MEM_ATTRs for this memory.  */
  if (TREE_CODE (t_tramp) == ADDR_EXPR)
    set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);

  /* Creator of a heap trampoline is responsible for making sure the
     address is aligned to at least STACK_BOUNDARY.  Normally malloc
     will ensure this anyhow.  */
  tmp = round_trampoline_addr (r_tramp);
  if (tmp != r_tramp)
    {
      m_tramp = change_address (m_tramp, BLKmode, tmp);
      set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
      set_mem_size (m_tramp, TRAMPOLINE_SIZE);
    }

  /* The FUNC argument should be the address of the nested function.
     Extract the actual function decl to pass to the hook.  */
  gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
  t_func = TREE_OPERAND (t_func, 0);
  gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);

  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the trampoline.  */
  targetm.calls.trampoline_init (m_tramp, t_func, r_chain);

  if (onstack)
    {
      trampolines_created = 1;

      if (targetm.calls.custom_function_descriptors != 0)
        warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
                    "trampoline generated for nested function %qD", t_func);
    }

  return const0_rtx;
}
static rtx
expand_builtin_adjust_trampoline (tree exp)
{
  rtx tramp;

  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
  tramp = round_trampoline_addr (tramp);
  if (targetm.calls.trampoline_adjust_address)
    tramp = targetm.calls.trampoline_adjust_address (tramp);

  return tramp;
}
/* Expand a call to the builtin descriptor initialization routine.
   A descriptor is made up of a couple of pointers to the static
   chain and the code entry in this order.  */

static rtx
expand_builtin_init_descriptor (tree exp)
{
  tree t_descr, t_func, t_chain;
  rtx m_descr, r_descr, r_func, r_chain;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
                         VOID_TYPE))
    return NULL_RTX;

  t_descr = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  r_descr = expand_normal (t_descr);
  m_descr = gen_rtx_MEM (BLKmode, r_descr);
  MEM_NOTRAP_P (m_descr) = 1;

  r_func = expand_normal (t_func);
  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the descriptor.  */
  emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
  emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
                                     POINTER_SIZE / BITS_PER_UNIT), r_func);

  return const0_rtx;
}
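/* Editorial illustration (not from the original sources): on a 64-bit
   target the descriptor initialized above is laid out like

       struct descriptor
       {
         void *static_chain;   // offset 0
         void *code_entry;     // offset POINTER_SIZE / BITS_PER_UNIT = 8
       };

   matching the "static chain first, code entry second" order stated in
   the comment above.  */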
/* Expand a call to the builtin descriptor adjustment routine.  */

static rtx
expand_builtin_adjust_descriptor (tree exp)
{
  rtx tramp;

  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tramp = expand_normal (CALL_EXPR_ARG (exp, 0));

  /* Unalign the descriptor to allow runtime identification.  */
  tramp = plus_constant (ptr_mode, tramp,
                         targetm.calls.custom_function_descriptors);

  return force_operand (tramp, NULL_RTX);
}
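/* Editorial note (not from the original sources): descriptors are
   pointer-aligned, so adding the small constant held in
   targetm.calls.custom_function_descriptors yields a deliberately
   misaligned "function pointer" whose low bits let an indirect call
   distinguish a descriptor from an ordinary code address at run time.  */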
/* Expand the call EXP to the built-in signbit, signbitf or signbitl
   function.  The function first checks whether the back end provides
   an insn to implement signbit for the respective mode.  If not, it
   checks whether the floating point format of the value is such that
   the sign bit can be extracted.  If that is not the case, error out.
   EXP is the expression that is a call to the builtin function; if
   convenient, the result should be placed in TARGET.  */
static rtx
expand_builtin_signbit (tree exp, rtx target)
{
  const struct real_format *fmt;
  scalar_float_mode fmode;
  scalar_int_mode rmode, imode;
  tree arg;
  int word, bitpos;
  enum insn_code icode;
  rtx temp;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
  rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
  fmt = REAL_MODE_FORMAT (fmode);

  arg = builtin_save_expr (arg);

  /* Expand the argument yielding a RTX expression.  */
  temp = expand_normal (arg);

  /* Check if the back end provides an insn that handles signbit for the
     argument's mode.  */
  icode = optab_handler (signbit_optab, fmode);
  if (icode != CODE_FOR_nothing)
    {
      rtx_insn *last = get_last_insn ();
      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
        return target;
      delete_insns_since (last);
    }

  /* For floating point formats without a sign bit, implement signbit
     as "ARG < 0.0".  */
  bitpos = fmt->signbit_ro;
  if (bitpos < 0)
    {
      /* But we can't do this if the format supports signed zero.  */
      gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));

      arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
                             build_real (TREE_TYPE (arg), dconst0));
      return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
    }

  if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
    {
      imode = int_mode_for_mode (fmode).require ();
      temp = gen_lowpart (imode, temp);
    }
  else
    {
      imode = word_mode;
      /* Handle targets with different FP word orders.  */
      if (FLOAT_WORDS_BIG_ENDIAN)
        word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
      else
        word = bitpos / BITS_PER_WORD;
      temp = operand_subword_force (temp, word, fmode);
      bitpos = bitpos % BITS_PER_WORD;
    }

  /* Force the intermediate word_mode (or narrower) result into a
     register.  This avoids attempting to create paradoxical SUBREGs
     of floating point modes below.  */
  temp = force_reg (imode, temp);

  /* If the bitpos is within the "result mode" lowpart, the operation
     can be implemented with a single bitwise AND.  Otherwise, we need
     a right shift and an AND.  */

  if (bitpos < GET_MODE_BITSIZE (rmode))
    {
      wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));

      if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
        temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp,
                           immed_wide_int_const (mask, rmode),
                           NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }
  else
    {
      /* Perform a logical right shift to place the signbit in the least
         significant bit, then truncate the result to the desired mode
         and mask just this bit.  */
      temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
      temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp, const1_rtx,
                           NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }

  return temp;
}
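/* Editorial illustration (not from the original sources): for IEEE
   binary64 on a typical 64-bit little-endian target, signbit_ro is 63,
   so with a 32-bit result mode the else branch above shifts the DImode
   view of the value right by 63 and masks with 1: -2.5
   (0xC004000000000000) yields 1, while 2.5 (0x4004000000000000)
   yields 0.  */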
/* Expand fork or exec calls.  TARGET is the desired target of the
   call.  EXP is the call.  FN is the
   identifier of the actual function.  IGNORE is nonzero if the
   value is to be ignored.  */

static rtx
expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
{
  tree id, decl;
  tree call;

  /* If we are not profiling, just call the function.  */
  if (!profile_arc_flag)
    return NULL_RTX;

  /* Otherwise call the wrapper.  This should be equivalent for the rest of
     compiler, so the code does not diverge, and the wrapper may run the
     code necessary for keeping the profiling sane.  */

  switch (DECL_FUNCTION_CODE (fn))
    {
    case BUILT_IN_FORK:
      id = get_identifier ("__gcov_fork");
      break;

    case BUILT_IN_EXECL:
      id = get_identifier ("__gcov_execl");
      break;

    case BUILT_IN_EXECV:
      id = get_identifier ("__gcov_execv");
      break;

    case BUILT_IN_EXECLP:
      id = get_identifier ("__gcov_execlp");
      break;

    case BUILT_IN_EXECLE:
      id = get_identifier ("__gcov_execle");
      break;

    case BUILT_IN_EXECVP:
      id = get_identifier ("__gcov_execvp");
      break;

    case BUILT_IN_EXECVE:
      id = get_identifier ("__gcov_execve");
      break;

    default:
      gcc_unreachable ();
    }

  decl = build_decl (DECL_SOURCE_LOCATION (fn),
                     FUNCTION_DECL, id, TREE_TYPE (fn));
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  TREE_NOTHROW (decl) = 1;
  DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
  DECL_VISIBILITY_SPECIFIED (decl) = 1;
  call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
  return expand_call (call, target, ignore);
}
/* Reconstitute a mode for a __sync intrinsic operation.  Since the type of
   the pointer in these functions is void*, the tree optimizers may remove
   casts.  The mode computed in expand_builtin isn't reliable either, due
   to __sync_bool_compare_and_swap.

   FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
   group of builtins.  This gives us log2 of the mode size.  */

static inline machine_mode
get_builtin_sync_mode (int fcode_diff)
{
  /* The size is not negotiable, so ask not to get BLKmode in return
     if the target indicates that a smaller size would be better.  */
  return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
}
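/* Editorial illustration (not from the original sources): for
   __sync_fetch_and_add_4, fcode - base is 2, so the requested width is
   BITS_PER_UNIT << 2 = 32 bits, i.e. SImode on typical targets; the _1,
   _2, _8 and _16 variants map to diffs 0, 1, 3 and 4 the same way,
   since the suffix is always a power-of-two byte size.  */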
/* Expand the memory expression LOC and return the appropriate memory operand
   for the builtin_sync operations.  */

static rtx
get_builtin_sync_mem (tree loc, machine_mode mode)
{
  rtx addr, mem;

  addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
  addr = convert_memory_address (Pmode, addr);

  /* Note that we explicitly do not want any alias information for this
     memory, so that we kill all other live memories.  Otherwise we don't
     satisfy the full barrier semantics of the intrinsic.  */
  mem = validize_mem (gen_rtx_MEM (mode, addr));

  /* The alignment needs to be at least according to that of the mode.  */
  set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
                           get_pointer_alignment (loc)));
  set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
  MEM_VOLATILE_P (mem) = 1;

  return mem;
}
/* Make sure an argument is in the right mode.
   EXP is the tree argument.
   MODE is the mode it should be in.  */

static rtx
expand_expr_force_mode (tree exp, machine_mode mode)
{
  rtx val;
  machine_mode old_mode;

  val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
  /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
     of CONST_INTs, where we know the old_mode only from the call argument.  */

  old_mode = GET_MODE (val);
  if (old_mode == VOIDmode)
    old_mode = TYPE_MODE (TREE_TYPE (exp));
  val = convert_modes (mode, old_mode, val, 1);
  return val;
}
/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
   EXP is the CALL_EXPR.  CODE is the rtx code
   that corresponds to the arithmetic or logical operation from the name;
   an exception here is that NOT actually means NAND.  TARGET is an optional
   place for us to store the results; AFTER is true if this is the
   fetch_and_xxx form.  */

static rtx
expand_builtin_sync_operation (machine_mode mode, tree exp,
                               enum rtx_code code, bool after,
                               rtx target)
{
  rtx val, mem;
  location_t loc = EXPR_LOCATION (exp);

  if (code == NOT && warn_sync_nand)
    {
      tree fndecl = get_callee_fndecl (exp);
      enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

      static bool warned_f_a_n, warned_n_a_f;

      switch (fcode)
        {
        case BUILT_IN_SYNC_FETCH_AND_NAND_1:
        case BUILT_IN_SYNC_FETCH_AND_NAND_2:
        case BUILT_IN_SYNC_FETCH_AND_NAND_4:
        case BUILT_IN_SYNC_FETCH_AND_NAND_8:
        case BUILT_IN_SYNC_FETCH_AND_NAND_16:
          if (warned_f_a_n)
            break;

          fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
          inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
          warned_f_a_n = true;
          break;

        case BUILT_IN_SYNC_NAND_AND_FETCH_1:
        case BUILT_IN_SYNC_NAND_AND_FETCH_2:
        case BUILT_IN_SYNC_NAND_AND_FETCH_4:
        case BUILT_IN_SYNC_NAND_AND_FETCH_8:
        case BUILT_IN_SYNC_NAND_AND_FETCH_16:
          if (warned_n_a_f)
            break;

          fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
          inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
          warned_n_a_f = true;
          break;

        default:
          gcc_unreachable ();
        }
    }

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
                                 after);
}
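/* For illustration only (user-level C): both intrinsic families funnel
   into expand_atomic_fetch_op above, differing only in AFTER:

	int old = __sync_fetch_and_add (&x, 5);   // AFTER == false
	int cur = __sync_add_and_fetch (&x, 5);   // AFTER == true

   Note the NOT-means-NAND exception: __sync_fetch_and_nand (&x, v)
   stores ~(x & v) into x, not (~x & v).  */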
/* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
   intrinsics.  EXP is the CALL_EXPR.  IS_BOOL is
   true if this is the boolean form.  TARGET is a place for us to store the
   results; this is NOT optional if IS_BOOL is true.  */

static rtx
expand_builtin_compare_and_swap (machine_mode mode, tree exp,
                                 bool is_bool, rtx target)
{
  rtx old_val, new_val, mem;
  rtx *pbool, *poval;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
  new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);

  pbool = poval = NULL;
  if (target != const0_rtx)
    {
      if (is_bool)
        pbool = &target;
      else
        poval = &target;
    }
  if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
                                       false, MEMMODEL_SYNC_SEQ_CST,
                                       MEMMODEL_SYNC_SEQ_CST))
    return NULL_RTX;

  return target;
}
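/* For illustration only (user-level C): the two intrinsic forms share
   this one expansion, with IS_BOOL selecting which result survives:

	bool ok  = __sync_bool_compare_and_swap (&x, oldv, newv);
	int  was = __sync_val_compare_and_swap (&x, oldv, newv);

   Both are sequentially consistent (MEMMODEL_SYNC_SEQ_CST).  */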
/* Expand the __sync_lock_test_and_set intrinsic.  Note that the most
   general form is actually an atomic exchange, and some targets only
   support a reduced form with the second argument being a constant 1.
   EXP is the CALL_EXPR; TARGET is an optional place for us to store
   the results.  */

static rtx
expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
                                       rtx target)
{
  rtx val, mem;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_sync_lock_test_and_set (target, mem, val);
}
/* Expand the __sync_lock_release intrinsic.  EXP is the CALL_EXPR.  */

static void
expand_builtin_sync_lock_release (machine_mode mode, tree exp)
{
  rtx mem;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
}
/* Given an integer representing an ``enum memmodel'', verify its
   correctness and return the memory model enum.  */

static enum memmodel
get_memmodel (tree exp)
{
  rtx op;
  unsigned HOST_WIDE_INT val;
  location_t loc
    = expansion_point_location_if_in_system_header (input_location);

  /* If the parameter is not a constant, it's a run time value so we'll just
     convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking.  */
  if (TREE_CODE (exp) != INTEGER_CST)
    return MEMMODEL_SEQ_CST;

  op = expand_normal (exp);

  val = INTVAL (op);
  if (targetm.memmodel_check)
    val = targetm.memmodel_check (val);
  else if (val & ~MEMMODEL_MASK)
    {
      warning_at (loc, OPT_Winvalid_memory_model,
                  "unknown architecture specifier in memory model to builtin");
      return MEMMODEL_SEQ_CST;
    }

  /* Should never see a user explicit SYNC memory model, so >= LAST works.  */
  if (memmodel_base (val) >= MEMMODEL_LAST)
    {
      warning_at (loc, OPT_Winvalid_memory_model,
                  "invalid memory model argument to builtin");
      return MEMMODEL_SEQ_CST;
    }

  /* Workaround for Bugzilla 59448.  GCC doesn't track consume properly, so
     be conservative and promote consume to acquire.  */
  if (val == MEMMODEL_CONSUME)
    val = MEMMODEL_ACQUIRE;

  return (enum memmodel) val;
}
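/* For illustration only (user-level C): only a constant memory model
   reaches the checks above; a run time value is quietly treated as
   sequential consistency, and consume is promoted to acquire:

	__atomic_store_n (&x, 1, __ATOMIC_RELEASE);   // checked as written
	int m = order;
	__atomic_store_n (&x, 1, m);                  // expanded as SEQ_CST
   */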
/* Expand the __atomic_exchange intrinsic:
	TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
{
  rtx val, mem;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_exchange (target, mem, val, model);
}
/* Expand the __atomic_compare_exchange intrinsic:
	bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
					TYPE desired, BOOL weak,
					enum memmodel success,
					enum memmodel failure)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
                                        rtx target)
{
  rtx expect, desired, mem, oldval;
  rtx_code_label *label;
  enum memmodel success, failure;
  tree weak;
  bool is_weak;
  location_t loc
    = expansion_point_location_if_in_system_header (input_location);

  success = get_memmodel (CALL_EXPR_ARG (exp, 4));
  failure = get_memmodel (CALL_EXPR_ARG (exp, 5));

  if (failure > success)
    {
      warning_at (loc, OPT_Winvalid_memory_model,
                  "failure memory model cannot be stronger than success "
                  "memory model for %<__atomic_compare_exchange%>");
      success = MEMMODEL_SEQ_CST;
    }

  if (is_mm_release (failure) || is_mm_acq_rel (failure))
    {
      warning_at (loc, OPT_Winvalid_memory_model,
                  "invalid failure memory model for "
                  "%<__atomic_compare_exchange%>");
      failure = MEMMODEL_SEQ_CST;
      success = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  expect = expand_normal (CALL_EXPR_ARG (exp, 1));
  expect = convert_memory_address (Pmode, expect);
  expect = gen_rtx_MEM (mode, expect);
  desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);

  weak = CALL_EXPR_ARG (exp, 3);
  is_weak = false;
  if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
    is_weak = true;

  if (target == const0_rtx)
    target = NULL;

  /* Lest the rtl backend create a race condition with an improper store
     to memory, always create a new pseudo for OLDVAL.  */
  oldval = NULL;

  if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
                                       is_weak, success, failure))
    return NULL_RTX;

  /* Conditionally store back to EXPECT, lest we create a race condition
     with an improper store to memory.  */
  /* ??? With a rearrangement of atomics at the gimple level, we can handle
     the normal case where EXPECT is totally private, i.e. a register.  At
     which point the store can be unconditional.  */
  label = gen_label_rtx ();
  emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
                           GET_MODE (target), 1, label);
  emit_move_insn (expect, oldval);
  emit_label (label);

  return target;
}
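/* For illustration only (user-level C): a typical compare-exchange loop
   served by this expansion; on failure the conditional store emitted
   above refreshes EXPECTED from memory:

	int expected = __atomic_load_n (p, __ATOMIC_RELAXED);
	while (!__atomic_compare_exchange_n (p, &expected, expected + 1,
					     false, __ATOMIC_SEQ_CST,
					     __ATOMIC_SEQ_CST))
	  ;   // expected now holds the value observed in *p
   */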
/* Helper function for expand_ifn_atomic_compare_exchange - expand
   internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
   call.  The weak parameter must be dropped to match the expected parameter
   list and the expected argument changed from value to pointer to memory
   location.  */

static void
expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
{
  unsigned int z;
  vec<tree, va_gc> *vec;

  vec_alloc (vec, 5);
  vec->quick_push (gimple_call_arg (call, 0));
  tree expected = gimple_call_arg (call, 1);
  rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
                                      TREE_TYPE (expected));
  rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
  if (expd != x)
    emit_move_insn (x, expd);
  tree v = make_tree (TREE_TYPE (expected), x);
  vec->quick_push (build1 (ADDR_EXPR,
                           build_pointer_type (TREE_TYPE (expected)), v));
  vec->quick_push (gimple_call_arg (call, 2));
  /* Skip the boolean weak parameter.  */
  for (z = 4; z < 6; z++)
    vec->quick_push (gimple_call_arg (call, z));
  /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}.  */
  unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
  gcc_assert (bytes_log2 < 5);
  built_in_function fncode
    = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
                           + bytes_log2);
  tree fndecl = builtin_decl_explicit (fncode);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
                    fndecl);
  tree exp = build_call_vec (boolean_type_node, fn, vec);
  tree lhs = gimple_call_lhs (call);
  rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
  if (lhs)
    {
      rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
      if (GET_MODE (boolret) != mode)
        boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
      x = force_reg (mode, x);
      write_complex_part (target, boolret, true);
      write_complex_part (target, x, false);
    }
}
/* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function.  */

void
expand_ifn_atomic_compare_exchange (gcall *call)
{
  int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
  gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
  machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
  rtx expect, desired, mem, oldval, boolret;
  enum memmodel success, failure;
  tree lhs;
  bool is_weak;
  location_t loc
    = expansion_point_location_if_in_system_header (gimple_location (call));

  success = get_memmodel (gimple_call_arg (call, 4));
  failure = get_memmodel (gimple_call_arg (call, 5));

  if (failure > success)
    {
      warning_at (loc, OPT_Winvalid_memory_model,
                  "failure memory model cannot be stronger than success "
                  "memory model for %<__atomic_compare_exchange%>");
      success = MEMMODEL_SEQ_CST;
    }

  if (is_mm_release (failure) || is_mm_acq_rel (failure))
    {
      warning_at (loc, OPT_Winvalid_memory_model,
                  "invalid failure memory model for "
                  "%<__atomic_compare_exchange%>");
      failure = MEMMODEL_SEQ_CST;
      success = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    {
      expand_ifn_atomic_compare_exchange_into_call (call, mode);
      return;
    }

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);

  expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
  desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);

  is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;

  boolret = NULL;
  oldval = NULL;

  if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
                                       is_weak, success, failure))
    {
      expand_ifn_atomic_compare_exchange_into_call (call, mode);
      return;
    }

  lhs = gimple_call_lhs (call);
  if (lhs)
    {
      rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
      if (GET_MODE (boolret) != mode)
        boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
      write_complex_part (target, boolret, true);
      write_complex_part (target, oldval, false);
    }
}
/* Expand the __atomic_load intrinsic:
	TYPE __atomic_load (TYPE *object, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
{
  rtx mem;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 1));
  if (is_mm_release (model) || is_mm_acq_rel (model))
    {
      location_t loc
        = expansion_point_location_if_in_system_header (input_location);
      warning_at (loc, OPT_Winvalid_memory_model,
                  "invalid memory model for %<__atomic_load%>");
      model = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operand.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  return expand_atomic_load (target, mem, model);
}
/* Expand the __atomic_store intrinsic:
	void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_store (machine_mode mode, tree exp)
{
  rtx mem, val;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));
  if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
        || is_mm_release (model)))
    {
      location_t loc
        = expansion_point_location_if_in_system_header (input_location);
      warning_at (loc, OPT_Winvalid_memory_model,
                  "invalid memory model for %<__atomic_store%>");
      model = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_store (mem, val, model, false);
}
/* Expand the __atomic_fetch_XXX intrinsic:
	TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.
   CODE is the operation, PLUS, MINUS, AND, XOR, or IOR.
   FETCH_AFTER is true if returning the result of the operation.
   FETCH_AFTER is false if returning the value before the operation.
   IGNORE is true if the result is not used.
   EXT_CALL is the correct builtin for an external call if this cannot be
   resolved to an instruction sequence.  */

static rtx
expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
                                enum rtx_code code, bool fetch_after,
                                bool ignore, enum built_in_function ext_call)
{
  rtx val, mem, ret;
  enum memmodel model;
  tree fndecl;
  tree addr;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  /* Only try generating instructions if inlining is turned on.  */
  if (flag_inline_atomics)
    {
      ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
      if (ret)
        return ret;
    }

  /* Return if a different routine isn't needed for the library call.  */
  if (ext_call == BUILT_IN_NONE)
    return NULL_RTX;

  /* Change the call to the specified function.  */
  fndecl = get_callee_fndecl (exp);
  addr = CALL_EXPR_FN (exp);
  STRIP_NOPS (addr);

  gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
  TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);

  /* If we will emit code after the call, the call can not be a tail call.
     If it is emitted as a tail call, a barrier is emitted after it, and
     then all trailing code is removed.  */
  if (!ignore)
    CALL_EXPR_TAILCALL (exp) = 0;

  /* Expand the call here so we can emit trailing code.  */
  ret = expand_call (exp, target, ignore);

  /* Replace the original function just in case it matters.  */
  TREE_OPERAND (addr, 0) = fndecl;

  /* Then issue the arithmetic correction to return the right result.  */
  if (!ignore)
    {
      if (code == NOT)
        {
          ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
                                     OPTAB_LIB_WIDEN);
          ret = expand_simple_unop (mode, NOT, ret, target, true);
        }
      else
        ret = expand_simple_binop (mode, code, ret, val, target, true,
                                   OPTAB_LIB_WIDEN);
    }
  return ret;
}
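/* For illustration: the correction above recovers the "fetch after"
   value from a "fetch before" library result.  For __atomic_add_fetch
   falling back on __atomic_fetch_add the emitted fixup is equivalent to

	ret = __atomic_fetch_add (p, val, model);
	ret = ret + val;              // expand_simple_binop (PLUS)

   while NAND, which cannot be recovered by reapplying CODE, uses

	ret = __atomic_fetch_nand (p, val, model);
	ret = ~(ret & val);           // AND then NOT, as coded above
   */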
/* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function.  */

void
expand_ifn_atomic_bit_test_and (gcall *call)
{
  tree ptr = gimple_call_arg (call, 0);
  tree bit = gimple_call_arg (call, 1);
  tree flag = gimple_call_arg (call, 2);
  tree lhs = gimple_call_lhs (call);
  enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
  machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
  enum rtx_code code;
  optab optab;
  struct expand_operand ops[5];

  gcc_assert (flag_inline_atomics);

  if (gimple_call_num_args (call) == 4)
    model = get_memmodel (gimple_call_arg (call, 3));

  rtx mem = get_builtin_sync_mem (ptr, mode);
  rtx val = expand_expr_force_mode (bit, mode);

  switch (gimple_call_internal_fn (call))
    {
    case IFN_ATOMIC_BIT_TEST_AND_SET:
      code = IOR;
      optab = atomic_bit_test_and_set_optab;
      break;
    case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
      code = XOR;
      optab = atomic_bit_test_and_complement_optab;
      break;
    case IFN_ATOMIC_BIT_TEST_AND_RESET:
      code = AND;
      optab = atomic_bit_test_and_reset_optab;
      break;
    default:
      gcc_unreachable ();
    }

  if (lhs == NULL_TREE)
    {
      val = expand_simple_binop (mode, ASHIFT, const1_rtx,
                                 val, NULL_RTX, true, OPTAB_DIRECT);
      if (code == AND)
        val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
      expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
      return;
    }

  rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  enum insn_code icode = direct_optab_handler (optab, mode);
  gcc_assert (icode != CODE_FOR_nothing);
  create_output_operand (&ops[0], target, mode);
  create_fixed_operand (&ops[1], mem);
  create_convert_operand_to (&ops[2], val, mode, true);
  create_integer_operand (&ops[3], model);
  create_integer_operand (&ops[4], integer_onep (flag));
  if (maybe_expand_insn (icode, 5, ops))
    return;

  rtx bitval = val;
  val = expand_simple_binop (mode, ASHIFT, const1_rtx,
                             val, NULL_RTX, true, OPTAB_DIRECT);
  rtx maskval = val;
  if (code == AND)
    val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
  rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
                                       code, model, false);
  if (integer_onep (flag))
    {
      result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
                                    NULL_RTX, true, OPTAB_DIRECT);
      result = expand_simple_binop (mode, AND, result, const1_rtx, target,
                                    true, OPTAB_DIRECT);
    }
  else
    result = expand_simple_binop (mode, AND, result, maskval, target, true,
                                  OPTAB_DIRECT);
  if (result != target)
    emit_move_insn (target, result);
}
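/* For illustration (simplified): the IFN_ATOMIC_BIT_TEST_AND_* calls are
   manufactured during gimple optimization from patterns such as

	old = __atomic_fetch_or (p, 1 << bit, model);
	set = (old >> bit) & 1;

   which the optab path above can then emit as a single instruction on
   targets that have one (e.g. "lock bts" on x86).  */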
/* Expand an atomic clear operation.
	void _atomic_clear (BOOL *obj, enum memmodel)
   EXP is the call expression.  */

static rtx
expand_builtin_atomic_clear (tree exp)
{
  machine_mode mode;
  rtx mem, ret;
  enum memmodel model;

  mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  model = get_memmodel (CALL_EXPR_ARG (exp, 1));

  if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
    {
      location_t loc
        = expansion_point_location_if_in_system_header (input_location);
      warning_at (loc, OPT_Winvalid_memory_model,
                  "invalid memory model for %<__atomic_store%>");
      model = MEMMODEL_SEQ_CST;
    }

  /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
     Failing that, a store is issued by __atomic_store.  The only way this can
     fail is if the bool type is larger than a word size.  Unlikely, but
     handle it anyway for completeness.  Assume a single threaded model since
     there is no atomic support in this case, and no barriers are required.  */
  ret = expand_atomic_store (mem, const0_rtx, model, true);
  if (!ret)
    emit_move_insn (mem, const0_rtx);
  return const0_rtx;
}
/* Expand an atomic test_and_set operation.
	bool _atomic_test_and_set (BOOL *obj, enum memmodel)
   EXP is the call expression.  */

static rtx
expand_builtin_atomic_test_and_set (tree exp, rtx target)
{
  rtx mem;
  enum memmodel model;
  machine_mode mode;

  mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  model = get_memmodel (CALL_EXPR_ARG (exp, 1));

  return expand_atomic_test_and_set (target, mem, model);
}
/* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
   this architecture.  If ARG1 is NULL, use typical alignment for size ARG0.  */

static tree
fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
{
  int size;
  machine_mode mode;
  unsigned int mode_align, type_align;

  if (TREE_CODE (arg0) != INTEGER_CST)
    return NULL_TREE;

  /* We need a corresponding integer mode for the access to be lock-free.  */
  size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
  if (!int_mode_for_size (size, 0).exists (&mode))
    return boolean_false_node;

  mode_align = GET_MODE_ALIGNMENT (mode);

  if (TREE_CODE (arg1) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));

      /* Either this argument is null, or it's a fake pointer encoding
         the alignment of the object.  */
      val = least_bit_hwi (val);
      val *= BITS_PER_UNIT;

      if (val == 0 || mode_align < val)
        type_align = mode_align;
      else
        type_align = val;
    }
  else
    {
      tree ttype = TREE_TYPE (arg1);

      /* This function is usually invoked and folded immediately by the front
         end before anything else has a chance to look at it.  The pointer
         parameter at this point is usually cast to a void *, so check for that
         and look past the cast.  */
      if (CONVERT_EXPR_P (arg1)
          && POINTER_TYPE_P (ttype)
          && VOID_TYPE_P (TREE_TYPE (ttype))
          && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
        arg1 = TREE_OPERAND (arg1, 0);

      ttype = TREE_TYPE (arg1);
      gcc_assert (POINTER_TYPE_P (ttype));

      /* Get the underlying type of the object.  */
      ttype = TREE_TYPE (ttype);
      type_align = TYPE_ALIGN (ttype);
    }

  /* If the object has smaller alignment, the lock free routines cannot
     be used.  */
  if (type_align < mode_align)
    return boolean_false_node;

  /* Check if a compare_and_swap pattern exists for the mode which represents
     the required size.  The pattern is not allowed to fail, so the existence
     of the pattern indicates support is present.  Also require that an
     atomic load exists for the required size.  */
  if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
    return boolean_true_node;

  return boolean_false_node;
}
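/* For illustration only (user-level C): with a null object pointer the
   answer depends only on the size and its typical alignment, so on most
   targets

	__atomic_always_lock_free (sizeof (int), 0)    // folds to 1

   while passing a real pointer also folds in the alignment of the
   pointed-to type, as computed above.  */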
/* Return true if the parameters to call EXP represent an object which will
   always generate lock free instructions.  The first argument represents the
   size of the object, and the second parameter is a pointer to the object
   itself.  If NULL is passed for the object, then the result is based on
   typical alignment for an object of the specified size.  Otherwise return
   false.  */

static rtx
expand_builtin_atomic_always_lock_free (tree exp)
{
  tree size;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);

  if (TREE_CODE (arg0) != INTEGER_CST)
    {
      error ("non-constant argument 1 to __atomic_always_lock_free");
      return const0_rtx;
    }

  size = fold_builtin_atomic_always_lock_free (arg0, arg1);
  if (size == boolean_true_node)
    return const1_rtx;
  return const0_rtx;
}
/* Return a one or zero if it can be determined that object ARG1 of size ARG0
   is lock free on this architecture.  */

static tree
fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
{
  if (!flag_inline_atomics)
    return NULL_TREE;

  /* If it isn't always lock free, don't generate a result.  */
  if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
    return boolean_true_node;

  return NULL_TREE;
}
/* Return true if the parameters to call EXP represent an object which will
   always generate lock free instructions.  The first argument represents the
   size of the object, and the second parameter is a pointer to the object
   itself.  If NULL is passed for the object, then the result is based on
   typical alignment for an object of the specified size.  Otherwise return
   NULL.  */

static rtx
expand_builtin_atomic_is_lock_free (tree exp)
{
  tree size;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);

  if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
    {
      error ("non-integer argument 1 to __atomic_is_lock_free");
      return NULL_RTX;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* If the value is known at compile time, return the RTX for it.  */
  size = fold_builtin_atomic_is_lock_free (arg0, arg1);
  if (size == boolean_true_node)
    return const1_rtx;

  return NULL_RTX;
}
/* Expand the __atomic_thread_fence intrinsic:
	void __atomic_thread_fence (enum memmodel)
   EXP is the CALL_EXPR.  */

static void
expand_builtin_atomic_thread_fence (tree exp)
{
  enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
  expand_mem_thread_fence (model);
}

/* Expand the __atomic_signal_fence intrinsic:
	void __atomic_signal_fence (enum memmodel)
   EXP is the CALL_EXPR.  */

static void
expand_builtin_atomic_signal_fence (tree exp)
{
  enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
  expand_mem_signal_fence (model);
}

/* Expand the __sync_synchronize intrinsic.  */

static void
expand_builtin_sync_synchronize (void)
{
  expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
}
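/* For illustration only (user-level C): a signal fence constrains only
   the compiler, while a thread fence may also emit a hardware barrier:

	__atomic_signal_fence (__ATOMIC_SEQ_CST);   // compiler barrier only
	__atomic_thread_fence (__ATOMIC_SEQ_CST);   // e.g. mfence on x86
   */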
static rtx
expand_builtin_thread_pointer (tree exp, rtx target)
{
  enum insn_code icode;
  if (!validate_arglist (exp, VOID_TYPE))
    return const0_rtx;
  icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand op;
      /* If the target is not suitable then create a new target.  */
      if (target == NULL_RTX
          || !REG_P (target)
          || GET_MODE (target) != Pmode)
        target = gen_reg_rtx (Pmode);
      create_output_operand (&op, target, Pmode);
      expand_insn (icode, 1, &op);
      return target;
    }
  error ("__builtin_thread_pointer is not supported on this target");
  return const0_rtx;
}

static void
expand_builtin_set_thread_pointer (tree exp)
{
  enum insn_code icode;
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return;
  icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand op;
      rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
                             Pmode, EXPAND_NORMAL);
      create_input_operand (&op, val, Pmode);
      expand_insn (icode, 1, &op);
      return;
    }
  error ("__builtin_set_thread_pointer is not supported on this target");
}
/* Emit code to restore the current value of stack.  */

static void
expand_stack_restore (tree var)
{
  rtx_insn *prev;
  rtx sa = expand_normal (var);

  sa = convert_memory_address (Pmode, sa);

  prev = get_last_insn ();
  emit_stack_restore (SAVE_BLOCK, sa);

  record_new_stack_level ();

  fixup_args_size_notes (prev, get_last_insn (), 0);
}

/* Emit code to save the current value of stack.  */

static rtx
expand_stack_save (void)
{
  rtx ret = NULL_RTX;

  emit_stack_save (SAVE_BLOCK, &ret);
  return ret;
}
/* Emit code to get the openacc gang, worker or vector id or size.  */

static rtx
expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
{
  const char *name;
  rtx fallback_retval;
  rtx_insn *(*gen_fn) (rtx, rtx);
  switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
    {
    case BUILT_IN_GOACC_PARLEVEL_ID:
      name = "__builtin_goacc_parlevel_id";
      fallback_retval = const0_rtx;
      gen_fn = targetm.gen_oacc_dim_pos;
      break;
    case BUILT_IN_GOACC_PARLEVEL_SIZE:
      name = "__builtin_goacc_parlevel_size";
      fallback_retval = const1_rtx;
      gen_fn = targetm.gen_oacc_dim_size;
      break;
    default:
      gcc_unreachable ();
    }

  if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
    {
      error ("%qs only supported in OpenACC code", name);
      return const0_rtx;
    }

  tree arg = CALL_EXPR_ARG (exp, 0);
  if (TREE_CODE (arg) != INTEGER_CST)
    {
      error ("non-constant argument 0 to %qs", name);
      return const0_rtx;
    }

  int dim = TREE_INT_CST_LOW (arg);
  switch (dim)
    {
    case GOMP_DIM_GANG:
    case GOMP_DIM_WORKER:
    case GOMP_DIM_VECTOR:
      break;
    default:
      error ("illegal argument 0 to %qs", name);
      return const0_rtx;
    }

  if (ignore)
    return target;

  if (target == NULL_RTX)
    target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

  if (!targetm.have_oacc_dim_size ())
    {
      emit_move_insn (target, fallback_retval);
      return target;
    }

  rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
  emit_insn (gen_fn (reg, GEN_INT (dim)));
  if (reg != target)
    emit_move_insn (target, reg);

  return target;
}
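/* For illustration only (user-level C inside an OpenACC region):

	int gang_id  = __builtin_goacc_parlevel_id (GOMP_DIM_GANG);
	int vec_size = __builtin_goacc_parlevel_size (GOMP_DIM_VECTOR);

   On a target without oacc_dim_size support these fall back to id 0 and
   size 1, i.e. a single sequential instance.  */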
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

rtx
expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
                int ignore)
{
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
  int flags;

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return targetm.expand_builtin (exp, target, subtarget, mode, ignore);

  /* When ASan is enabled, we don't want to expand some memory/string
     builtins and rely on libsanitizer's hooks.  This allows us to avoid
     redundant checks and be sure, that possible overflow will be detected
     by ASan.  */

  if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
    return expand_call (exp, target, ignore);

  /* When not optimizing, generate calls to library functions for a certain
     set of builtins.  */
  if (!optimize
      && !called_as_built_in (fndecl)
      && fcode != BUILT_IN_FORK
      && fcode != BUILT_IN_EXECL
      && fcode != BUILT_IN_EXECV
      && fcode != BUILT_IN_EXECLP
      && fcode != BUILT_IN_EXECLE
      && fcode != BUILT_IN_EXECVP
      && fcode != BUILT_IN_EXECVE
      && !ALLOCA_FUNCTION_CODE_P (fcode)
      && fcode != BUILT_IN_FREE)
    return expand_call (exp, target, ignore);

  /* The built-in function expanders test for target == const0_rtx
     to determine whether the function's result will be ignored.  */
  if (ignore)
    target = const0_rtx;

  /* If the result of a pure or const built-in function is ignored, and
     none of its arguments are volatile, we can avoid expanding the
     built-in call and just evaluate the arguments for side-effects.  */
  if (target == const0_rtx
      && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
      && !(flags & ECF_LOOPING_CONST_OR_PURE))
    {
      bool volatilep = false;
      tree arg;
      call_expr_arg_iterator iter;

      FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
        if (TREE_THIS_VOLATILE (arg))
          {
            volatilep = true;
            break;
          }

      if (!volatilep)
        {
          FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
            expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
          return const0_rtx;
        }
    }

  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_FABS):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
    case BUILT_IN_FABSD32:
    case BUILT_IN_FABSD64:
    case BUILT_IN_FABSD128:
      target = expand_builtin_fabs (exp, target, subtarget);
      if (target)
        return target;
      break;

    CASE_FLT_FN (BUILT_IN_COPYSIGN):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
      target = expand_builtin_copysign (exp, target, subtarget);
      if (target)
        return target;
      break;

      /* Just do a normal library call if we were unable to fold
         the values.  */
    CASE_FLT_FN (BUILT_IN_CABS):
      break;

    CASE_FLT_FN (BUILT_IN_FMA):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
      target = expand_builtin_mathfn_ternary (exp, target, subtarget);
      if (target)
        return target;
      break;

    CASE_FLT_FN (BUILT_IN_ILOGB):
      if (! flag_unsafe_math_optimizations)
        break;
      gcc_fallthrough ();
    CASE_FLT_FN (BUILT_IN_ISINF):
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_ISFINITE:
    case BUILT_IN_ISNORMAL:
      target = expand_builtin_interclass_mathfn (exp, target);
      if (target)
        return target;
      break;

    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      target = expand_builtin_int_roundingfn (exp, target);
      if (target)
        return target;
      break;

    CASE_FLT_FN (BUILT_IN_IRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_IROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      target = expand_builtin_int_roundingfn_2 (exp, target);
      if (target)
        return target;
      break;

    CASE_FLT_FN (BUILT_IN_POWI):
      target = expand_builtin_powi (exp, target);
      if (target)
        return target;
      break;

    CASE_FLT_FN (BUILT_IN_CEXPI):
      target = expand_builtin_cexpi (exp, target);
      gcc_assert (target);
      return target;

    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      if (! flag_unsafe_math_optimizations)
        break;
      target = expand_builtin_mathfn_3 (exp, target, subtarget);
      if (target)
        return target;
      break;

    CASE_FLT_FN (BUILT_IN_SINCOS):
      if (! flag_unsafe_math_optimizations)
        break;
      target = expand_builtin_sincos (exp);
      if (target)
        return target;
      break;
:
6886 return expand_builtin_apply_args ();
6888 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6889 FUNCTION with a copy of the parameters described by
6890 ARGUMENTS, and ARGSIZE. It returns a block of memory
6891 allocated on the stack into which is stored all the registers
6892 that might possibly be used for returning the result of a
6893 function. ARGUMENTS is the value returned by
6894 __builtin_apply_args. ARGSIZE is the number of bytes of
6895 arguments that must be copied. ??? How should this value be
6896 computed? We'll also need a safe worst case value for varargs
6898 case BUILT_IN_APPLY
:
6899 if (!validate_arglist (exp
, POINTER_TYPE
,
6900 POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
)
6901 && !validate_arglist (exp
, REFERENCE_TYPE
,
6902 POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
6908 ops
[0] = expand_normal (CALL_EXPR_ARG (exp
, 0));
6909 ops
[1] = expand_normal (CALL_EXPR_ARG (exp
, 1));
6910 ops
[2] = expand_normal (CALL_EXPR_ARG (exp
, 2));
6912 return expand_builtin_apply (ops
[0], ops
[1], ops
[2]);
6915 /* __builtin_return (RESULT) causes the function to return the
6916 value described by RESULT. RESULT is address of the block of
6917 memory returned by __builtin_apply. */
6918 case BUILT_IN_RETURN
:
6919 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6920 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp
, 0)));
6923 case BUILT_IN_SAVEREGS
:
6924 return expand_builtin_saveregs ();
6926 case BUILT_IN_VA_ARG_PACK
:
6927 /* All valid uses of __builtin_va_arg_pack () are removed during
6929 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp
);
6932 case BUILT_IN_VA_ARG_PACK_LEN
:
6933 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6935 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp
);
6938 /* Return the address of the first anonymous stack arg. */
6939 case BUILT_IN_NEXT_ARG
:
6940 if (fold_builtin_next_arg (exp
, false))
6942 return expand_builtin_next_arg ();
    case BUILT_IN_CLEAR_CACHE:
      target = expand_builtin___clear_cache (exp);
      if (target)
        return target;
      break;

    case BUILT_IN_CLASSIFY_TYPE:
      return expand_builtin_classify_type (exp);

    case BUILT_IN_CONSTANT_P:
      return const0_rtx;

    case BUILT_IN_FRAME_ADDRESS:
    case BUILT_IN_RETURN_ADDRESS:
      return expand_builtin_frame_address (fndecl, exp);

      /* Returns the address of the area where the structure is returned.
         0 otherwise.  */
    case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      if (call_expr_nargs (exp) != 0
          || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
          || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
        return const0_rtx;
      else
        return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);

    CASE_BUILT_IN_ALLOCA:
      target = expand_builtin_alloca (exp);
      if (target)
        return target;
      break;

    case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
      return expand_asan_emit_allocas_unpoison (exp);

    case BUILT_IN_STACK_SAVE:
      return expand_stack_save ();

    case BUILT_IN_STACK_RESTORE:
      expand_stack_restore (CALL_EXPR_ARG (exp, 0));
      return const0_rtx;

    case BUILT_IN_BSWAP16:
    case BUILT_IN_BSWAP32:
    case BUILT_IN_BSWAP64:
      target = expand_builtin_bswap (target_mode, exp, target, subtarget);
      if (target)
        return target;
      break;
    CASE_INT_FN (BUILT_IN_FFS):
      target = expand_builtin_unop (target_mode, exp, target,
                                    subtarget, ffs_optab);
      if (target)
        return target;
      break;

    CASE_INT_FN (BUILT_IN_CLZ):
      target = expand_builtin_unop (target_mode, exp, target,
                                    subtarget, clz_optab);
      if (target)
        return target;
      break;

    CASE_INT_FN (BUILT_IN_CTZ):
      target = expand_builtin_unop (target_mode, exp, target,
                                    subtarget, ctz_optab);
      if (target)
        return target;
      break;

    CASE_INT_FN (BUILT_IN_CLRSB):
      target = expand_builtin_unop (target_mode, exp, target,
                                    subtarget, clrsb_optab);
      if (target)
        return target;
      break;

    CASE_INT_FN (BUILT_IN_POPCOUNT):
      target = expand_builtin_unop (target_mode, exp, target,
                                    subtarget, popcount_optab);
      if (target)
        return target;
      break;

    CASE_INT_FN (BUILT_IN_PARITY):
      target = expand_builtin_unop (target_mode, exp, target,
                                    subtarget, parity_optab);
      if (target)
        return target;
      break;
    case BUILT_IN_STRLEN:
      target = expand_builtin_strlen (exp, target, target_mode);
      if (target)
        return target;
      break;

    case BUILT_IN_STRNLEN:
      target = expand_builtin_strnlen (exp, target, target_mode);
      if (target)
        return target;
      break;

    case BUILT_IN_STRCAT:
      target = expand_builtin_strcat (exp, target);
      if (target)
        return target;
      break;

    case BUILT_IN_STRCPY:
      target = expand_builtin_strcpy (exp, target);
      if (target)
        return target;
      break;

    case BUILT_IN_STRNCAT:
      target = expand_builtin_strncat (exp, target);
      if (target)
        return target;
      break;

    case BUILT_IN_STRNCPY:
      target = expand_builtin_strncpy (exp, target);
      if (target)
        return target;
      break;

    case BUILT_IN_STPCPY:
      target = expand_builtin_stpcpy (exp, target, mode);
      if (target)
        return target;
      break;

    case BUILT_IN_STPNCPY:
      target = expand_builtin_stpncpy (exp, target);
      if (target)
        return target;
      break;

    case BUILT_IN_MEMCHR:
      target = expand_builtin_memchr (exp, target);
      if (target)
        return target;
      break;

    case BUILT_IN_MEMCPY:
      target = expand_builtin_memcpy (exp, target);
      if (target)
        return target;
      break;

    case BUILT_IN_MEMMOVE:
      target = expand_builtin_memmove (exp, target);
      if (target)
        return target;
      break;

    case BUILT_IN_MEMPCPY:
      target = expand_builtin_mempcpy (exp, target);
      if (target)
        return target;
      break;

    case BUILT_IN_MEMSET:
      target = expand_builtin_memset (exp, target, mode);
      if (target)
        return target;
      break;

    case BUILT_IN_BZERO:
      target = expand_builtin_bzero (exp);
      if (target)
        return target;
      break;
      /* Expand it as BUILT_IN_MEMCMP_EQ first.  If not successful, change it
         back to a BUILT_IN_STRCMP.  Remember to delete the 3rd parameter
         when changing it to a strcmp call.  */
    case BUILT_IN_STRCMP_EQ:
      target = expand_builtin_memcmp (exp, target, true);
      if (target)
        return target;

      /* Change this call back to a BUILT_IN_STRCMP.  */
      TREE_OPERAND (exp, 1)
        = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));

      /* Delete the last parameter.  */
      unsigned int i;
      vec<tree, va_gc> *arg_vec;
      vec_alloc (arg_vec, 2);
      for (i = 0; i < 2; i++)
        arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
      exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
      /* FALLTHROUGH */

    case BUILT_IN_STRCMP:
      target = expand_builtin_strcmp (exp, target);
      if (target)
        return target;
      break;

      /* Expand it as BUILT_IN_MEMCMP_EQ first.  If not successful, change it
         back to a BUILT_IN_STRNCMP.  */
    case BUILT_IN_STRNCMP_EQ:
      target = expand_builtin_memcmp (exp, target, true);
      if (target)
        return target;

      /* Change it back to a BUILT_IN_STRNCMP.  */
      TREE_OPERAND (exp, 1)
        = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
      /* FALLTHROUGH */

    case BUILT_IN_STRNCMP:
      target = expand_builtin_strncmp (exp, target, mode);
      if (target)
        return target;
      break;

    case BUILT_IN_BCMP:
    case BUILT_IN_MEMCMP:
    case BUILT_IN_MEMCMP_EQ:
      target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
      if (target)
        return target;
      if (fcode == BUILT_IN_MEMCMP_EQ)
        {
          tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
          TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
        }
      break;
    case BUILT_IN_SETJMP:
      /* This should have been lowered to the builtins below.  */
      gcc_unreachable ();

    case BUILT_IN_SETJMP_SETUP:
      /* __builtin_setjmp_setup is passed a pointer to an array of five words
         and the receiver label.  */
      if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
        {
          rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
                                      VOIDmode, EXPAND_NORMAL);
          tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
          rtx_insn *label_r = label_rtx (label);

          /* This is copied from the handling of non-local gotos.  */
          expand_builtin_setjmp_setup (buf_addr, label_r);
          nonlocal_goto_handler_labels
            = gen_rtx_INSN_LIST (VOIDmode, label_r,
                                 nonlocal_goto_handler_labels);
          /* ??? Do not let expand_label treat us as such since we would
             not want to be both on the list of non-local labels and on
             the list of forced labels.  */
          FORCED_LABEL (label) = 0;
          return const0_rtx;
        }
      break;

    case BUILT_IN_SETJMP_RECEIVER:
      /* __builtin_setjmp_receiver is passed the receiver label.  */
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
        {
          tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
          rtx_insn *label_r = label_rtx (label);

          expand_builtin_setjmp_receiver (label_r);
          return const0_rtx;
        }
      break;

      /* __builtin_longjmp is passed a pointer to an array of five words.
         It's similar to the C library longjmp function but works with
         __builtin_setjmp above.  */
    case BUILT_IN_LONGJMP:
      if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
        {
          rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
                                      VOIDmode, EXPAND_NORMAL);
          rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));

          if (value != const1_rtx)
            {
              error ("%<__builtin_longjmp%> second argument must be 1");
              return const0_rtx;
            }

          expand_builtin_longjmp (buf_addr, value);
          return const0_rtx;
        }
      break;

    case BUILT_IN_NONLOCAL_GOTO:
      target = expand_builtin_nonlocal_goto (exp);
      if (target)
        return target;
      break;

      /* This updates the setjmp buffer that is its argument with the value
         of the current stack pointer.  */
    case BUILT_IN_UPDATE_SETJMP_BUF:
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
        {
          rtx buf_addr
            = expand_normal (CALL_EXPR_ARG (exp, 0));

          expand_builtin_update_setjmp_buf (buf_addr);
          return const0_rtx;
        }
      break;

    case BUILT_IN_TRAP:
      expand_builtin_trap ();
      return const0_rtx;

    case BUILT_IN_UNREACHABLE:
      expand_builtin_unreachable ();
      return const0_rtx;
    CASE_FLT_FN (BUILT_IN_SIGNBIT):
    case BUILT_IN_SIGNBITD32:
    case BUILT_IN_SIGNBITD64:
    case BUILT_IN_SIGNBITD128:
      target = expand_builtin_signbit (exp, target);
      if (target)
        return target;
      break;

      /* Various hooks for the DWARF 2 __throw routine.  */
    case BUILT_IN_UNWIND_INIT:
      expand_builtin_unwind_init ();
      return const0_rtx;
    case BUILT_IN_DWARF_CFA:
      return virtual_cfa_rtx;
#ifdef DWARF2_UNWIND_INFO
    case BUILT_IN_DWARF_SP_COLUMN:
      return expand_builtin_dwarf_sp_column ();
    case BUILT_IN_INIT_DWARF_REG_SIZES:
      expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
      return const0_rtx;
#endif
    case BUILT_IN_FROB_RETURN_ADDR:
      return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
    case BUILT_IN_EXTRACT_RETURN_ADDR:
      return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
    case BUILT_IN_EH_RETURN:
      expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
                                CALL_EXPR_ARG (exp, 1));
      return const0_rtx;
    case BUILT_IN_EH_RETURN_DATA_REGNO:
      return expand_builtin_eh_return_data_regno (exp);
    case BUILT_IN_EXTEND_POINTER:
      return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
    case BUILT_IN_EH_POINTER:
      return expand_builtin_eh_pointer (exp);
    case BUILT_IN_EH_FILTER:
      return expand_builtin_eh_filter (exp);
    case BUILT_IN_EH_COPY_VALUES:
      return expand_builtin_eh_copy_values (exp);

    case BUILT_IN_VA_START:
      return expand_builtin_va_start (exp);
    case BUILT_IN_VA_END:
      return expand_builtin_va_end (exp);
    case BUILT_IN_VA_COPY:
      return expand_builtin_va_copy (exp);
    case BUILT_IN_EXPECT:
      return expand_builtin_expect (exp, target);
    case BUILT_IN_ASSUME_ALIGNED:
      return expand_builtin_assume_aligned (exp, target);
    case BUILT_IN_PREFETCH:
      expand_builtin_prefetch (exp);
      return const0_rtx;

    case BUILT_IN_INIT_TRAMPOLINE:
      return expand_builtin_init_trampoline (exp, true);
    case BUILT_IN_INIT_HEAP_TRAMPOLINE:
      return expand_builtin_init_trampoline (exp, false);
    case BUILT_IN_ADJUST_TRAMPOLINE:
      return expand_builtin_adjust_trampoline (exp);

    case BUILT_IN_INIT_DESCRIPTOR:
      return expand_builtin_init_descriptor (exp);
    case BUILT_IN_ADJUST_DESCRIPTOR:
      return expand_builtin_adjust_descriptor (exp);
    case BUILT_IN_FORK:
    case BUILT_IN_EXECL:
    case BUILT_IN_EXECV:
    case BUILT_IN_EXECLP:
    case BUILT_IN_EXECLE:
    case BUILT_IN_EXECVP:
    case BUILT_IN_EXECVE:
      target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
      if (target)
        return target;
      break;
    case BUILT_IN_SYNC_FETCH_AND_ADD_1:
    case BUILT_IN_SYNC_FETCH_AND_ADD_2:
    case BUILT_IN_SYNC_FETCH_AND_ADD_4:
    case BUILT_IN_SYNC_FETCH_AND_ADD_8:
    case BUILT_IN_SYNC_FETCH_AND_ADD_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
      target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_SUB_1:
    case BUILT_IN_SYNC_FETCH_AND_SUB_2:
    case BUILT_IN_SYNC_FETCH_AND_SUB_4:
    case BUILT_IN_SYNC_FETCH_AND_SUB_8:
    case BUILT_IN_SYNC_FETCH_AND_SUB_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
      target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_OR_1:
    case BUILT_IN_SYNC_FETCH_AND_OR_2:
    case BUILT_IN_SYNC_FETCH_AND_OR_4:
    case BUILT_IN_SYNC_FETCH_AND_OR_8:
    case BUILT_IN_SYNC_FETCH_AND_OR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
      target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_AND_1:
    case BUILT_IN_SYNC_FETCH_AND_AND_2:
    case BUILT_IN_SYNC_FETCH_AND_AND_4:
    case BUILT_IN_SYNC_FETCH_AND_AND_8:
    case BUILT_IN_SYNC_FETCH_AND_AND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
      target = expand_builtin_sync_operation (mode, exp, AND, false, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_XOR_1:
    case BUILT_IN_SYNC_FETCH_AND_XOR_2:
    case BUILT_IN_SYNC_FETCH_AND_XOR_4:
    case BUILT_IN_SYNC_FETCH_AND_XOR_8:
    case BUILT_IN_SYNC_FETCH_AND_XOR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
      target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_NAND_1:
    case BUILT_IN_SYNC_FETCH_AND_NAND_2:
    case BUILT_IN_SYNC_FETCH_AND_NAND_4:
    case BUILT_IN_SYNC_FETCH_AND_NAND_8:
    case BUILT_IN_SYNC_FETCH_AND_NAND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
      target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_ADD_AND_FETCH_1:
    case BUILT_IN_SYNC_ADD_AND_FETCH_2:
    case BUILT_IN_SYNC_ADD_AND_FETCH_4:
    case BUILT_IN_SYNC_ADD_AND_FETCH_8:
    case BUILT_IN_SYNC_ADD_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_SUB_AND_FETCH_1:
    case BUILT_IN_SYNC_SUB_AND_FETCH_2:
    case BUILT_IN_SYNC_SUB_AND_FETCH_4:
    case BUILT_IN_SYNC_SUB_AND_FETCH_8:
    case BUILT_IN_SYNC_SUB_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_OR_AND_FETCH_1:
    case BUILT_IN_SYNC_OR_AND_FETCH_2:
    case BUILT_IN_SYNC_OR_AND_FETCH_4:
    case BUILT_IN_SYNC_OR_AND_FETCH_8:
    case BUILT_IN_SYNC_OR_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_AND_AND_FETCH_1:
    case BUILT_IN_SYNC_AND_AND_FETCH_2:
    case BUILT_IN_SYNC_AND_AND_FETCH_4:
    case BUILT_IN_SYNC_AND_AND_FETCH_8:
    case BUILT_IN_SYNC_AND_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, AND, true, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_XOR_AND_FETCH_1:
    case BUILT_IN_SYNC_XOR_AND_FETCH_2:
    case BUILT_IN_SYNC_XOR_AND_FETCH_4:
    case BUILT_IN_SYNC_XOR_AND_FETCH_8:
    case BUILT_IN_SYNC_XOR_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_NAND_AND_FETCH_1:
    case BUILT_IN_SYNC_NAND_AND_FETCH_2:
    case BUILT_IN_SYNC_NAND_AND_FETCH_4:
    case BUILT_IN_SYNC_NAND_AND_FETCH_8:
    case BUILT_IN_SYNC_NAND_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
      if (target)
        return target;
      break;
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
      if (mode == VOIDmode)
        mode = TYPE_MODE (boolean_type_node);
      if (!target || !register_operand (target, mode))
        target = gen_reg_rtx (mode);

      mode = get_builtin_sync_mode
                (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
      target = expand_builtin_compare_and_swap (mode, exp, true, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
      mode = get_builtin_sync_mode
                (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
      target = expand_builtin_compare_and_swap (mode, exp, false, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
      target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_LOCK_RELEASE_1:
    case BUILT_IN_SYNC_LOCK_RELEASE_2:
    case BUILT_IN_SYNC_LOCK_RELEASE_4:
    case BUILT_IN_SYNC_LOCK_RELEASE_8:
    case BUILT_IN_SYNC_LOCK_RELEASE_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
      expand_builtin_sync_lock_release (mode, exp);
      return const0_rtx;

    case BUILT_IN_SYNC_SYNCHRONIZE:
      expand_builtin_sync_synchronize ();
      return const0_rtx;
    case BUILT_IN_ATOMIC_EXCHANGE_1:
    case BUILT_IN_ATOMIC_EXCHANGE_2:
    case BUILT_IN_ATOMIC_EXCHANGE_4:
    case BUILT_IN_ATOMIC_EXCHANGE_8:
    case BUILT_IN_ATOMIC_EXCHANGE_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
      target = expand_builtin_atomic_exchange (mode, exp, target);
      if (target)
        return target;
      break;

    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
      {
        unsigned int nargs, z;
        vec<tree, va_gc> *vec;

        mode
          = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
        target = expand_builtin_atomic_compare_exchange (mode, exp, target);
        if (target)
          return target;

        /* If this is turned into an external library call, the weak parameter
           must be dropped to match the expected parameter list.  */
        nargs = call_expr_nargs (exp);
        vec_alloc (vec, nargs - 1);
        for (z = 0; z < 3; z++)
          vec->quick_push (CALL_EXPR_ARG (exp, z));
        /* Skip the boolean weak parameter.  */
        for (z = 4; z < 6; z++)
          vec->quick_push (CALL_EXPR_ARG (exp, z));
        exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
        break;
      }
    case BUILT_IN_ATOMIC_LOAD_1:
    case BUILT_IN_ATOMIC_LOAD_2:
    case BUILT_IN_ATOMIC_LOAD_4:
    case BUILT_IN_ATOMIC_LOAD_8:
    case BUILT_IN_ATOMIC_LOAD_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
      target = expand_builtin_atomic_load (mode, exp, target);
      if (target)
        return target;
      break;

    case BUILT_IN_ATOMIC_STORE_1:
    case BUILT_IN_ATOMIC_STORE_2:
    case BUILT_IN_ATOMIC_STORE_4:
    case BUILT_IN_ATOMIC_STORE_8:
    case BUILT_IN_ATOMIC_STORE_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
      target = expand_builtin_atomic_store (mode, exp);
      if (target)
        return const0_rtx;
      break;
    case BUILT_IN_ATOMIC_ADD_FETCH_1:
    case BUILT_IN_ATOMIC_ADD_FETCH_2:
    case BUILT_IN_ATOMIC_ADD_FETCH_4:
    case BUILT_IN_ATOMIC_ADD_FETCH_8:
    case BUILT_IN_ATOMIC_ADD_FETCH_16:
      {
        enum built_in_function lib;
        mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
        lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
                                       (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
        target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
                                                 ignore, lib);
        if (target)
          return target;
        break;
      }
    case BUILT_IN_ATOMIC_SUB_FETCH_1:
    case BUILT_IN_ATOMIC_SUB_FETCH_2:
    case BUILT_IN_ATOMIC_SUB_FETCH_4:
    case BUILT_IN_ATOMIC_SUB_FETCH_8:
    case BUILT_IN_ATOMIC_SUB_FETCH_16:
      {
        enum built_in_function lib;
        mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
        lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
                                       (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
        target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
                                                 ignore, lib);
        if (target)
          return target;
        break;
      }
    case BUILT_IN_ATOMIC_AND_FETCH_1:
    case BUILT_IN_ATOMIC_AND_FETCH_2:
    case BUILT_IN_ATOMIC_AND_FETCH_4:
    case BUILT_IN_ATOMIC_AND_FETCH_8:
    case BUILT_IN_ATOMIC_AND_FETCH_16:
      {
        enum built_in_function lib;
        mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
        lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
                                       (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
        target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
                                                 ignore, lib);
        if (target)
          return target;
        break;
      }
    case BUILT_IN_ATOMIC_NAND_FETCH_1:
    case BUILT_IN_ATOMIC_NAND_FETCH_2:
    case BUILT_IN_ATOMIC_NAND_FETCH_4:
    case BUILT_IN_ATOMIC_NAND_FETCH_8:
    case BUILT_IN_ATOMIC_NAND_FETCH_16:
      {
        enum built_in_function lib;
        mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
        lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
                                       (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
        target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
                                                 ignore, lib);
        if (target)
          return target;
        break;
      }
    case BUILT_IN_ATOMIC_XOR_FETCH_1:
    case BUILT_IN_ATOMIC_XOR_FETCH_2:
    case BUILT_IN_ATOMIC_XOR_FETCH_4:
    case BUILT_IN_ATOMIC_XOR_FETCH_8:
    case BUILT_IN_ATOMIC_XOR_FETCH_16:
      {
        enum built_in_function lib;
        mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
        lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
                                       (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
        target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
                                                 ignore, lib);
        if (target)
          return target;
        break;
      }
    case BUILT_IN_ATOMIC_OR_FETCH_1:
    case BUILT_IN_ATOMIC_OR_FETCH_2:
    case BUILT_IN_ATOMIC_OR_FETCH_4:
    case BUILT_IN_ATOMIC_OR_FETCH_8:
    case BUILT_IN_ATOMIC_OR_FETCH_16:
      {
        enum built_in_function lib;
        mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
        lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
                                       (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
        target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
                                                 ignore, lib);
        if (target)
          return target;
        break;
      }
    case BUILT_IN_ATOMIC_FETCH_ADD_1:
    case BUILT_IN_ATOMIC_FETCH_ADD_2:
    case BUILT_IN_ATOMIC_FETCH_ADD_4:
    case BUILT_IN_ATOMIC_FETCH_ADD_8:
    case BUILT_IN_ATOMIC_FETCH_ADD_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
                                               ignore, BUILT_IN_NONE);
      if (target)
        return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_SUB_1:
    case BUILT_IN_ATOMIC_FETCH_SUB_2:
    case BUILT_IN_ATOMIC_FETCH_SUB_4:
    case BUILT_IN_ATOMIC_FETCH_SUB_8:
    case BUILT_IN_ATOMIC_FETCH_SUB_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
                                               ignore, BUILT_IN_NONE);
      if (target)
        return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_AND_1:
    case BUILT_IN_ATOMIC_FETCH_AND_2:
    case BUILT_IN_ATOMIC_FETCH_AND_4:
    case BUILT_IN_ATOMIC_FETCH_AND_8:
    case BUILT_IN_ATOMIC_FETCH_AND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
                                               ignore, BUILT_IN_NONE);
      if (target)
        return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_NAND_1:
    case BUILT_IN_ATOMIC_FETCH_NAND_2:
    case BUILT_IN_ATOMIC_FETCH_NAND_4:
    case BUILT_IN_ATOMIC_FETCH_NAND_8:
    case BUILT_IN_ATOMIC_FETCH_NAND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
                                               ignore, BUILT_IN_NONE);
      if (target)
        return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_XOR_1:
    case BUILT_IN_ATOMIC_FETCH_XOR_2:
    case BUILT_IN_ATOMIC_FETCH_XOR_4:
    case BUILT_IN_ATOMIC_FETCH_XOR_8:
    case BUILT_IN_ATOMIC_FETCH_XOR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
                                               ignore, BUILT_IN_NONE);
      if (target)
        return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_OR_1:
    case BUILT_IN_ATOMIC_FETCH_OR_2:
    case BUILT_IN_ATOMIC_FETCH_OR_4:
    case BUILT_IN_ATOMIC_FETCH_OR_8:
    case BUILT_IN_ATOMIC_FETCH_OR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
                                               ignore, BUILT_IN_NONE);
      if (target)
        return target;
      break;
7758 case BUILT_IN_ATOMIC_TEST_AND_SET
:
7759 return expand_builtin_atomic_test_and_set (exp
, target
);
7761 case BUILT_IN_ATOMIC_CLEAR
:
7762 return expand_builtin_atomic_clear (exp
);
7764 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE
:
7765 return expand_builtin_atomic_always_lock_free (exp
);
7767 case BUILT_IN_ATOMIC_IS_LOCK_FREE
:
7768 target
= expand_builtin_atomic_is_lock_free (exp
);
7773 case BUILT_IN_ATOMIC_THREAD_FENCE
:
7774 expand_builtin_atomic_thread_fence (exp
);
7777 case BUILT_IN_ATOMIC_SIGNAL_FENCE
:
7778 expand_builtin_atomic_signal_fence (exp
);
7781 case BUILT_IN_OBJECT_SIZE
:
7782 return expand_builtin_object_size (exp
);
7784 case BUILT_IN_MEMCPY_CHK
:
7785 case BUILT_IN_MEMPCPY_CHK
:
7786 case BUILT_IN_MEMMOVE_CHK
:
7787 case BUILT_IN_MEMSET_CHK
:
7788 target
= expand_builtin_memory_chk (exp
, target
, mode
, fcode
);
7793 case BUILT_IN_STRCPY_CHK
:
7794 case BUILT_IN_STPCPY_CHK
:
7795 case BUILT_IN_STRNCPY_CHK
:
7796 case BUILT_IN_STPNCPY_CHK
:
7797 case BUILT_IN_STRCAT_CHK
:
7798 case BUILT_IN_STRNCAT_CHK
:
7799 case BUILT_IN_SNPRINTF_CHK
:
7800 case BUILT_IN_VSNPRINTF_CHK
:
7801 maybe_emit_chk_warning (exp
, fcode
);
7804 case BUILT_IN_SPRINTF_CHK
:
7805 case BUILT_IN_VSPRINTF_CHK
:
7806 maybe_emit_sprintf_chk_warning (exp
, fcode
);
7810 if (warn_free_nonheap_object
)
7811 maybe_emit_free_warning (exp
);
7814 case BUILT_IN_THREAD_POINTER
:
7815 return expand_builtin_thread_pointer (exp
, target
);
7817 case BUILT_IN_SET_THREAD_POINTER
:
7818 expand_builtin_set_thread_pointer (exp
);
7821 case BUILT_IN_ACC_ON_DEVICE
:
7822 /* Do library call, if we failed to expand the builtin when
7826 case BUILT_IN_GOACC_PARLEVEL_ID
:
7827 case BUILT_IN_GOACC_PARLEVEL_SIZE
:
7828 return expand_builtin_goacc_parlevel_id_size (exp
, target
, ignore
);
7830 default: /* just do library call, if unknown builtin */
7834 /* The switch statement above can drop through to cause the function
7835 to be called normally. */
7836 return expand_call (exp
, target
, ignore
);
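/* Illustrative sketch (not part of the original file): the *_FETCH
   (op-then-fetch) builtins above are funneled through the fetch-then-op
   expander, which can reconstruct the post-operation value from the
   fetched one.  For NAND, e.g.:

     T __atomic_nand_fetch (T *p, T v, int order);
     // computes, roughly: ~(__atomic_fetch_nand (p, v, order) & v)

   which is why each *_FETCH case passes the corresponding
   BUILT_IN_ATOMIC_FETCH_* code as the library fallback.  */
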
/* Determine whether a tree node represents a call to a built-in
   function.  If the tree T is a call to a built-in function with
   the right number of arguments of the appropriate types, return
   the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
   Otherwise the return value is END_BUILTINS.  */

enum built_in_function
builtin_mathfn_code (const_tree t)
{
  const_tree fndecl, arg, parmlist;
  const_tree argtype, parmtype;
  const_call_expr_arg_iterator iter;

  if (TREE_CODE (t) != CALL_EXPR)
    return END_BUILTINS;

  fndecl = get_callee_fndecl (t);
  if (fndecl == NULL_TREE
      || TREE_CODE (fndecl) != FUNCTION_DECL
      || ! DECL_BUILT_IN (fndecl)
      || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return END_BUILTINS;

  parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  init_const_call_expr_arg_iterator (t, &iter);
  for (; parmlist; parmlist = TREE_CHAIN (parmlist))
    {
      /* If a function doesn't take a variable number of arguments,
         the last element in the list will have type `void'.  */
      parmtype = TREE_VALUE (parmlist);
      if (VOID_TYPE_P (parmtype))
        {
          if (more_const_call_expr_args_p (&iter))
            return END_BUILTINS;
          return DECL_FUNCTION_CODE (fndecl);
        }

      if (! more_const_call_expr_args_p (&iter))
        return END_BUILTINS;

      arg = next_const_call_expr_arg (&iter);
      argtype = TREE_TYPE (arg);

      if (SCALAR_FLOAT_TYPE_P (parmtype))
        {
          if (! SCALAR_FLOAT_TYPE_P (argtype))
            return END_BUILTINS;
        }
      else if (COMPLEX_FLOAT_TYPE_P (parmtype))
        {
          if (! COMPLEX_FLOAT_TYPE_P (argtype))
            return END_BUILTINS;
        }
      else if (POINTER_TYPE_P (parmtype))
        {
          if (! POINTER_TYPE_P (argtype))
            return END_BUILTINS;
        }
      else if (INTEGRAL_TYPE_P (parmtype))
        {
          if (! INTEGRAL_TYPE_P (argtype))
            return END_BUILTINS;
        }
      else
        return END_BUILTINS;
    }

  /* Variable-length argument list.  */
  return DECL_FUNCTION_CODE (fndecl);
}

/* Fold a call to __builtin_constant_p, if we know its argument ARG will
   evaluate to a constant.  */

static tree
fold_builtin_constant_p (tree arg)
{
  /* We return 1 for a numeric type that's known to be a constant
     value at compile-time or for an aggregate type that's a
     literal constant.  */
  STRIP_NOPS (arg);

  /* If we know this is a constant, emit the constant of one.  */
  if (CONSTANT_CLASS_P (arg)
      || (TREE_CODE (arg) == CONSTRUCTOR
          && TREE_CONSTANT (arg)))
    return integer_one_node;
  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (arg, 0);
      if (TREE_CODE (op) == STRING_CST
          || (TREE_CODE (op) == ARRAY_REF
              && integer_zerop (TREE_OPERAND (op, 1))
              && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
        return integer_one_node;
    }

  /* If this expression has side effects, show we don't know it to be a
     constant.  Likewise if it's a pointer or aggregate type since in
     those cases we only want literals, since those are only optimized
     when generating RTL, not later.
     And finally, if we are compiling an initializer, not code, we
     need to return a definite result now; there's not going to be any
     more optimization done.  */
  if (TREE_SIDE_EFFECTS (arg)
      || AGGREGATE_TYPE_P (TREE_TYPE (arg))
      || POINTER_TYPE_P (TREE_TYPE (arg))
      || cfun == 0
      || folding_initializer
      || force_folding_builtin_constant_p)
    return integer_zero_node;

  return NULL_TREE;
}

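/* Illustrative sketch (not part of the original file): with the folding
   above,

     __builtin_constant_p (42)    // -> 1, CONSTANT_CLASS_P
     __builtin_constant_p ("ab")  // -> 1, ADDR_EXPR of a STRING_CST
     __builtin_constant_p (x++)   // -> 0, has side effects

   while still-undecided arguments yield NULL_TREE so later passes can
   retry after more optimization.  */
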
/* Create builtin_expect with PRED and EXPECTED as its arguments and
   return it as a truthvalue.  */

static tree
build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
                                tree predictor)
{
  tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;

  fn = builtin_decl_explicit (BUILT_IN_EXPECT);
  arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
  ret_type = TREE_TYPE (TREE_TYPE (fn));
  pred_type = TREE_VALUE (arg_types);
  expected_type = TREE_VALUE (TREE_CHAIN (arg_types));

  pred = fold_convert_loc (loc, pred_type, pred);
  expected = fold_convert_loc (loc, expected_type, expected);
  call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
                                   predictor);

  return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
                 build_int_cst (ret_type, 0));
}

/* Fold a call to builtin_expect with arguments ARG0 and ARG1.  Return
   NULL_TREE if no simplification is possible.  */

static tree
fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
{
  tree inner, fndecl, inner_arg0;
  enum tree_code code;

  /* Distribute the expected value over short-circuiting operators.
     See through the cast from truthvalue_type_node to long.  */
  inner_arg0 = arg0;
  while (CONVERT_EXPR_P (inner_arg0)
         && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
         && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
    inner_arg0 = TREE_OPERAND (inner_arg0, 0);

  /* If this is a builtin_expect within a builtin_expect keep the
     inner one.  See through a comparison against a constant.  It
     might have been added to create a truthvalue.  */
  inner = inner_arg0;

  if (COMPARISON_CLASS_P (inner)
      && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
    inner = TREE_OPERAND (inner, 0);

  if (TREE_CODE (inner) == CALL_EXPR
      && (fndecl = get_callee_fndecl (inner))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
    return arg0;

  inner = inner_arg0;
  code = TREE_CODE (inner);
  if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
    {
      tree op0 = TREE_OPERAND (inner, 0);
      tree op1 = TREE_OPERAND (inner, 1);
      arg1 = save_expr (arg1);

      op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
      op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
      inner = build2 (code, TREE_TYPE (inner), op0, op1);

      return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
    }

  /* If the argument isn't invariant then there's nothing else we can do.  */
  if (!TREE_CONSTANT (inner_arg0))
    return NULL_TREE;

  /* If we expect that a comparison against the argument will fold to
     a constant return the constant.  In practice, this means a true
     constant or the address of a non-weak symbol.  */
  inner = inner_arg0;
  STRIP_NOPS (inner);
  if (TREE_CODE (inner) == ADDR_EXPR)
    {
      do
        {
          inner = TREE_OPERAND (inner, 0);
        }
      while (TREE_CODE (inner) == COMPONENT_REF
             || TREE_CODE (inner) == ARRAY_REF);
      if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
        return NULL_TREE;
    }

  /* Otherwise, ARG0 already has the proper type for the return value.  */
  return arg0;
}

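/* Illustrative sketch (not part of the original file): for short-circuit
   conditions the folding above distributes the expectation, roughly

     __builtin_expect (a && b, 1)
     // -> (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   so each arm of the TRUTH_ANDIF_EXPR carries its own prediction.  */
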
/* Fold a call to __builtin_classify_type with argument ARG.  */

static tree
fold_builtin_classify_type (tree arg)
{
  if (arg == 0)
    return build_int_cst (integer_type_node, no_type_class);

  return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
}

/* Fold a call to __builtin_strlen with argument ARG.  */

static tree
fold_builtin_strlen (location_t loc, tree type, tree arg)
{
  if (!validate_arg (arg, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree len = c_strlen (arg, 0);

      if (len)
        return fold_convert_loc (loc, type, len);

      return NULL_TREE;
    }
}

/* Fold a call to __builtin_inf or __builtin_huge_val.  */

static tree
fold_builtin_inf (location_t loc, tree type, int warn)
{
  REAL_VALUE_TYPE real;

  /* __builtin_inff is intended to be usable to define INFINITY on all
     targets.  If an infinity is not available, INFINITY expands "to a
     positive constant of type float that overflows at translation
     time", footnote "In this case, using INFINITY will violate the
     constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
     Thus we pedwarn to ensure this constraint violation is
     diagnosed.  */
  if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
    pedwarn (loc, 0, "target format does not support infinity");

  real_inf (&real);
  return build_real (type, real);
}

/* Fold function call to builtin sincos, sincosf, or sincosl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_sincos (location_t loc,
                     tree arg0, tree arg1, tree arg2)
{
  tree type;
  tree fndecl, call = NULL_TREE;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  type = TREE_TYPE (arg0);

  /* Calculate the result when the argument is a constant.  */
  built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
  if (fn == END_BUILTINS)
    return NULL_TREE;

  /* Canonicalize sincos to cexpi.  */
  if (TREE_CODE (arg0) == REAL_CST)
    {
      tree complex_type = build_complex_type (type);
      call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
    }
  if (!call)
    {
      if (!targetm.libc_has_function (function_c99_math_complex)
          || !builtin_decl_implicit_p (fn))
        return NULL_TREE;
      fndecl = builtin_decl_explicit (fn);
      call = build_call_expr_loc (loc, fndecl, 1, arg0);
      call = builtin_save_expr (call);
    }

  tree ptype = build_pointer_type (type);
  arg1 = fold_convert (ptype, arg1);
  arg2 = fold_convert (ptype, arg2);
  return build2 (COMPOUND_EXPR, void_type_node,
                 build2 (MODIFY_EXPR, void_type_node,
                         build_fold_indirect_ref_loc (loc, arg1),
                         fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
                 build2 (MODIFY_EXPR, void_type_node,
                         build_fold_indirect_ref_loc (loc, arg2),
                         fold_build1_loc (loc, REALPART_EXPR, type, call)));
}

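/* Illustrative sketch (not part of the original file): on targets whose
   libc provides the C99 complex functions, the folding above rewrites

     sincos (x, &s, &c);
     // -> tmp = cexpi (x); s = __imag__ tmp; c = __real__ tmp;

   as a COMPOUND_EXPR of two MODIFY_EXPRs, since cexpi (x) computes
   cos (x) + i * sin (x) in a single call.  */
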
/* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
{
  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
                                  arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
  if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
        = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1
        = fold_convert_loc (loc, integer_type_node,
                            build1 (INDIRECT_REF, cst_uchar_node,
                                    fold_convert_loc (loc,
                                                      cst_uchar_ptr_node,
                                                      arg1)));
      tree ind2
        = fold_convert_loc (loc, integer_type_node,
                            build1 (INDIRECT_REF, cst_uchar_node,
                                    fold_convert_loc (loc,
                                                      cst_uchar_ptr_node,
                                                      arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}

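/* Illustrative sketch (not part of the original file): the length-one
   case above amounts to

     memcmp (p, q, 1)
     // -> (int) *(const unsigned char *) p
     //    - (int) *(const unsigned char *) q

   matching memcmp's requirement that bytes compare as unsigned char.  */
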
/* Fold a call to builtin isascii with argument ARG.  */

static tree
fold_builtin_isascii (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      /* Transform isascii(c) -> ((c & ~0x7f) == 0).  */
      arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
                         build_int_cst (integer_type_node,
                                        ~ (unsigned HOST_WIDE_INT) 0x7f));
      return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
                              arg, integer_zero_node);
    }
}

/* Fold a call to builtin toascii with argument ARG.  */

static tree
fold_builtin_toascii (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Transform toascii(c) -> (c & 0x7f).  */
  return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
                          build_int_cst (integer_type_node, 0x7f));
}

/* Fold a call to builtin isdigit with argument ARG.  */

static tree
fold_builtin_isdigit (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9.  */
      /* According to the C standard, isdigit is unaffected by locale.
         However, it definitely is affected by the target character set.  */
      unsigned HOST_WIDE_INT target_digit0
        = lang_hooks.to_target_charset ('0');

      if (target_digit0 == 0)
        return NULL_TREE;

      arg = fold_convert_loc (loc, unsigned_type_node, arg);
      arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
                         build_int_cst (unsigned_type_node, target_digit0));
      return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
                              build_int_cst (unsigned_type_node, 9));
    }
}

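/* Illustrative sketch (not part of the original file): the single
   unsigned comparison suffices because values below '0' wrap around:

     isdigit (c)   // -> (unsigned) c - '0' <= 9

   e.g. c == '/' (one below '0') yields (unsigned) -1, far above 9.  */
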
/* Fold a call to fabs, fabsf or fabsl with argument ARG.  */

static tree
fold_builtin_fabs (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  arg = fold_convert_loc (loc, type, arg);
  return fold_build1_loc (loc, ABS_EXPR, type, arg);
}

/* Fold a call to abs, labs, llabs or imaxabs with argument ARG.  */

static tree
fold_builtin_abs (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  arg = fold_convert_loc (loc, type, arg);
  return fold_build1_loc (loc, ABS_EXPR, type, arg);
}

/* Fold a call to builtin carg(a+bi) -> atan2(b,a).  */

static tree
fold_builtin_carg (location_t loc, tree arg, tree type)
{
  if (validate_arg (arg, COMPLEX_TYPE)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
    {
      tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);

      if (atan2_fn)
        {
          tree new_arg = builtin_save_expr (arg);
          tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
          tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
          return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
        }
    }

  return NULL_TREE;
}

/* Fold a call to builtin frexp, we can assume the base is 2.  */

static tree
fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      tree frac, exp;

      switch (value->cl)
      {
      case rvc_zero:
        /* For +-0, return (*exp = 0, +-0).  */
        exp = integer_zero_node;
        frac = arg0;
        break;
      case rvc_nan:
      case rvc_inf:
        /* For +-NaN or +-Inf, *exp is unspecified, return arg0.  */
        return omit_one_operand_loc (loc, rettype, arg0, arg1);
      case rvc_normal:
        {
          /* Since the frexp function always expects base 2, and in
             GCC normalized significands are already in the range
             [0.5, 1.0), we have exactly what frexp wants.  */
          REAL_VALUE_TYPE frac_rvt = *value;
          SET_REAL_EXP (&frac_rvt, 0);
          frac = build_real (rettype, frac_rvt);
          exp = build_int_cst (integer_type_node, REAL_EXP (value));
        }
        break;
      default:
        gcc_unreachable ();
      }

      /* Create the COMPOUND_EXPR (*arg1 = exp, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
    }

  return NULL_TREE;
}

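/* Illustrative sketch (not part of the original file): for a constant
   argument the folding above produces, e.g.,

     frexp (8.0, &e)   // -> (*&e = 4, 0.5), since 8.0 == 0.5 * 2^4

   i.e. the (*arg1 = exp, frac) COMPOUND_EXPR built at the end.  */
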
/* Fold a call to builtin modf.  */

static tree
fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      REAL_VALUE_TYPE trunc, frac;

      switch (value->cl)
      {
      case rvc_nan:
      case rvc_zero:
        /* For +-NaN or +-0, return (*arg1 = arg0, arg0).  */
        trunc = frac = *value;
        break;
      case rvc_inf:
        /* For +-Inf, return (*arg1 = arg0, +-0).  */
        frac = dconst0;
        frac.sign = value->sign;
        trunc = *value;
        break;
      case rvc_normal:
        /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)).  */
        real_trunc (&trunc, VOIDmode, value);
        real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
        /* If the original number was negative and already
           integral, then the fractional part is -0.0.  */
        if (value->sign && frac.cl == rvc_zero)
          frac.sign = value->sign;
        break;
      }

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
                              build_real (rettype, trunc));
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
                              build_real (rettype, frac));
    }

  return NULL_TREE;
}

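/* Illustrative sketch (not part of the original file): constant folding
   above gives, e.g.,

     modf (-3.5, &ip)  // -> (*&ip = -3.0, -0.5)
     modf (-2.0, &ip)  // -> (*&ip = -2.0, -0.0), sign kept on the zero

   mirroring the rvc_normal handling of negative integral inputs.  */
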
/* Given a location LOC, an interclass builtin function decl FNDECL
   and its single argument ARG, return a folded expression computing
   the same, or NULL_TREE if we either couldn't or didn't want to fold
   (the latter happens if there's an RTL instruction available).  */

static tree
fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
{
  machine_mode mode;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
    return NULL_TREE;

  mode = TYPE_MODE (TREE_TYPE (arg));

  bool is_ibm_extended = MODE_COMPOSITE_P (mode);

  /* If there is no optab, try generic code.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
      tree result;

    CASE_FLT_FN (BUILT_IN_ISINF):
      {
        /* isinf(x) -> isgreater(fabs(x),DBL_MAX).  */
        tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
        tree type = TREE_TYPE (arg);
        REAL_VALUE_TYPE r;
        char buf[128];

        if (is_ibm_extended)
          {
            /* NaN and Inf are encoded in the high-order double value
               only.  The low-order value is not significant.  */
            type = double_type_node;
            mode = DFmode;
            arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
          }
        get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
        real_from_string (&r, buf);
        result = build_call_expr (isgr_fn, 2,
                                  fold_build1_loc (loc, ABS_EXPR, type, arg),
                                  build_real (type, r));
        return result;
      }
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_ISFINITE:
      {
        /* isfinite(x) -> islessequal(fabs(x),DBL_MAX).  */
        tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
        tree type = TREE_TYPE (arg);
        REAL_VALUE_TYPE r;
        char buf[128];

        if (is_ibm_extended)
          {
            /* NaN and Inf are encoded in the high-order double value
               only.  The low-order value is not significant.  */
            type = double_type_node;
            mode = DFmode;
            arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
          }
        get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
        real_from_string (&r, buf);
        result = build_call_expr (isle_fn, 2,
                                  fold_build1_loc (loc, ABS_EXPR, type, arg),
                                  build_real (type, r));
        /*result = fold_build2_loc (loc, UNGT_EXPR,
                                  TREE_TYPE (TREE_TYPE (fndecl)),
                                  fold_build1_loc (loc, ABS_EXPR, type, arg),
                                  build_real (type, r));
        result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
                                  TREE_TYPE (TREE_TYPE (fndecl)),
                                  result);*/
        return result;
      }
    case BUILT_IN_ISNORMAL:
      {
        /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
           islessequal(fabs(x),DBL_MAX).  */
        tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
        tree type = TREE_TYPE (arg);
        tree orig_arg, max_exp, min_exp;
        machine_mode orig_mode = mode;
        REAL_VALUE_TYPE rmax, rmin;
        char buf[128];

        orig_arg = arg = builtin_save_expr (arg);
        if (is_ibm_extended)
          {
            /* Use double to test the normal range of IBM extended
               precision.  Emin for IBM extended precision is
               different to emin for IEEE double, being 53 higher
               since the low double exponent is at least 53 lower
               than the high double exponent.  */
            type = double_type_node;
            mode = DFmode;
            arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
          }
        arg = fold_build1_loc (loc, ABS_EXPR, type, arg);

        get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
        real_from_string (&rmax, buf);
        sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
        real_from_string (&rmin, buf);
        max_exp = build_real (type, rmax);
        min_exp = build_real (type, rmin);

        max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
        if (is_ibm_extended)
          {
            /* Testing the high end of the range is done just using
               the high double, using the same test as isfinite().
               For the subnormal end of the range we first test the
               high double, then if its magnitude is equal to the
               limit of 0x1p-969, we test whether the low double is
               non-zero and opposite sign to the high double.  */
            tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
            tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
            tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
            tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
                                       arg, min_exp);
            tree as_complex = build1 (VIEW_CONVERT_EXPR,
                                      complex_double_type_node, orig_arg);
            tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
            tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
            tree zero = build_real (type, dconst0);
            tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
            tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
            tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
            tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
                                      fold_build3 (COND_EXPR,
                                                   integer_type_node,
                                                   hilt, logt, lolt));
            eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
                                  eq_min, ok_lo);
            min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
                                   gt_min, eq_min);
          }
        else
          {
            tree const isge_fn
              = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
            min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
          }
        result = fold_build2 (BIT_AND_EXPR, integer_type_node,
                              max_exp, min_exp);
        return result;
      }
    default:
      break;
    }

  return NULL_TREE;
}

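/* Illustrative sketch (not part of the original file): when no direct
   instruction pattern exists, the generic code above lowers the
   classification builtins to unordered comparisons against the format's
   limits, e.g. for IEEE double:

     isinf (x)     // -> isgreater (fabs (x), DBL_MAX)
     isfinite (x)  // -> islessequal (fabs (x), DBL_MAX)
     isnormal (x)  // -> isgreaterequal (fabs (x), DBL_MIN)
                   //    & islessequal (fabs (x), DBL_MAX)
*/
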
/* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
   ARG is the argument for the call.  */

static tree
fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  switch (builtin_index)
    {
    case BUILT_IN_ISINF:
      if (!HONOR_INFINITIES (arg))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      return NULL_TREE;

    case BUILT_IN_ISINF_SIGN:
      {
        /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
        /* In a boolean context, GCC will fold the inner COND_EXPR to
           1.  So e.g. "if (isinf_sign(x))" would be folded to just
           "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
        tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
        tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
        tree tmp = NULL_TREE;

        arg = builtin_save_expr (arg);

        if (signbit_fn && isinf_fn)
          {
            tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
            tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);

            signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
                                            signbit_call, integer_zero_node);
            isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
                                          isinf_call, integer_zero_node);

            tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
                                   integer_minus_one_node, integer_one_node);
            tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
                                   isinf_call, tmp,
                                   integer_zero_node);
          }

        return tmp;
      }

    case BUILT_IN_ISFINITE:
      if (!HONOR_NANS (arg)
          && !HONOR_INFINITIES (arg))
        return omit_one_operand_loc (loc, type, integer_one_node, arg);

      return NULL_TREE;

    case BUILT_IN_ISNAN:
      if (!HONOR_NANS (arg))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      {
        bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
        if (is_ibm_extended)
          {
            /* NaN and Inf are encoded in the high-order double value
               only.  The low-order value is not significant.  */
            arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
          }
      }
      arg = builtin_save_expr (arg);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);

    default:
      gcc_unreachable ();
    }
}

/* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
   This builtin will generate code to return the appropriate floating
   point classification depending on the value of the floating point
   number passed in.  The possible return values must be supplied as
   int arguments to the call in the following order: FP_NAN, FP_INFINITE,
   FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipsis is for exactly
   one floating point argument which is "type generic".  */

static tree
fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
{
  tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
    arg, type, res, tmp;
  machine_mode mode;
  REAL_VALUE_TYPE r;
  char buf[128];

  /* Verify the required arguments in the original call.  */
  if (nargs != 6
      || !validate_arg (args[0], INTEGER_TYPE)
      || !validate_arg (args[1], INTEGER_TYPE)
      || !validate_arg (args[2], INTEGER_TYPE)
      || !validate_arg (args[3], INTEGER_TYPE)
      || !validate_arg (args[4], INTEGER_TYPE)
      || !validate_arg (args[5], REAL_TYPE))
    return NULL_TREE;

  fp_nan = args[0];
  fp_infinite = args[1];
  fp_normal = args[2];
  fp_subnormal = args[3];
  fp_zero = args[4];
  arg = args[5];
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (type);
  arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));

  /* fpclassify(x) ->
       isnan(x) ? FP_NAN :
         (fabs(x) == Inf ? FP_INFINITE :
           (fabs(x) >= DBL_MIN ? FP_NORMAL :
             (x == 0 ? FP_ZERO : FP_SUBNORMAL))).  */

  tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
                         build_real (type, dconst0));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
                         tmp, fp_zero, fp_subnormal);

  sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
  real_from_string (&r, buf);
  tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
                         arg, build_real (type, r));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);

  if (HONOR_INFINITIES (mode))
    {
      real_inf (&r);
      tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
                             build_real (type, r));
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
                             fp_infinite, res);
    }

  if (HONOR_NANS (mode))
    {
      tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
    }

  return res;
}

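/* Illustrative sketch (not part of the original file): the result tree is
   built inside out, so with y = fabs (x) saved in a temporary the final
   expression reads roughly

     // ORDERED (y, y) ? (y == Inf ? FP_INFINITE
     //                   : y >= min-normal ? FP_NORMAL
     //                   : y == 0 ? FP_ZERO : FP_SUBNORMAL)
     //                 : FP_NAN
*/
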
/* Fold a call to an unordered comparison function such as
   __builtin_isgreater().  FNDECL is the FUNCTION_DECL for the function
   being called and ARG0 and ARG1 are the arguments for the call.
   UNORDERED_CODE and ORDERED_CODE are comparison codes that give
   the opposite of the desired result.  UNORDERED_CODE is used
   for modes that can hold NaNs and ORDERED_CODE is used for
   the rest.  */

static tree
fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
                            enum tree_code unordered_code,
                            enum tree_code ordered_code)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum tree_code code;
  tree type0, type1;
  enum tree_code code0, code1;
  tree cmp_type = NULL_TREE;

  type0 = TREE_TYPE (arg0);
  type1 = TREE_TYPE (arg1);

  code0 = TREE_CODE (type0);
  code1 = TREE_CODE (type1);

  if (code0 == REAL_TYPE && code1 == REAL_TYPE)
    /* Choose the wider of two real types.  */
    cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
      ? type0 : type1;
  else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
    cmp_type = type0;
  else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
    cmp_type = type1;

  arg0 = fold_convert_loc (loc, cmp_type, arg0);
  arg1 = fold_convert_loc (loc, cmp_type, arg1);

  if (unordered_code == UNORDERED_EXPR)
    {
      if (!HONOR_NANS (arg0))
        return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
    }

  code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
  return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
                          fold_build2_loc (loc, code, type, arg0, arg1));
}

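/* Illustrative sketch (not part of the original file): each of these
   builtins folds to the negation of the opposite comparison so that no
   invalid-operand exception is raised for quiet NaNs, e.g.

     isgreater (x, y)  // -> !(x UNLE y)  when NaNs are honored
     isgreater (x, y)  // -> !(x <= y)    when they cannot occur
*/
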
/* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
   arithmetics if it can never overflow, or into internal functions that
   return both result of arithmetics and overflowed boolean flag in
   a complex integer result, or some other check for overflow.
   Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
   checking part of that.  */

static tree
fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
                             tree arg0, tree arg1, tree arg2)
{
  enum internal_fn ifn = IFN_LAST;
  /* The code of the expression corresponding to the type-generic
     built-in, or ERROR_MARK for the type-specific ones.  */
  enum tree_code opcode = ERROR_MARK;
  bool ovf_only = false;

  switch (fcode)
    {
    case BUILT_IN_ADD_OVERFLOW_P:
      ovf_only = true;
      /* FALLTHRU */
    case BUILT_IN_ADD_OVERFLOW:
      opcode = PLUS_EXPR;
      /* FALLTHRU */
    case BUILT_IN_SADD_OVERFLOW:
    case BUILT_IN_SADDL_OVERFLOW:
    case BUILT_IN_SADDLL_OVERFLOW:
    case BUILT_IN_UADD_OVERFLOW:
    case BUILT_IN_UADDL_OVERFLOW:
    case BUILT_IN_UADDLL_OVERFLOW:
      ifn = IFN_ADD_OVERFLOW;
      break;
    case BUILT_IN_SUB_OVERFLOW_P:
      ovf_only = true;
      /* FALLTHRU */
    case BUILT_IN_SUB_OVERFLOW:
      opcode = MINUS_EXPR;
      /* FALLTHRU */
    case BUILT_IN_SSUB_OVERFLOW:
    case BUILT_IN_SSUBL_OVERFLOW:
    case BUILT_IN_SSUBLL_OVERFLOW:
    case BUILT_IN_USUB_OVERFLOW:
    case BUILT_IN_USUBL_OVERFLOW:
    case BUILT_IN_USUBLL_OVERFLOW:
      ifn = IFN_SUB_OVERFLOW;
      break;
    case BUILT_IN_MUL_OVERFLOW_P:
      ovf_only = true;
      /* FALLTHRU */
    case BUILT_IN_MUL_OVERFLOW:
      opcode = MULT_EXPR;
      /* FALLTHRU */
    case BUILT_IN_SMUL_OVERFLOW:
    case BUILT_IN_SMULL_OVERFLOW:
    case BUILT_IN_SMULLL_OVERFLOW:
    case BUILT_IN_UMUL_OVERFLOW:
    case BUILT_IN_UMULL_OVERFLOW:
    case BUILT_IN_UMULLL_OVERFLOW:
      ifn = IFN_MUL_OVERFLOW;
      break;
    default:
      gcc_unreachable ();
    }

  /* For the "generic" overloads, the first two arguments can have different
     types and the last argument determines the target type to use to check
     for overflow.  The arguments of the other overloads all have the same
     type.  */
  tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));

  /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
     arguments are constant, attempt to fold the built-in call into a constant
     expression indicating whether or not it detected an overflow.  */
  if (ovf_only
      && TREE_CODE (arg0) == INTEGER_CST
      && TREE_CODE (arg1) == INTEGER_CST)
    /* Perform the computation in the target type and check for overflow.  */
    return omit_one_operand_loc (loc, boolean_type_node,
                                 arith_overflowed_p (opcode, type, arg0, arg1)
                                 ? boolean_true_node : boolean_false_node,
                                 arg2);

  tree ctype = build_complex_type (type);
  tree call = build_call_expr_internal_loc (loc, ifn, ctype,
                                            2, arg0, arg1);
  tree tgt = save_expr (call);
  tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
  tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
  ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);

  if (ovf_only)
    return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);

  tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
  tree store
    = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
  return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
}

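/* Illustrative sketch (not part of the original file): the non-_p forms
   fold into an internal function whose complex result packs the value
   and the overflow flag, roughly

     __builtin_add_overflow (a, b, &r)
     // -> tmp = .ADD_OVERFLOW (a, b);
     //    r = REALPART (tmp);
     //    result = (_Bool) IMAGPART (tmp);

   while the _p forms keep only the IMAGPART test.  */
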
/* Fold a call to __builtin_FILE to a constant string.  */

static inline tree
fold_builtin_FILE (location_t loc)
{
  if (const char *fname = LOCATION_FILE (loc))
    {
      /* The documentation says this builtin is equivalent to the preprocessor
         __FILE__ macro so it appears appropriate to use the same file prefix
         mappings.  */
      fname = remap_macro_filename (fname);
      return build_string_literal (strlen (fname) + 1, fname);
    }

  return build_string_literal (1, "");
}

/* Fold a call to __builtin_FUNCTION to a constant string.  */

static inline tree
fold_builtin_FUNCTION ()
{
  const char *name = "";

  if (current_function_decl)
    name = lang_hooks.decl_printable_name (current_function_decl, 0);

  return build_string_literal (strlen (name) + 1, name);
}

/* Fold a call to __builtin_LINE to an integer constant.  */

static inline tree
fold_builtin_LINE (location_t loc, tree type)
{
  return build_int_cst (type, LOCATION_LINE (loc));
}

/* Fold a call to built-in function FNDECL with 0 arguments.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_0 (location_t loc, tree fndecl)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  switch (fcode)
    {
    case BUILT_IN_FILE:
      return fold_builtin_FILE (loc);

    case BUILT_IN_FUNCTION:
      return fold_builtin_FUNCTION ();

    case BUILT_IN_LINE:
      return fold_builtin_LINE (loc, type);

    CASE_FLT_FN (BUILT_IN_INF):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
    case BUILT_IN_INFD32:
    case BUILT_IN_INFD64:
    case BUILT_IN_INFD128:
      return fold_builtin_inf (loc, type, true);

    CASE_FLT_FN (BUILT_IN_HUGE_VAL):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
      return fold_builtin_inf (loc, type, false);

    case BUILT_IN_CLASSIFY_TYPE:
      return fold_builtin_classify_type (NULL_TREE);

    default:
      break;
    }
  return NULL_TREE;
}

/* Fold a call to built-in function FNDECL with 1 argument, ARG0.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (TREE_CODE (arg0) == ERROR_MARK)
    return NULL_TREE;

  if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
    return ret;

  switch (fcode)
    {
    case BUILT_IN_CONSTANT_P:
      {
        tree val = fold_builtin_constant_p (arg0);

        /* Gimplification will pull the CALL_EXPR for the builtin out of
           an if condition.  When not optimizing, we'll not CSE it back.
           To avoid link error types of regressions, return false now.  */
        if (!val && !optimize)
          val = integer_zero_node;

        return val;
      }

    case BUILT_IN_CLASSIFY_TYPE:
      return fold_builtin_classify_type (arg0);

    case BUILT_IN_STRLEN:
      return fold_builtin_strlen (loc, type, arg0);

    CASE_FLT_FN (BUILT_IN_FABS):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
    case BUILT_IN_FABSD32:
    case BUILT_IN_FABSD64:
    case BUILT_IN_FABSD128:
      return fold_builtin_fabs (loc, arg0, type);

    case BUILT_IN_ABS:
    case BUILT_IN_LABS:
    case BUILT_IN_LLABS:
    case BUILT_IN_IMAXABS:
      return fold_builtin_abs (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_CONJ):
      if (validate_arg (arg0, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
        return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
      break;

    CASE_FLT_FN (BUILT_IN_CREAL):
      if (validate_arg (arg0, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
        return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
      break;

    CASE_FLT_FN (BUILT_IN_CIMAG):
      if (validate_arg (arg0, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
        return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
      break;

    CASE_FLT_FN (BUILT_IN_CARG):
      return fold_builtin_carg (loc, arg0, type);

    case BUILT_IN_ISASCII:
      return fold_builtin_isascii (loc, arg0);

    case BUILT_IN_TOASCII:
      return fold_builtin_toascii (loc, arg0);

    case BUILT_IN_ISDIGIT:
      return fold_builtin_isdigit (loc, arg0);

    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISFINITE:
      {
        tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
        if (ret)
          return ret;
        return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
      }

    CASE_FLT_FN (BUILT_IN_ISINF):
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      {
        tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
        if (ret)
          return ret;
        return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
      }

    case BUILT_IN_ISNORMAL:
      return fold_builtin_interclass_mathfn (loc, fndecl, arg0);

    case BUILT_IN_ISINF_SIGN:
      return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);

    CASE_FLT_FN (BUILT_IN_ISNAN):
    case BUILT_IN_ISNAND32:
    case BUILT_IN_ISNAND64:
    case BUILT_IN_ISNAND128:
      return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);

    case BUILT_IN_FREE:
      if (integer_zerop (arg0))
        return build_empty_stmt (loc);
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (TREE_CODE (arg0) == ERROR_MARK
      || TREE_CODE (arg1) == ERROR_MARK)
    return NULL_TREE;

  if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
    return ret;

  switch (fcode)
    {
    CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
    CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
      if (validate_arg (arg0, REAL_TYPE)
          && validate_arg (arg1, POINTER_TYPE))
        return do_mpfr_lgamma_r (arg0, arg1, type);
      break;

    CASE_FLT_FN (BUILT_IN_FREXP):
      return fold_builtin_frexp (loc, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_MODF):
      return fold_builtin_modf (loc, arg0, arg1, type);

    case BUILT_IN_STRSPN:
      return fold_builtin_strspn (loc, arg0, arg1);

    case BUILT_IN_STRCSPN:
      return fold_builtin_strcspn (loc, arg0, arg1);

    case BUILT_IN_STRPBRK:
      return fold_builtin_strpbrk (loc, arg0, arg1, type);

    case BUILT_IN_EXPECT:
      return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);

    case BUILT_IN_ISGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNLE_EXPR, LE_EXPR);
    case BUILT_IN_ISGREATEREQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNLT_EXPR, LT_EXPR);
    case BUILT_IN_ISLESS:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNGE_EXPR, GE_EXPR);
    case BUILT_IN_ISLESSEQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNGT_EXPR, GT_EXPR);
    case BUILT_IN_ISLESSGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNEQ_EXPR, EQ_EXPR);
    case BUILT_IN_ISUNORDERED:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNORDERED_EXPR,
                                         NOP_EXPR);

      /* We do the folding for va_start in the expander.  */
    case BUILT_IN_VA_START:
      break;

    case BUILT_IN_OBJECT_SIZE:
      return fold_builtin_object_size (arg0, arg1);

    case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
      return fold_builtin_atomic_always_lock_free (arg0, arg1);

    case BUILT_IN_ATOMIC_IS_LOCK_FREE:
      return fold_builtin_atomic_is_lock_free (arg0, arg1);

    default:
      break;
    }
  return NULL_TREE;
}

/* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
   and ARG2.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_3 (location_t loc, tree fndecl,
                tree arg0, tree arg1, tree arg2)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (TREE_CODE (arg0) == ERROR_MARK
      || TREE_CODE (arg1) == ERROR_MARK
      || TREE_CODE (arg2) == ERROR_MARK)
    return NULL_TREE;

  if (tree ret = fold_const_call (as_combined_fn (fcode), type,
                                  arg0, arg1, arg2))
    return ret;

  switch (fcode)
    {

    CASE_FLT_FN (BUILT_IN_SINCOS):
      return fold_builtin_sincos (loc, arg0, arg1, arg2);

    CASE_FLT_FN (BUILT_IN_REMQUO):
      if (validate_arg (arg0, REAL_TYPE)
          && validate_arg (arg1, REAL_TYPE)
          && validate_arg (arg2, POINTER_TYPE))
        return do_mpfr_remquo (arg0, arg1, arg2);
      break;

    case BUILT_IN_MEMCMP:
      return fold_builtin_memcmp (loc, arg0, arg1, arg2);

    case BUILT_IN_EXPECT:
      return fold_builtin_expect (loc, arg0, arg1, arg2);

    case BUILT_IN_ADD_OVERFLOW:
    case BUILT_IN_SUB_OVERFLOW:
    case BUILT_IN_MUL_OVERFLOW:
    case BUILT_IN_ADD_OVERFLOW_P:
    case BUILT_IN_SUB_OVERFLOW_P:
    case BUILT_IN_MUL_OVERFLOW_P:
    case BUILT_IN_SADD_OVERFLOW:
    case BUILT_IN_SADDL_OVERFLOW:
    case BUILT_IN_SADDLL_OVERFLOW:
    case BUILT_IN_SSUB_OVERFLOW:
    case BUILT_IN_SSUBL_OVERFLOW:
    case BUILT_IN_SSUBLL_OVERFLOW:
    case BUILT_IN_SMUL_OVERFLOW:
    case BUILT_IN_SMULL_OVERFLOW:
    case BUILT_IN_SMULLL_OVERFLOW:
    case BUILT_IN_UADD_OVERFLOW:
    case BUILT_IN_UADDL_OVERFLOW:
    case BUILT_IN_UADDLL_OVERFLOW:
    case BUILT_IN_USUB_OVERFLOW:
    case BUILT_IN_USUBL_OVERFLOW:
    case BUILT_IN_USUBLL_OVERFLOW:
    case BUILT_IN_UMUL_OVERFLOW:
    case BUILT_IN_UMULL_OVERFLOW:
    case BUILT_IN_UMULLL_OVERFLOW:
      return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);

    default:
      break;
    }
  return NULL_TREE;
}

/* Fold a call to built-in function FNDECL.  ARGS is an array of NARGS
   arguments.  IGNORE is true if the result of the
   function call is ignored.  This function returns NULL_TREE if no
   simplification was possible.  */

tree
fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
{
  tree ret = NULL_TREE;

  switch (nargs)
    {
    case 0:
      ret = fold_builtin_0 (loc, fndecl);
      break;
    case 1:
      ret = fold_builtin_1 (loc, fndecl, args[0]);
      break;
    case 2:
      ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
      break;
    case 3:
      ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
      break;
    default:
      ret = fold_builtin_varargs (loc, fndecl, args, nargs);
      break;
    }
  if (ret)
    {
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      return ret;
    }
  return NULL_TREE;
}

/* Construct a new CALL_EXPR to FNDECL using the tail of the argument
   list ARGS along with N new arguments in NEWARGS.  SKIP is the number
   of arguments in ARGS to be omitted.  OLDNARGS is the number of
   elements in ARGS.  */

static tree
rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
                          int skip, tree fndecl, int n, va_list newargs)
{
  int nargs = oldnargs - skip + n;
  tree *buffer;

  if (n > 0)
    {
      int i, j;

      buffer = XALLOCAVEC (tree, nargs);
      for (i = 0; i < n; i++)
        buffer[i] = va_arg (newargs, tree);
      for (j = skip; j < oldnargs; j++, i++)
        buffer[i] = args[j];
    }
  else
    buffer = args + skip;

  return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
}

/* Return true if FNDECL shouldn't be folded right now.
   If a built-in function has an inline attribute always_inline
   wrapper, defer folding it after always_inline functions have
   been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
   might not be performed.  */

bool
avoid_folding_inline_builtin (tree fndecl)
{
  return (DECL_DECLARED_INLINE_P (fndecl)
          && DECL_DISREGARD_INLINE_LIMITS (fndecl)
          && cfun
          && !cfun->always_inline_functions_inlined
          && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
}

/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_expr (location_t loc, tree exp, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = get_callee_fndecl (exp);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
         yet.  Defer folding until we see all the arguments
         (after inlining).  */
      && !CALL_EXPR_VA_ARG_PACK (exp))
    {
      int nargs = call_expr_nargs (exp);

      /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
         instead last argument is __builtin_va_arg_pack ().  Defer folding
         even in that case, until arguments are finalized.  */
      if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
        {
          tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
          if (fndecl2
              && TREE_CODE (fndecl2) == FUNCTION_DECL
              && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
              && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
            return NULL_TREE;
        }

      if (avoid_folding_inline_builtin (fndecl))
        return NULL_TREE;

      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
        return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
                                     CALL_EXPR_ARGP (exp), ignore);
      else
        {
          tree *args = CALL_EXPR_ARGP (exp);
          ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
          if (ret)
            return ret;
        }
    }
  return NULL_TREE;
}

/* Fold a CALL_EXPR with type TYPE with FN as the function expression.
   N arguments are passed in the array ARGARRAY.  Return a folded
   expression or NULL_TREE if no simplification was possible.  */

tree
fold_builtin_call_array (location_t loc, tree,
                         tree fn,
                         int n,
                         tree *argarray)
{
  if (TREE_CODE (fn) != ADDR_EXPR)
    return NULL_TREE;

  tree fndecl = TREE_OPERAND (fn, 0);
  if (TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl))
    {
      /* If last argument is __builtin_va_arg_pack (), arguments to this
         function are not finalized yet.  Defer folding until they are.  */
      if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
        {
          tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
          if (fndecl2
              && TREE_CODE (fndecl2) == FUNCTION_DECL
              && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
              && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
            return NULL_TREE;
        }
      if (avoid_folding_inline_builtin (fndecl))
        return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
        return targetm.fold_builtin (fndecl, n, argarray, false);
      else
        return fold_builtin_n (loc, fndecl, argarray, n, false);
    }

  return NULL_TREE;
}

/* Construct a new CALL_EXPR using the tail of the argument list of EXP
   along with N new arguments specified as the "..." parameters.  SKIP
   is the number of arguments in EXP to be omitted.  This function is used
   to do varargs-to-varargs transformations.  */

static tree
rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
{
  va_list ap;
  tree t;

  va_start (ap, n);
  t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
                                CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
  va_end (ap);

  return t;
}

/* Validate a single argument ARG against a tree code CODE representing
   a type.  Return true when argument is valid.  */

static bool
validate_arg (const_tree arg, enum tree_code code)
{
  if (!arg)
    return false;
  else if (code == POINTER_TYPE)
    return POINTER_TYPE_P (TREE_TYPE (arg));
  else if (code == INTEGER_TYPE)
    return INTEGRAL_TYPE_P (TREE_TYPE (arg));
  return code == TREE_CODE (TREE_TYPE (arg));
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.

   This is the GIMPLE version of validate_arglist.  Eventually we want to
   completely convert builtins.c to work from GIMPLEs and the tree based
   validate_arglist will then be removed.  */

bool
validate_gimple_arglist (const gcall *call, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_tree arg;
  size_t i;

  va_start (ap, call);
  i = 0;

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
        {
        case 0:
          /* This signifies an ellipsis, any further arguments are all ok.  */
          res = true;
          goto end;
        case VOID_TYPE:
          /* This signifies an endlink, if no arguments remain, return
             true, otherwise return false.  */
          res = (i == gimple_call_num_args (call));
          goto end;
        default:
          /* If no parameters remain or the parameter's code does not
             match the specified code, return false.  Otherwise continue
             checking any remaining arguments.  */
          arg = gimple_call_arg (call, i++);
          if (!validate_arg (arg, code))
            goto end;
          break;
        }
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */

 end: ;
  va_end (ap);

  return res;
}

/* Default target-specific builtin expander that does nothing.  */

rtx
default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
                        rtx target ATTRIBUTE_UNUSED,
                        rtx subtarget ATTRIBUTE_UNUSED,
                        machine_mode mode ATTRIBUTE_UNUSED,
                        int ignore ATTRIBUTE_UNUSED)
{
  return NULL_RTX;
}

/* Returns true if EXP represents data that would potentially reside
   in a readonly section.  */

bool
readonly_data_expr (tree exp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) != ADDR_EXPR)
    return false;

  exp = get_base_address (TREE_OPERAND (exp, 0));
  if (!exp)
    return false;

  /* Make sure we call decl_readonly_section only for trees it
     can handle (since it returns true for everything it doesn't
     understand).  */
  if (TREE_CODE (exp) == STRING_CST
      || TREE_CODE (exp) == CONSTRUCTOR
      || (VAR_P (exp) && TREE_STATIC (exp)))
    return decl_readonly_section (exp, 0);
  else
    return false;
}

/* Simplify a call to the strpbrk builtin.  S1 and S2 are the arguments
   to the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1, *p2;

      p2 = c_getstr (s2);
      if (p2 == NULL)
        return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
        {
          const char *r = strpbrk (p1, p2);
          tree tem;

          if (r == NULL)
            return build_int_cst (TREE_TYPE (s1), 0);

          /* Return an offset into the constant string argument.  */
          tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
          return fold_convert_loc (loc, type, tem);
        }

      if (p2[0] == '\0')
        /* strpbrk(x, "") == NULL.
           Evaluate and ignore s1 in case it had side-effects.  */
        return omit_one_operand_loc (loc, type, integer_zero_node, s1);

      if (p2[1] != '\0')
        return NULL_TREE;  /* Really call strpbrk.  */

      fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (!fn)
        return NULL_TREE;

      /* New argument list transforming strpbrk(s1, s2) to
         strchr(s1, s2[0]).  */
      return build_call_expr_loc (loc, fn, 2, s1,
                                  build_int_cst (integer_type_node, p2[0]));
    }
}

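/* Illustrative sketch (not part of the original file): with a constant
   second argument the folding above yields

     strpbrk (s, "")      // -> NULL, with s still evaluated
     strpbrk (s, "a")     // -> strchr (s, 'a')
     strpbrk ("ab", "b")  // -> "ab" + 1, fully constant-folded
*/
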
/* Simplify a call to the strspn builtin.  S1 and S2 are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strspn (location_t loc, tree s1, tree s2)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);

      /* If either argument is "", return NULL_TREE.  */
      if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
        /* Evaluate and ignore both arguments in case either one has
           side effects.  */
        return omit_two_operands_loc (loc, size_type_node, size_zero_node,
                                      s1, s2);
      return NULL_TREE;
    }
}

/* Simplify a call to the strcspn builtin.  S1 and S2 are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strcspn (location_t loc, tree s1, tree s2)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;

  /* If the first argument is "", return NULL_TREE.  */
  const char *p1 = c_getstr (s1);
  if (p1 && *p1 == '\0')
    {
      /* Evaluate and ignore argument s2 in case it has
         side-effects.  */
      return omit_one_operand_loc (loc, size_type_node,
                                   size_zero_node, s2);
    }

  /* If the second argument is "", return __builtin_strlen(s1).  */
  const char *p2 = c_getstr (s2);
  if (p2 && *p2 == '\0')
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);

      /* If the replacement _DECL isn't initialized, don't do the
         transformation.  */
      if (!fn)
        return NULL_TREE;

      return build_call_expr_loc (loc, fn, 1, s1);
    }
  return NULL_TREE;
}

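/* Illustrative sketch (not part of the original file): the cheap cases
   handled by the two functions above are

     strspn (s, "")    // -> 0, with s (and "") still evaluated
     strspn ("", s2)   // -> 0, likewise
     strcspn ("", s2)  // -> 0, with s2 still evaluated
     strcspn (s, "")   // -> strlen (s)
*/
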
/* Fold the next_arg or va_start call EXP.  Returns true if there was an error
   produced.  False otherwise.  This is done so that we don't output the error
   or warning twice or three times.  */

bool
fold_builtin_next_arg (tree exp, bool va_start_p)
{
  tree fntype = TREE_TYPE (current_function_decl);
  int nargs = call_expr_nargs (exp);
  tree arg;
  /* There is good chance the current input_location points inside the
     definition of the va_start macro (perhaps on the token for
     builtin) in a system header, so warnings will not be emitted.
     Use the location in real source code.  */
  source_location current_location =
    linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
					      NULL);

  if (!stdarg_p (fntype))
    {
      error ("%<va_start%> used in function with fixed args");
      return true;
    }

  if (va_start_p)
    {
      if (nargs != 2)
	{
	  error ("wrong number of arguments to function %<va_start%>");
	  return true;
	}
      arg = CALL_EXPR_ARG (exp, 1);
    }
  /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
     when we checked the arguments and if needed issued a warning.  */
  else
    {
      if (nargs == 0)
	{
	  /* Evidently an out of date version of <stdarg.h>; can't validate
	     va_start's second argument, but can still work as intended.  */
	  warning_at (current_location,
		      OPT_Wvarargs,
		      "%<__builtin_next_arg%> called without an argument");
	  return true;
	}
      else if (nargs > 1)
	{
	  error ("wrong number of arguments to function "
		 "%<__builtin_next_arg%>");
	  return true;
	}
      arg = CALL_EXPR_ARG (exp, 0);
    }

  if (TREE_CODE (arg) == SSA_NAME)
    arg = SSA_NAME_VAR (arg);

  /* We destructively modify the call to be __builtin_va_start (ap, 0)
     or __builtin_next_arg (0) the first time we see it, after checking
     the arguments and if needed issuing a warning.  */
  if (!integer_zerop (arg))
    {
      tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));

      /* Strip off all nops for the sake of the comparison.  This
	 is not quite the same as STRIP_NOPS.  It does more.
	 We must also strip off INDIRECT_EXPR for C++ reference
	 parameters.  */
      while (CONVERT_EXPR_P (arg)
	     || TREE_CODE (arg) == INDIRECT_REF)
	arg = TREE_OPERAND (arg, 0);
      if (arg != last_parm)
	{
	  /* FIXME: Sometimes the tree optimizers hand us something that
	     is not the last named argument even though the user wrote
	     the last one; in that case we can only warn, and wrong code
	     may still be generated.  */
	  warning_at (current_location,
		      OPT_Wvarargs,
		      "second parameter of %<va_start%> not last named argument");
	}
      /* Undefined by C99 7.15.1.4p4 (va_start):
	 "If the parameter parmN is declared with the register storage
	 class, with a function or array type, or with a type that is
	 not compatible with the type that results after application of
	 the default argument promotions, the behavior is undefined."  */
      else if (DECL_REGISTER (arg))
	{
	  warning_at (current_location,
		      OPT_Wvarargs,
		      "undefined behavior when second parameter of "
		      "%<va_start%> is declared with %<register%> storage");
	}

      /* We want to verify the second parameter just once before the tree
	 optimizers are run and then avoid keeping it in the tree,
	 as otherwise we could warn even for correct code like:
	 void foo (int i, ...)
	 { va_list ap; i++; va_start (ap, i); va_end (ap); }  */
      if (va_start_p)
	CALL_EXPR_ARG (exp, 1) = integer_zero_node;
      else
	CALL_EXPR_ARG (exp, 0) = integer_zero_node;
    }
  return false;
}
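
/* As an illustration, hypothetical code exercising the checks above:

     void ok (int a, int b, ...)
     { va_list ap; va_start (ap, b); va_end (ap); }

   is accepted (and the second argument is then replaced by 0 in the
   tree), while passing A instead of B draws the "not last named
   argument" warning, and declaring B with register storage draws the
   undefined-behavior warning.  */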
/* Expand a call EXP to __builtin_object_size.  */

static rtx
expand_builtin_object_size (tree exp)
{
  tree ost;
  int object_size_type;
  tree fndecl = get_callee_fndecl (exp);

  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      error ("%Kfirst argument of %qD must be a pointer, second integer constant",
	     exp, fndecl);
      expand_builtin_trap ();
      return const0_rtx;
    }

  ost = CALL_EXPR_ARG (exp, 1);
  STRIP_NOPS (ost);

  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    {
      error ("%Klast argument of %qD is not integer constant between 0 and 3",
	     exp, fndecl);
      expand_builtin_trap ();
      return const0_rtx;
    }

  object_size_type = tree_to_shwi (ost);

  return object_size_type < 2 ? constm1_rtx : const0_rtx;
}
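
/* As an illustration: earlier folding handles every call whose object
   size can be computed, so the expansion above only supplies the
   documented "unknown" answers.  For a hypothetical pointer P of
   unknown provenance:

     __builtin_object_size (p, 0)  ->  (size_t) -1	types 0 and 1
     __builtin_object_size (p, 2)  ->  (size_t) 0	types 2 and 3  */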
/* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   FCODE is the BUILT_IN_* to use.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
			   enum built_in_function fcode)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE,
			 fcode == BUILT_IN_MEMSET_CHK
			 ? INTEGER_TYPE : POINTER_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);
  tree size = CALL_EXPR_ARG (exp, 3);

  bool sizes_ok = check_access (exp, dest, src, len, /*maxread=*/NULL_TREE,
				/*str=*/NULL_TREE, size);

  if (!tree_fits_uhwi_p (size))
    return NULL_RTX;

  if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
    {
      /* Avoid transforming the checking call to an ordinary one when
	 an overflow has been detected or when the call couldn't be
	 validated because the size is not constant.  */
      if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
	return NULL_RTX;

      tree fn = NULL_TREE;
      /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
	 mem{cpy,pcpy,move,set} is available.  */
      switch (fcode)
	{
	case BUILT_IN_MEMCPY_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
	  break;
	case BUILT_IN_MEMPCPY_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
	  break;
	case BUILT_IN_MEMMOVE_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
	  break;
	case BUILT_IN_MEMSET_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMSET);
	  break;
	default:
	  break;
	}

      if (!fn)
	return NULL_RTX;

      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_expr (fn, target, mode, EXPAND_NORMAL);
    }
  else if (fcode == BUILT_IN_MEMSET_CHK)
    return NULL_RTX;
  else
    {
      unsigned int dest_align = get_pointer_alignment (dest);

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If SRC and DEST are the same (and not volatile), do nothing.  */
      if (operand_equal_p (src, dest, 0))
	{
	  tree expr;

	  if (fcode != BUILT_IN_MEMPCPY_CHK)
	    {
	      /* Evaluate and ignore LEN in case it has side-effects.  */
	      expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
	      return expand_expr (dest, target, mode, EXPAND_NORMAL);
	    }

	  expr = fold_build_pointer_plus (dest, len);
	  return expand_expr (expr, target, mode, EXPAND_NORMAL);
	}

      /* __memmove_chk special case.  */
      if (fcode == BUILT_IN_MEMMOVE_CHK)
	{
	  unsigned int src_align = get_pointer_alignment (src);

	  if (src_align == 0)
	    return NULL_RTX;

	  /* If src is categorized for a readonly section we can use
	     normal __memcpy_chk.  */
	  if (readonly_data_expr (src))
	    {
	      tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return NULL_RTX;
	      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
					  dest, src, len, size);
	      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
	      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
	      return expand_expr (fn, target, mode, EXPAND_NORMAL);
	    }
	}
      return NULL_RTX;
    }
}
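
/* As an illustrative sketch of the transformation above, assuming a
   hypothetical 16-byte buffer BUF:

     char buf[16];
     __builtin___memcpy_chk (buf, src, 8, __builtin_object_size (buf, 0));

   has a constant length that fits the object size, so it is expanded
   as a plain memcpy (buf, src, 8).  If the length exceeded the size,
   NULL_RTX would be returned instead and the caller would emit the
   checking call, which fails at run time.  */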
/* Emit warning if a buffer overflow is detected at compile time.  */

static void
maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
{
  /* The source string.  */
  tree srcstr = NULL_TREE;
  /* The size of the destination object.  */
  tree objsize = NULL_TREE;
  /* The string that is being concatenated with (as in __strcat_chk)
     or null if it isn't.  */
  tree catstr = NULL_TREE;
  /* The maximum length of the source sequence in a bounded operation
     (such as __strncat_chk) or null if the operation isn't bounded
     (such as __strcat_chk).  */
  tree maxread = NULL_TREE;
  /* The exact size of the access (such as in __strncpy_chk).  */
  tree size = NULL_TREE;

  switch (fcode)
    {
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      srcstr = CALL_EXPR_ARG (exp, 1);
      objsize = CALL_EXPR_ARG (exp, 2);
      break;

    case BUILT_IN_STRCAT_CHK:
      /* For __strcat_chk the warning will be emitted only if overflowing
	 by at least strlen (dest) + 1 bytes.  */
      catstr = CALL_EXPR_ARG (exp, 0);
      srcstr = CALL_EXPR_ARG (exp, 1);
      objsize = CALL_EXPR_ARG (exp, 2);
      break;

    case BUILT_IN_STRNCAT_CHK:
      catstr = CALL_EXPR_ARG (exp, 0);
      srcstr = CALL_EXPR_ARG (exp, 1);
      maxread = CALL_EXPR_ARG (exp, 2);
      objsize = CALL_EXPR_ARG (exp, 3);
      break;

    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      srcstr = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 2);
      objsize = CALL_EXPR_ARG (exp, 3);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      maxread = CALL_EXPR_ARG (exp, 1);
      objsize = CALL_EXPR_ARG (exp, 3);
      break;

    default:
      gcc_unreachable ();
    }

  if (catstr && maxread)
    {
      /* Check __strncat_chk.  There is no way to determine the length
	 of the string to which the source string is being appended so
	 just warn when the length of the source string is not known.  */
      check_strncat_sizes (exp, objsize);
      return;
    }

  /* The destination argument is the first one for all built-ins above.  */
  tree dst = CALL_EXPR_ARG (exp, 0);

  check_access (exp, dst, srcstr, size, maxread, srcstr, objsize);
}
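
/* As an illustration, a hypothetical call the check above can
   diagnose at compile time:

     char d[4];
     __builtin___strcpy_chk (d, "overflow", __builtin_object_size (d, 0));

   copies 9 bytes (including the terminating nul) into a 4-byte
   object, so a buffer overflow warning is emitted.  */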
/* Emit warning if a buffer overflow is detected at compile time
   in __sprintf_chk/__vsprintf_chk calls.  */

static void
maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
{
  tree size, len, fmt;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call.  */

  if (nargs < 4)
    return;
  size = CALL_EXPR_ARG (exp, 2);
  fmt = CALL_EXPR_ARG (exp, 3);

  if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
    return;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return;

  if (!init_target_chars ())
    return;

  /* If the format doesn't contain % args or %%, we know its size.  */
  if (strchr (fmt_str, target_percent) == 0)
    len = build_int_cstu (size_type_node, strlen (fmt_str));
  /* If the format is "%s" and first ... argument is a string literal,
     we know it too.  */
  else if (fcode == BUILT_IN_SPRINTF_CHK
	   && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree arg;

      if (nargs < 5)
	return;
      arg = CALL_EXPR_ARG (exp, 4);
      if (! POINTER_TYPE_P (TREE_TYPE (arg)))
	return;

      len = c_strlen (arg, 1);
      if (!len || ! tree_fits_uhwi_p (len))
	return;
    }
  else
    return;

  /* Add one for the terminating nul.  */
  len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);

  check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, /*size=*/NULL_TREE,
		/*maxread=*/NULL_TREE, len, size);
}
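
/* As an illustration, a hypothetical call where the output length is
   known by the logic above:

     char d[4];
     __builtin___sprintf_chk (d, 0, __builtin_object_size (d, 0),
			      "%s", "abcdef");

   Here len = strlen ("abcdef") + 1 = 7 exceeds the 4-byte object
   size, so a warning can be emitted at compile time.  */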
/* Emit warning if a free is called with address of a variable.  */

static void
maybe_emit_free_warning (tree exp)
{
  tree arg = CALL_EXPR_ARG (exp, 0);

  STRIP_NOPS (arg);
  if (TREE_CODE (arg) != ADDR_EXPR)
    return;

  arg = get_base_address (TREE_OPERAND (arg, 0));
  if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
    return;

  if (SSA_VAR_P (arg))
    warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
		"%Kattempt to free a non-heap object %qD", exp, arg);
  else
    warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
		"%Kattempt to free a non-heap object", exp);
}
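
/* As an illustration, a hypothetical call diagnosed by the check above:

     int x;
     free (&x);		triggers -Wfree-nonheap-object

   Addresses reached through a dereference are left alone, since the
   base object is not known to be non-heap.  */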
/* Fold a call to __builtin_object_size with arguments PTR and OST,
   if possible.  */

static tree
fold_builtin_object_size (tree ptr, tree ost)
{
  unsigned HOST_WIDE_INT bytes;
  int object_size_type;

  if (!validate_arg (ptr, POINTER_TYPE)
      || !validate_arg (ost, INTEGER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (ost);

  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    return NULL_TREE;

  object_size_type = tree_to_shwi (ost);

  /* __builtin_object_size doesn't evaluate side-effects in its arguments;
     if there are any side-effects, it returns (size_t) -1 for types 0 and 1
     and (size_t) 0 for types 2 and 3.  */
  if (TREE_SIDE_EFFECTS (ptr))
    return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      compute_builtin_object_size (ptr, object_size_type, &bytes);
      if (wi::fits_to_tree_p (bytes, size_type_node))
	return build_int_cstu (size_type_node, bytes);
    }
  else if (TREE_CODE (ptr) == SSA_NAME)
    {
      /* If object size is not known yet, delay folding until
	 later.  Maybe subsequent passes will help determining
	 it.  */
      if (compute_builtin_object_size (ptr, object_size_type, &bytes)
	  && wi::fits_to_tree_p (bytes, size_type_node))
	return build_int_cstu (size_type_node, bytes);
    }

  return NULL_TREE;
}
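
/* As an illustration of the side-effects rule above, for a
   hypothetical size N:

     __builtin_object_size (malloc (n), 0)  ->  (size_t) -1
     __builtin_object_size (malloc (n), 2)  ->  (size_t) 0

   and the malloc call itself is not evaluated.  */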
/* Builtins with folding operations that operate on "..." arguments
   need special handling; we need to store the arguments in a convenient
   data structure before attempting any folding.  Fortunately there are
   only a few builtins that fall into this category.  FNDECL is the
   function, EXP is the CALL_EXPR for the call.  */

static tree
fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  tree ret = NULL_TREE;

  switch (fcode)
    {
    case BUILT_IN_FPCLASSIFY:
      ret = fold_builtin_fpclassify (loc, args, nargs);
      break;

    default:
      break;
    }
  if (ret)
    {
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}
/* Initialize format string characters in the target charset.  */

bool
init_target_chars (void)
{
  static bool init;
  if (!init)
    {
      target_newline = lang_hooks.to_target_charset ('\n');
      target_percent = lang_hooks.to_target_charset ('%');
      target_c = lang_hooks.to_target_charset ('c');
      target_s = lang_hooks.to_target_charset ('s');
      if (target_newline == 0 || target_percent == 0 || target_c == 0
	  || target_s == 0)
	return false;

      target_percent_c[0] = target_percent;
      target_percent_c[1] = target_c;
      target_percent_c[2] = '\0';

      target_percent_s[0] = target_percent;
      target_percent_s[1] = target_s;
      target_percent_s[2] = '\0';

      target_percent_s_newline[0] = target_percent;
      target_percent_s_newline[1] = target_s;
      target_percent_s_newline[2] = target_newline;
      target_percent_s_newline[3] = '\0';

      init = true;
    }
  return true;
}
/* Helper function for do_mpfr_arg*().  Ensure M is a normal number
   and no overflow/underflow occurred.  INEXACT is true if M was not
   exactly calculated.  TYPE is the tree type for the result.  This
   function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail.  */

static tree
do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
      && (!flag_rounding_math || !inexact))
    {
      REAL_VALUE_TYPE rr;

      real_from_mpfr (&rr, m, type, GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpfr_t is not, then we underflowed in the
	 conversion.  */
      if (real_isfinite (&rr)
	  && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
	{
	  REAL_VALUE_TYPE rmode;

	  real_convert (&rmode, TYPE_MODE (type), &rr);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (real_identical (&rmode, &rr))
	    return build_real (type, rmode);
	}
    }
  return NULL_TREE;
}
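
/* As an illustrative sketch (plain MPFR, outside of GCC) of the
   protocol do_mpfr_ckconv assumes of its callers:

     mpfr_t m;
     mpfr_init2 (m, 53);			    precision of the type
     mpfr_set_d (m, 1.0, GMP_RNDN);
     mpfr_clear_flags ();			    1. clear the flags
     int inexact = mpfr_sin (m, m, GMP_RNDN);	    2. compute; keep ternary
     ... a do_mpfr_ckconv-style check may now consult mpfr_overflow_p (),
	 mpfr_underflow_p () and INEXACT before trusting the result ...
     mpfr_clear (m);  */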
/* Helper function for do_mpc_arg*().  Ensure M is a normal complex
   number and no overflow/underflow occurred.  INEXACT is true if M
   was not exactly calculated.  TYPE is the tree type for the result.
   This function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail;
   if FORCE_CONVERT is true, bypass the checks.  */

static tree
do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (force_convert
      || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
	  && !mpfr_overflow_p () && !mpfr_underflow_p ()
	  && (!flag_rounding_math || !inexact)))
    {
      REAL_VALUE_TYPE re, im;

      real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
      real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpfr_t is not, then we underflowed in the
	 conversion.  */
      if (force_convert
	  || (real_isfinite (&re) && real_isfinite (&im)
	      && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
	      && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
	{
	  REAL_VALUE_TYPE re_mode, im_mode;

	  real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
	  real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (force_convert
	      || (real_identical (&re_mode, &re)
		  && real_identical (&im_mode, &im)))
	    return build_complex (type, build_real (TREE_TYPE (type), re_mode),
				  build_real (TREE_TYPE (type), im_mode));
	}
    }
  return NULL_TREE;
}
/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  tree result_rem;
	  long integer_quo;
	  mpfr_t m0, m1;

	  mpfr_inits2 (prec, m0, m1, NULL);
	  mpfr_from_real (m0, ra0, GMP_RNDN);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_clear_flags ();
	  mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
	  /* Remquo is independent of the rounding mode, so pass
	     inexact=0 to do_mpfr_ckconv().  */
	  result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
	  mpfr_clears (m0, m1, NULL);
	  if (result_rem)
	    {
	      /* MPFR calculates quo in the host's long so it may
		 return more bits in quo than the target int can hold
		 if sizeof(host long) > sizeof(target int).  This can
		 happen even for native compilers in LP64 mode.  In
		 these cases, modulo the quo value with the largest
		 number that the target int can hold while leaving one
		 bit for the sign.  */
	      if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
		integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

	      /* Dereference the quo pointer argument.  */
	      arg_quo = build_fold_indirect_ref (arg_quo);
	      /* Proceed iff a valid pointer type was passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
		{
		  /* Set the value.  */
		  tree result_quo
		    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
				   build_int_cst (TREE_TYPE (arg_quo),
						  integer_quo));
		  TREE_SIDE_EFFECTS (result_quo) = 1;
		  /* Combine the quo assignment with the rem.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_quo, result_rem));
		}
	    }
	}
    }
  return result;
}
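
/* As an illustration of the fold above, for hypothetical constants:

     int q;
     double r = __builtin_remquo (5.0, 3.0, &q);

   folds to the equivalent of (q = 2, -1.0): mpfr_remquo computes the
   remainder 5 - 2*3 = -1 and the low bits of the quotient, and the
   assignment to the quotient is chained in front of the remainder
   with a COMPOUND_EXPR.  */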
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1, 1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
	 negative integer.  */
      if (real_isfinite (ra)
	  && ra->cl != rvc_zero
	  && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  int inexact, sg;
	  mpfr_t m;
	  tree result_lg;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = mpfr_lgamma (m, &sg, m, rnd);
	  result_lg = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	  if (result_lg)
	    {
	      tree result_sg;

	      /* Dereference the arg_sg pointer argument.  */
	      arg_sg = build_fold_indirect_ref (arg_sg);
	      /* Assign the signgam value into *arg_sg.  */
	      result_sg = fold_build2 (MODIFY_EXPR,
				       TREE_TYPE (arg_sg), arg_sg,
				       build_int_cst (TREE_TYPE (arg_sg), sg));
	      TREE_SIDE_EFFECTS (result_sg) = 1;
	      /* Combine the signgam assignment with the lgamma result.  */
	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						result_sg, result_lg));
	    }
	}
    }
  return result;
}
/* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
   mpc function FUNC on it and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

static tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
	     int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
	  || (real_isfinite (re0) && real_isfinite (im0)
	      && real_isfinite (re1) && real_isfinite (im1)))
	{
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m0, m1;

	  mpc_init2 (m0, prec);
	  mpc_init2 (m1, prec);
	  mpfr_from_real (mpc_realref (m0), re0, rnd);
	  mpfr_from_real (mpc_imagref (m0), im0, rnd);
	  mpfr_from_real (mpc_realref (m1), re1, rnd);
	  mpfr_from_real (mpc_imagref (m1), im1, rnd);
	  mpfr_clear_flags ();
	  inexact = func (m0, m0, m1, crnd);
	  result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
	  mpc_clear (m0);
	  mpc_clear (m1);
	}
    }
  return result;
}
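
/* As an illustrative sketch (plain MPC, outside of GCC) of the
   machinery above, as used when folding e.g. cpow of complex
   constants:

     mpc_t a, b;
     mpc_init2 (a, 53);
     mpc_init2 (b, 53);
     mpc_set_d_d (a, 0.0, 1.0, MPC_RNDNN);	    a = i
     mpc_set_d_d (b, 2.0, 0.0, MPC_RNDNN);	    b = 2
     int inexact = mpc_pow (a, a, b, MPC_RNDNN);    a = i**2 = -1
     mpc_clear (a);
     mpc_clear (b);  */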
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
		    ? gimple_call_arg_ptr (stmt, 0)
		    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	{
	  return targetm.fold_builtin (fndecl, nargs, args, ignore);
	}
      else
	{
	  ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	  if (ret)
	    {
	      /* Propagate location information from original call to
		 expansion of builtin.  Otherwise things like
		 maybe_emit_chk_warning, that operate on the expansion
		 of a builtin, will use the wrong location information.  */
	      if (gimple_has_location (stmt))
		{
		  tree realret = ret;
		  if (TREE_CODE (ret) == NOP_EXPR)
		    realret = TREE_OPERAND (ret, 0);
		  if (CAN_HAVE_LOCATION_P (realret)
		      && !EXPR_HAS_LOCATION (realret))
		    SET_EXPR_LOCATION (realret, loc);
		  return realret;
		}
	      return ret;
	    }
	}
    }
  return NULL_TREE;
}
/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
	      && asmspec != 0);

  tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);

  if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
      && INT_TYPE_SIZE < BITS_PER_WORD)
    {
      scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
      set_user_assembler_libfunc ("ffs", asmspec);
      set_optab_libfunc (ffs_optab, mode, "ffs");
    }
}
/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
	/* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
	/* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
	/* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
	return true;

      default:
	return false;
      }

  return false;
}
/* Return true if DECL is a builtin that is not expensive, i.e., it is
   most probably expanded inline into reasonably simple code.  This is a
   superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      CASE_BUILT_IN_ALLOCA:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_ACC_ON_DEVICE:
	return true;

      default:
	return is_simple_builtin (decl);
      }

  return false;
}
/* Return true if T is a constant and the value cast to a target char
   can be represented by a host char.
   Store the casted char constant in *P if so.  */

bool
target_char_cst_p (tree t, char *p)
{
  if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
    return false;

  *p = (char)tree_to_uhwi (t);
  return true;
}
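
/* As an illustration, the typical use in a string-builtin fold, with
   a hypothetical tree ARG holding e.g. the character argument of
   strchr:

     char c;
     if (target_char_cst_p (arg, &c))
       ... C can now be compared against bytes of a host-side string
	   constant, since host and target chars have the same width ...  */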
/* Return the maximum object size.  */

tree
max_object_size (void)
{
  /* To do: Make this a configurable parameter.  */
  return TYPE_MAX_VALUE (ptrdiff_type_node);
}