/* Expand builtin functions.
   Copyright (C) 1988-2020 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */
#include "coretypes.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "gimple-ssa-warn-restrict.h"
#include "stor-layout.h"
#include "tree-object-size.h"
#include "tree-ssa-strlen.h"
#include "typeclass.h"
#include "langhooks.h"
#include "value-prof.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimple-fold.h"
#include "file-prefix-map.h" /* remap_macro_filename()  */
#include "gomp-constants.h"
#include "omp-general.h"
#include "gimple-ssa.h"
#include "tree-ssa-live.h"
#include "tree-outof-ssa.h"
#include "attr-fnspec.h"
struct target_builtins default_target_builtins;
struct target_builtins *this_target_builtins = &default_target_builtins;
/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN
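/* Each DEF_BUILTIN invocation in builtins.def expands above to the
   stringified enumerator, so e.g. built_in_names[(int) BUILT_IN_MEMCPY]
   is the string "BUILT_IN_MEMCPY".  */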
/* Setup an array of builtin_info_type, make sure each element decl is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];
/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx inline_expand_builtin_bytecmp (tree, rtx);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memchr (tree, rtx);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
					    rtx target, tree exp,
					    memop_ret retmode,
					    bool might_overlap);
static rtx expand_builtin_memmove (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree,
					memop_ret);
static rtx expand_builtin_strcat (tree);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_stpncpy (tree, rtx);
static rtx expand_builtin_strncat (tree, rtx);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode,
				       tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static rtx expand_builtin_expect_with_probability (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree,
					enum tree_code, enum tree_code);
static tree fold_builtin_varargs (location_t, tree, tree *, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);
static bool check_read_access (tree, tree, tree = NULL_TREE, int = 1);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);
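/* Construct an access_ref object.  When BOUND is nonnull it is the bound
   argument of a call (such as the bound passed to strnlen); MINACCESS
   determines whether the access is assumed to be at least one element.  */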
access_ref::access_ref (tree bound /* = NULL_TREE */,
			bool minaccess /* = false */)
: ref (), eval ([](tree x){ return x; }), trail1special (true), base0 (true)
{
  /* Set a valid (zero) offset range and an invalid size range.  */
  offrng[0] = offrng[1] = 0;
  sizrng[0] = sizrng[1] = -1;

  /* Set the default bounds of the access and adjust below.  */
  bndrng[0] = minaccess ? 1 : 0;
  bndrng[1] = HOST_WIDE_INT_M1U;

  /* When BOUND is nonnull and a range can be extracted from it,
     set the bounds of the access to reflect both it and MINACCESS.
     BNDRNG[0] is the size of the minimum access.  */
  tree rng[2];
  if (bound && get_size_range (bound, rng, SR_ALLOW_ZERO))
    {
      bndrng[0] = wi::to_offset (rng[0]);
      bndrng[1] = wi::to_offset (rng[1]);
      bndrng[0] = bndrng[0] > 0 && minaccess ? 1 : 0;
    }
}
/* Return the maximum amount of space remaining and if non-null, set
   argument to the minimum.  */

offset_int
access_ref::size_remaining (offset_int *pmin /* = NULL */) const
{
  offset_int minbuf;
  if (!pmin)
    pmin = &minbuf;

  /* add_offset() ensures the offset range isn't inverted.  */
  gcc_checking_assert (offrng[0] <= offrng[1]);

  if (base0)
    {
      /* The offset into the referenced object is zero-based (i.e., it's
	 not referenced by a pointer into middle of some unknown object).  */
      if (offrng[0] < 0 && offrng[1] < 0)
	{
	  /* If the offset is negative the remaining size is zero.  */
	  *pmin = 0;
	  return 0;
	}

      if (sizrng[1] <= offrng[0])
	{
	  /* If the starting offset is greater than or equal to the upper
	     bound on the size of the object, the space remaining is zero.
	     As a special case, if it's equal, set *PMIN to -1 to let
	     the caller know the offset is valid and just past the end.  */
	  *pmin = sizrng[1] == offrng[0] ? -1 : 0;
	  return 0;
	}

      /* Otherwise return the size minus the lower bound of the offset.  */
      offset_int or0 = offrng[0] < 0 ? 0 : offrng[0];

      *pmin = sizrng[0] - or0;
      return sizrng[1] - or0;
    }

  /* The offset to the referenced object isn't zero-based (i.e., it may
     refer to a byte other than the first).  The size of such an object
     is constrained only by the size of the address space (the result
     of max_object_size()).  */
  if (sizrng[1] <= offrng[0])
    {
      *pmin = 0;
      return 0;
    }

  offset_int or0 = offrng[0] < 0 ? 0 : offrng[0];

  *pmin = sizrng[0] - or0;
  return sizrng[1] - or0;
}
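/* For example, with BASE0 set, SIZRNG = [4, 8], and OFFRNG = [2, 6],
   size_remaining () returns 8 - 2 = 6 and sets *PMIN to 4 - 2 = 2.  */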
/* Add the range [MIN, MAX] to the offset range.  For known objects (with
   zero-based offsets) at least one of whose offset's bounds is in range,
   constrain the other (or both) to the bounds of the object (i.e., zero
   and the upper bound of its size).  This improves the quality of
   diagnostics.  */

void access_ref::add_offset (const offset_int &min, const offset_int &max)
{
  if (min <= max)
    {
      /* To add an ordinary range just add it to the bounds.  */
      offrng[0] += min;
      offrng[1] += max;
    }
  else if (!base0)
    {
      /* To add an inverted range to an offset to an unknown object
	 expand it to the maximum.  */
      offrng[0] = -wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node)) - 1;
      offrng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
    }
  else
    {
      /* To add an inverted range to an offset to a known object set
	 the upper bound to the maximum representable offset value
	 (which may be greater than MAX_OBJECT_SIZE).
	 The lower bound is either the sum of the current offset and
	 MIN when abs(MAX) is greater than the former, or zero otherwise.
	 Zero because then the inverted range includes the negative of
	 the lower bound.  */
      offset_int maxoff = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
      offrng[1] = maxoff;

      if (max >= 0)
	{
	  offrng[0] = 0;
	  return;
	}

      offset_int absmax = wi::abs (max);
      if (offrng[0] < absmax)
	{
	  offrng[0] += min;
	  /* Cap the lower bound at the upper (set to MAXOFF above)
	     to avoid inadvertently recreating an inverted range.  */
	  if (offrng[1] < offrng[0])
	    offrng[0] = offrng[1];
	}
      else
	offrng[0] = 0;
    }

  if (!base0)
    return;

  /* When referencing a known object check to see if the offset computed
     so far is in bounds... */
  offset_int remrng[2];
  remrng[1] = size_remaining (remrng);
  if (remrng[1] > 0 || remrng[0] < 0)
    {
      /* ...if so, constrain it so that neither bound exceeds the size of
	 the object.  Out of bounds offsets are left unchanged, and, for
	 better or worse, become in bounds later.  They should be detected
	 and diagnosed at the point they first become invalid by
	 -Warray-bounds.  */
      if (offrng[0] < 0)
	offrng[0] = 0;
      if (offrng[1] > sizrng[1])
	offrng[1] = sizrng[1];
    }
}
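/* For example, adding the ordinary range [2, 4] to an access_ref with
   BASE0 set, SIZRNG = [0, 10], and OFFRNG = [0, 0] yields OFFRNG = [2, 4];
   both bounds are within the object so neither is constrained.  */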
/* Return true if NAME starts with "__builtin_", "__sync_", or
   "__atomic_".  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  return false;
}
/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}
/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  poly_int64 bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = least_bit_hwi (ptr_bitmask);
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      unsigned int talign;
      if (!addr_p && !known_alignment
	  && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
	  && talign > align)
	align = talign;
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  /* Account for the alignment of runtime coefficients, so that the constant
     bitpos is guaranteed to be accurate.  */
  unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
  if (alt_align != 0 && alt_align < align)
    {
      align = alt_align;
      known_alignment = false;
    }

  *alignp = align;
  *bitposp = bitpos.coeffs[0] & (align - 1);
  return known_alignment;
}
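/* For example, if the address of EXP is known to equal 16 * k + 4 bytes
   for some unknown k, then M = 128 and N = 32 (both in bits) are stored
   in *ALIGNP and *BITPOSP, respectively, and the function returns true.  */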
/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Otherwise return
   false and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}
/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);
  return align;
}
/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	  if (*alignp == 0)
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}
/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);

  return align;
}
/* Return the number of leading non-zero elements in the sequence
   [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
   ELTSIZE must be a power of 2 less than 8.  Used by c_strlen.  */

static unsigned
string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
{
  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  unsigned n;

  if (eltsize == 1)
    {
      /* Optimize the common case of plain char.  */
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n;
	  if (!*elt)
	    break;
	}
    }
  else
    {
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n * eltsize;
	  if (!memcmp (elt, "\0\0\0\0", eltsize))
	    break;
	}
    }
  return n;
}
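/* For example, string_length ("ab\0cd", 1, 6) returns 2, and
   string_length ("abcd", 1, 2) also returns 2 because only MAXELTS
   elements are examined.  */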
/* For a call EXPR at LOC to a function FNAME that expects a string
   in the argument ARG, issue a diagnostic due to its being called
   with an argument that is a character array with no terminating
   NUL.  SIZE is the EXACT size of the array, and BNDRNG the number
   of characters in which the NUL is expected.  Either EXPR or FNAME
   may be null but not both.  SIZE may be null when BNDRNG is null.  */

void
warn_string_no_nul (location_t loc, tree expr, const char *fname,
		    tree arg, tree decl, tree size /* = NULL_TREE */,
		    bool exact /* = false */,
		    const wide_int bndrng[2] /* = NULL */)
{
  if ((expr && TREE_NO_WARNING (expr)) || TREE_NO_WARNING (arg))
    return;

  loc = expansion_point_location_if_in_system_header (loc);
  bool warned = false;

  /* Format the bound range as a string to keep the number of messages
     from exploding.  */
  char bndstr[80];
  *bndstr = 0;
  if (bndrng)
    {
      if (bndrng[0] == bndrng[1])
	sprintf (bndstr, "%llu", (unsigned long long) bndrng[0].to_uhwi ());
      else
	sprintf (bndstr, "[%llu, %llu]",
		 (unsigned long long) bndrng[0].to_uhwi (),
		 (unsigned long long) bndrng[1].to_uhwi ());
    }

  const tree maxobjsize = max_object_size ();
  const wide_int maxsiz = wi::to_wide (maxobjsize);
  if (expr)
    {
      tree func = get_callee_fndecl (expr);
      if (bndrng)
	{
	  if (wi::ltu_p (maxsiz, bndrng[0]))
	    warned = warning_at (loc, OPT_Wstringop_overread,
				 "%K%qD specified bound %s exceeds "
				 "maximum object size %E",
				 expr, func, bndstr, maxobjsize);
	  else
	    {
	      bool maybe = wi::to_wide (size) == bndrng[0];
	      warned = warning_at (loc, OPT_Wstringop_overread,
				   exact
				   ? G_("%K%qD specified bound %s exceeds "
					"the size %E of unterminated array")
				   : (maybe
				      ? G_("%K%qD specified bound %s may "
					   "exceed the size of at most %E "
					   "of unterminated array")
				      : G_("%K%qD specified bound %s exceeds "
					   "the size of at most %E "
					   "of unterminated array")),
				   expr, func, bndstr, size);
	    }
	}
      else
	warned = warning_at (loc, OPT_Wstringop_overread,
			     "%K%qD argument missing terminating nul",
			     expr, func);
    }
  else
    {
      if (bndrng)
	{
	  if (wi::ltu_p (maxsiz, bndrng[0]))
	    warned = warning_at (loc, OPT_Wstringop_overread,
				 "%qs specified bound %s exceeds "
				 "maximum object size %E",
				 fname, bndstr, maxobjsize);
	  else
	    {
	      bool maybe = wi::to_wide (size) == bndrng[0];
	      warned = warning_at (loc, OPT_Wstringop_overread,
				   exact
				   ? G_("%qs specified bound %s exceeds "
					"the size %E of unterminated array")
				   : (maybe
				      ? G_("%qs specified bound %s may "
					   "exceed the size of at most %E "
					   "of unterminated array")
				      : G_("%qs specified bound %s exceeds "
					   "the size of at most %E "
					   "of unterminated array")),
				   fname, bndstr, size);
	    }
	}
      else
	warned = warning_at (loc, OPT_Wstringop_overread,
			     "%qs argument missing terminating nul",
			     fname);
    }

  if (warned)
    {
      inform (DECL_SOURCE_LOCATION (decl),
	      "referenced argument declared here");
      TREE_NO_WARNING (arg) = 1;
      if (expr)
	TREE_NO_WARNING (expr) = 1;
    }
}
/* For a call EXPR (which may be null) that expects a string argument
   SRC as an argument, returns false if SRC is a character array with
   no terminating NUL.  When nonnull, BOUND is the number of characters
   in which to expect the terminating NUL.  When EXPR is nonnull it
   also issues a warning.  */

bool
check_nul_terminated_array (tree expr, tree src,
			    tree bound /* = NULL_TREE */)
{
  /* The constant size of the array SRC points to.  The actual size
     may be smaller unless EXACT is true, but never larger.  */
  tree size;
  /* True if SRC involves a non-constant offset into the array.  */
  bool exact;
  /* The unterminated constant array SRC points to.  */
  tree nonstr = unterminated_array (src, &size, &exact);
  if (!nonstr)
    return true;

  /* NONSTR refers to the non-nul terminated constant array and SIZE
     is the constant size of the array in bytes.  EXACT is true when
     SIZE is exact.  */

  wide_int bndrng[2];
  if (bound)
    {
      if (TREE_CODE (bound) == INTEGER_CST)
	bndrng[0] = bndrng[1] = wi::to_wide (bound);
      else
	{
	  value_range_kind rng = get_range_info (bound, bndrng, bndrng + 1);
	  if (rng != VR_RANGE)
	    return true;
	}

      if (exact)
	{
	  if (wi::leu_p (bndrng[0], wi::to_wide (size)))
	    return true;
	}
      else if (wi::lt_p (bndrng[0], wi::to_wide (size), UNSIGNED))
	return true;
    }

  if (expr)
    warn_string_no_nul (EXPR_LOCATION (expr), expr, NULL, src, nonstr,
			size, exact, bound ? bndrng : NULL);

  return false;
}
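/* For example, given

     const char a[4] = "abcd";   // no terminating NUL

   a call to check_nul_terminated_array for an unbounded read of A
   returns false and, when EXPR is nonnull, diagnoses the call.  */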
/* If EXP refers to an unterminated constant character array return
   the declaration of the object of which the array is a member or
   element and if SIZE is not null, set *SIZE to the size of
   the unterminated array and set *EXACT if the size is exact or
   clear it otherwise.  Otherwise return null.  */

tree
unterminated_array (tree exp, tree *size /* = NULL */, bool *exact /* = NULL */)
{
  /* C_STRLEN will return NULL and set DECL in the info
     structure if EXP references an unterminated array.  */
  c_strlen_data lendata = { };
  tree len = c_strlen (exp, 1, &lendata);
  if (len == NULL_TREE && lendata.minlen && lendata.decl)
    {
      if (size)
	{
	  len = lendata.minlen;
	  if (lendata.off)
	    {
	      /* Constant offsets are already accounted for in LENDATA.MINLEN,
		 but not in a SSA_NAME + CST expression.  */
	      if (TREE_CODE (lendata.off) == INTEGER_CST)
		*exact = true;
	      else if (TREE_CODE (lendata.off) == PLUS_EXPR
		       && TREE_CODE (TREE_OPERAND (lendata.off, 1)) == INTEGER_CST)
		{
		  /* Subtract the offset from the size of the array.  */
		  *exact = false;
		  tree temp = TREE_OPERAND (lendata.off, 1);
		  temp = fold_convert (ssizetype, temp);
		  len = fold_build2 (MINUS_EXPR, ssizetype, len, temp);
		}
	      else
		*exact = false;
	    }
	  else
	    *exact = true;

	  *size = len;
	}
      return lendata.decl;
    }

  return NULL_TREE;
}
/* Compute the length of a null-terminated character string or wide
   character string handling character sizes of 1, 2, and 4 bytes.
   TREE_STRING_LENGTH is not the right way because it evaluates to
   the size of the character array in bytes (as opposed to characters)
   and because it can contain a zero byte in the middle.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (ARG, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   Additional information about the string accessed may be recorded
   in DATA.  For example, if ARG references an unterminated string,
   then the declaration will be stored in the DECL field.  If the
   length of the unterminated string can be determined, it'll be
   stored in the LEN field.  Note this length could well be different
   than what a C strlen call would return.

   ELTSIZE is 1 for normal single byte character strings, and 2 or
   4 for wide character strings.  ELTSIZE is by default 1.

   The value returned is of type `ssizetype'.  */

tree
c_strlen (tree arg, int only_value, c_strlen_data *data, unsigned eltsize)
{
  /* If we were not passed a DATA pointer, then get one to a local
     structure.  That avoids having to check DATA for NULL before
     each time we want to use it.  */
  c_strlen_data local_strlen_data = { };
  if (!data)
    data = &local_strlen_data;

  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  tree src = STRIP_NOPS (arg);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value, data, eltsize);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);

  location_t loc = EXPR_LOC_OR_LOC (src, input_location);

  /* Offset from the beginning of the string in bytes.  */
  tree byteoff;
  tree memsize;
  tree decl;
  src = string_constant (src, &byteoff, &memsize, &decl);
  if (src == 0)
    return NULL_TREE;

  /* Determine the size of the string element.  */
  if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))
    return NULL_TREE;

  /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
     length of SRC.  Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
     in case the latter is less than the size of the array, such as when
     SRC refers to a short string literal used to initialize a large array.
     In that case, the elements of the array after the terminating NUL are
     all NUL.  */
  HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
  strelts = strelts / eltsize;

  if (!tree_fits_uhwi_p (memsize))
    return NULL_TREE;

  HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize;

  /* PTR can point to the byte representation of any string type, including
     char* and wchar_t*.  */
  const char *ptr = TREE_STRING_POINTER (src);

  if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
    {
      /* The code below works only for single byte character types.  */
      if (eltsize != 1)
	return NULL_TREE;

      /* If the string has an internal NUL character followed by any
	 non-NUL characters (e.g., "foo\0bar"), we can't compute
	 the offset to the following NUL if we don't know where to
	 start searching for it.  */
      unsigned len = string_length (ptr, eltsize, strelts);

      /* Return when an embedded null character is found or none at all.
	 In the latter case, set the DECL/LEN field in the DATA structure
	 so that callers may examine them.  */
      if (len + 1 < strelts)
	return NULL_TREE;
      else if (len >= maxelts)
	{
	  data->decl = decl;
	  data->off = byteoff;
	  data->minlen = ssize_int (len);
	  return NULL_TREE;
	}

      /* For empty strings the result should be zero.  */
      if (len == 0)
	return ssize_int (0);

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  If the offset falls within the bounds
	 of the string subtract the offset from the length of the string,
	 and return that.  Otherwise the length is zero.  Take care to
	 use SAVE_EXPR in case the OFFSET has side-effects.  */
      tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff)
						 : byteoff;
      offsave = fold_convert_loc (loc, sizetype, offsave);
      tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
				      size_int (len));
      tree lenexp = fold_build2_loc (loc, MINUS_EXPR, sizetype, size_int (len),
				     offsave);
      lenexp = fold_convert_loc (loc, ssizetype, lenexp);
      return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
			      build_zero_cst (ssizetype));
    }

  /* Offset from the beginning of the string in elements.  */
  HOST_WIDE_INT eltoff;

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (byteoff == 0)
    eltoff = 0;
  else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize)
    eltoff = -1;
  else
    eltoff = tree_to_uhwi (byteoff) / eltsize;

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (eltoff < 0 || eltoff >= maxelts)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (arg)
	  && warning_at (loc, OPT_Warray_bounds,
			 "offset %qwi outside bounds of constant string",
			 eltoff))
	{
	  if (decl)
	    inform (DECL_SOURCE_LOCATION (decl), "%qE declared here", decl);
	  TREE_NO_WARNING (arg) = 1;
	}
      return NULL_TREE;
    }

  /* If eltoff is larger than strelts but less than maxelts the
     string length is zero, since the excess memory will be zero.  */
  if (eltoff > strelts)
    return ssize_int (0);

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since ELTOFF is our starting index into the string, no further
     calculation is needed.  */
  unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
				strelts - eltoff);

  /* Don't know what to return if there was no zero termination.
     Ideally this would turn into a gcc_checking_assert over time.
     Set DECL/LEN so callers can examine them.  */
  if (len >= maxelts - eltoff)
    {
      data->decl = decl;
      data->off = byteoff;
      data->minlen = ssize_int (len);
      return NULL_TREE;
    }

  return ssize_int (len);
}
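/* For example, for the STRING_CST "hello" and ELTSIZE 1, c_strlen
   evaluates to ssize_int (5); with a known byte offset of 2 into the
   string it evaluates to ssize_int (3).  */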
/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  If
   NULL_TERMINATED_P, reading stops after '\0' character, all further ones
   are assumed to be zero, otherwise it reads as many characters
   as needed.  */

rtx
c_readstr (const char *str, scalar_int_mode mode,
	   bool null_terminated_p /*=true*/)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch || !null_terminated_p)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
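/* For example, on a little-endian target c_readstr ("ab", SImode)
   yields the constant 0x6261: byte 0 is 'a' (0x61), byte 1 is 'b' (0x62),
   and the remaining bytes are zero due to the terminating NUL.  */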
/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (VAR_P (exp) && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
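/* For example, __builtin_return_address (0) expands to a read of the
   return address in the current frame, while a COUNT of 1 first follows
   the dynamic chain once to reach the caller's frame.  */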
/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, hard_frame_pointer_rtx);

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = rtx_for_static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[]
	= ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  lab = copy_to_reg (lab);

	  /* Restore the frame pointer and stack pointer.  We must use a
	     temporary since the setjmp buffer may be a local.  */
	  fp = copy_to_reg (fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  /* Ensure the frame pointer move is not optimized.  */
	  emit_insn (gen_blockage ());
	  emit_clobber (hard_frame_pointer_rtx);
	  emit_clobber (frame_pointer_rtx);
	  emit_move_insn (hard_frame_pointer_rtx, fp);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
/* Return true if more const call expr args remain in ITER.  */

static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  /* Get a bitmap of pointer argument numbers declared attribute nonnull.  */
  tree fn = CALL_EXPR_FN (callexpr);
  bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));

  for (unsigned argno = 1; ; ++argno)
    {
      code = (enum tree_code) va_arg (ap, int);

      switch (code)
	{
	case 0:
	  /* This signifies an ellipses, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	case POINTER_TYPE:
	  /* The actual argument must be nonnull when either the whole
	     called function has been declared nonnull, or when the formal
	     argument corresponding to the actual argument has been.  */
	  if (argmap
	      && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
	    {
	      arg = next_const_call_expr_arg (&iter);
	      if (!validate_arg (arg, code) || integer_zerop (arg))
		goto end;
	      break;
	    }
	  /* FALLTHRU */
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  BITMAP_FREE (argmap);

  return res;
}
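/* A typical use, as in the expanders below:

     if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
       return NULL_RTX;

   verifies that the call has exactly two pointer arguments, each nonnull
   where the function or its arguments are declared nonnull.  */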
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      r_label = copy_to_reg (r_label);

      /* Restore the frame pointer and stack pointer.  We must use a
	 temporary since the setjmp buffer may be a local.  */
      r_fp = copy_to_reg (r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* Ensure the frame pointer move is not optimized.  */
      emit_insn (gen_blockage ());
      emit_clobber (hard_frame_pointer_rtx);
      emit_clobber (frame_pointer_rtx);
      emit_move_insn (hard_frame_pointer_rtx, r_fp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
      if (regnum != INVALID_REGNUM && fixed_regs[regnum])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}
/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  buf_addr = convert_memory_address (Pmode, buf_addr);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address (sa_mode,
				   plus_constant (Pmode, buf_addr,
						  2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      class expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
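/* For example, __builtin_prefetch (p, 1, 3) emits a prefetch of *P for
   writing with maximum temporal locality; on targets without a prefetch
   pattern only the side effects of evaluating P are expanded.  */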
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, cpymemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}
/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)
/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
    }
  return size;
}
1828 and initialize apply_result_mode. */
1831 apply_result_size (void)
1833 static int size
= -1;
1836 /* The values computed by this function never change. */
1841 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1842 if (targetm
.calls
.function_value_regno_p (regno
))
1844 fixed_size_mode mode
= targetm
.calls
.get_raw_result_mode (regno
);
1846 gcc_assert (mode
!= VOIDmode
);
1848 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1849 if (size
% align
!= 0)
1850 size
= CEIL (size
, align
) * align
;
1851 size
+= GET_MODE_SIZE (mode
);
1852 apply_result_mode
[regno
] = mode
;
1855 apply_result_mode
[regno
] = as_a
<fixed_size_mode
> (VOIDmode
);
1857 /* Allow targets that use untyped_call and untyped_return to override
1858 the size so that machine-specific information can be stored here. */
1859 #ifdef APPLY_RESULT_SIZE
1860 size
= APPLY_RESULT_SIZE
;
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  fixed_size_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
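
/* Illustrative sketch (not from the original source): on a target whose
   only live result register is DImode reg 0, result_vector (1, result)
   builds roughly

     (parallel [(set (mem:DI result-block) (reg:DI 0))])

   i.e. one SET per live result register, storing into (SAVEP) or loading
   from (!SAVEP) the RESULT block at the register's packed offset.  */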
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  fixed_size_mode mode;
  rtx struct_incoming_value
    = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
				      crtl->args.pretend_args_size),
		       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    emit_move_insn (adjust_address (registers, Pmode, size),
		    copy_to_reg (struct_incoming_value));

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    rtx_insn *seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
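
/* Usage sketch (illustrative only; `target_fn' and the 128-byte argument
   block size are hypothetical): forwarding all incoming arguments to
   another function with the builtins expanded above and below:

     void forwarder (void)
     {
       void *args = __builtin_apply_args ();
       void *ret = __builtin_apply ((void (*) ()) target_fn, args, 128);
       __builtin_return (ret);
     }
*/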
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  fixed_size_mode mode;
  rtx incoming_args, result, reg, dest, src;
  rtx_call_insn *call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
  if (!STACK_GROWS_DOWNWARD)
    incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
					 incoming_args, 0, OPTAB_LIB_WIDEN);

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
  else
    emit_stack_save (SAVE_BLOCK, &old_stack_level);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
  if (!STACK_GROWS_DOWNWARD)
    {
      if (CONST_INT_P (argsize))
	dest = plus_constant (Pmode, dest, -INTVAL (argsize));
      else
	dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
    }
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
  if (targetm.have_untyped_call ())
    {
      rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
      emit_call_insn (targetm.gen_untyped_call (mem, result,
						result_vector (1, result)));
    }
  else if (targetm.have_call_value ())
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    gcc_assert (!valreg); /* have_untyped_call required.  */

	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_insn (targetm.gen_call_value (valreg,
					 gen_rtx_MEM (FUNCTION_MODE, function),
					 const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
  else
    emit_stack_restore (SAVE_BLOCK, old_stack_level);
  fixup_args_size_notes (call_insn, get_last_insn (), 0);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
/* Perform an untyped return.  */

static void
expand_builtin_return (rtx result)
{
  int size, align, regno;
  fixed_size_mode mode;
  rtx reg;
  rtx_insn *call_fusage = 0;

  result = convert_memory_address (Pmode, result);

  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

  if (targetm.have_untyped_return ())
    {
      rtx vector = result_vector (0, result);
      emit_jump_insn (targetm.gen_untyped_return (result, vector));
      emit_barrier ();
      return;
    }

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
	emit_move_insn (reg, adjust_address (result, mode, size));

	push_to_sequence (call_fusage);
	emit_use (reg);
	call_fusage = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insn (call_fusage);

  /* Return whatever values was restored by jumping directly to the end
     of the function.  */
  expand_naked_return ();
}
/* Used by expand_builtin_classify_type and fold_builtin_classify_type.  */

static enum type_class
type_to_class (tree type)
{
  switch (TREE_CODE (type))
    {
    case VOID_TYPE:	   return void_type_class;
    case INTEGER_TYPE:	   return integer_type_class;
    case ENUMERAL_TYPE:	   return enumeral_type_class;
    case BOOLEAN_TYPE:	   return boolean_type_class;
    case POINTER_TYPE:	   return pointer_type_class;
    case REFERENCE_TYPE:   return reference_type_class;
    case OFFSET_TYPE:	   return offset_type_class;
    case REAL_TYPE:	   return real_type_class;
    case COMPLEX_TYPE:	   return complex_type_class;
    case FUNCTION_TYPE:	   return function_type_class;
    case METHOD_TYPE:	   return method_type_class;
    case RECORD_TYPE:	   return record_type_class;
    case UNION_TYPE:
    case QUAL_UNION_TYPE:  return union_type_class;
    case ARRAY_TYPE:	   return (TYPE_STRING_FLAG (type)
				   ? string_type_class : array_type_class);
    case LANG_TYPE:	   return lang_type_class;
    default:		   return no_type_class;
    }
}

/* Expand a call EXP to __builtin_classify_type.  */

static rtx
expand_builtin_classify_type (tree exp)
{
  if (call_expr_nargs (exp))
    return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
  return GEN_INT (no_type_class);
}
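
/* Example (illustrative, not from the original source):
   __builtin_classify_type (1.5) yields real_type_class, and
   __builtin_classify_type ("") yields pointer_type_class (the array
   argument decays); a call with no arguments yields no_type_class.  */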
/* This helper macro, meant to be used in mathfn_built_in below, determines
   which among a set of builtin math functions is appropriate for a given type
   mode.  The `F' (float) and `L' (long double) are automatically generated
   from the 'double' case.  If a function supports the _Float<N> and _Float<N>X
   types, there are additional types that are considered with 'F32', 'F64',
   'F128', etc. suffixes.  */
#define CASE_MATHFN(MATHFN) \
  CASE_CFN_##MATHFN: \
  fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
  fcodel = BUILT_IN_##MATHFN##L ; break;
/* Similar to the above, but also add support for the _Float<N> and
   _Float<N>X variants.  */
#define CASE_MATHFN_FLOATN(MATHFN) \
  CASE_CFN_##MATHFN: \
  fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
  fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
  fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
  fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
  fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
  break;
/* Similar to above, but appends _R after any F/L suffix.  */
#define CASE_MATHFN_REENT(MATHFN) \
  case CFN_BUILT_IN_##MATHFN##_R: \
  case CFN_BUILT_IN_##MATHFN##F_R: \
  case CFN_BUILT_IN_##MATHFN##L_R: \
  fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
  fcodel = BUILT_IN_##MATHFN##L_R ; break;
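
/* For illustration: CASE_MATHFN (SQRT) expands roughly to

     CASE_CFN_SQRT:
       fcode = BUILT_IN_SQRT; fcodef = BUILT_IN_SQRTF;
       fcodel = BUILT_IN_SQRTL; break;

   so each entry in the table below contributes one case group to the
   switch in mathfn_built_in_2.  */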
/* Return a function equivalent to FN but operating on floating-point
   values of type TYPE, or END_BUILTINS if no such function exists.
   This is purely an operation on function codes; it does not guarantee
   that the target actually has an implementation of the function.  */

static built_in_function
mathfn_built_in_2 (tree type, combined_fn fn)
{
  tree mtype;
  built_in_function fcode, fcodef, fcodel;
  built_in_function fcodef16 = END_BUILTINS;
  built_in_function fcodef32 = END_BUILTINS;
  built_in_function fcodef64 = END_BUILTINS;
  built_in_function fcodef128 = END_BUILTINS;
  built_in_function fcodef32x = END_BUILTINS;
  built_in_function fcodef64x = END_BUILTINS;
  built_in_function fcodef128x = END_BUILTINS;

#define SEQ_OF_CASE_MATHFN			\
  CASE_MATHFN (ACOS)				\
  CASE_MATHFN (ACOSH)				\
  CASE_MATHFN (ASIN)				\
  CASE_MATHFN (ASINH)				\
  CASE_MATHFN (ATAN)				\
  CASE_MATHFN (ATAN2)				\
  CASE_MATHFN (ATANH)				\
  CASE_MATHFN (CBRT)				\
  CASE_MATHFN_FLOATN (CEIL)			\
  CASE_MATHFN (CEXPI)				\
  CASE_MATHFN_FLOATN (COPYSIGN)			\
  CASE_MATHFN (COS)				\
  CASE_MATHFN (COSH)				\
  CASE_MATHFN (DREM)				\
  CASE_MATHFN (ERF)				\
  CASE_MATHFN (ERFC)				\
  CASE_MATHFN (EXP)				\
  CASE_MATHFN (EXP10)				\
  CASE_MATHFN (EXP2)				\
  CASE_MATHFN (EXPM1)				\
  CASE_MATHFN (FABS)				\
  CASE_MATHFN (FDIM)				\
  CASE_MATHFN_FLOATN (FLOOR)			\
  CASE_MATHFN_FLOATN (FMA)			\
  CASE_MATHFN_FLOATN (FMAX)			\
  CASE_MATHFN_FLOATN (FMIN)			\
  CASE_MATHFN (FMOD)				\
  CASE_MATHFN (FREXP)				\
  CASE_MATHFN (GAMMA)				\
  CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */	\
  CASE_MATHFN (HUGE_VAL)			\
  CASE_MATHFN (HYPOT)				\
  CASE_MATHFN (ILOGB)				\
  CASE_MATHFN (ICEIL)				\
  CASE_MATHFN (IFLOOR)				\
  CASE_MATHFN (INF)				\
  CASE_MATHFN (IRINT)				\
  CASE_MATHFN (IROUND)				\
  CASE_MATHFN (ISINF)				\
  CASE_MATHFN (J0)				\
  CASE_MATHFN (J1)				\
  CASE_MATHFN (JN)				\
  CASE_MATHFN (LCEIL)				\
  CASE_MATHFN (LDEXP)				\
  CASE_MATHFN (LFLOOR)				\
  CASE_MATHFN (LGAMMA)				\
  CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */	\
  CASE_MATHFN (LLCEIL)				\
  CASE_MATHFN (LLFLOOR)				\
  CASE_MATHFN (LLRINT)				\
  CASE_MATHFN (LLROUND)				\
  CASE_MATHFN (LOG)				\
  CASE_MATHFN (LOG10)				\
  CASE_MATHFN (LOG1P)				\
  CASE_MATHFN (LOG2)				\
  CASE_MATHFN (LOGB)				\
  CASE_MATHFN (LRINT)				\
  CASE_MATHFN (LROUND)				\
  CASE_MATHFN (MODF)				\
  CASE_MATHFN (NAN)				\
  CASE_MATHFN (NANS)				\
  CASE_MATHFN_FLOATN (NEARBYINT)		\
  CASE_MATHFN (NEXTAFTER)			\
  CASE_MATHFN (NEXTTOWARD)			\
  CASE_MATHFN (POW)				\
  CASE_MATHFN (POWI)				\
  CASE_MATHFN (POW10)				\
  CASE_MATHFN (REMAINDER)			\
  CASE_MATHFN (REMQUO)				\
  CASE_MATHFN_FLOATN (RINT)			\
  CASE_MATHFN_FLOATN (ROUND)			\
  CASE_MATHFN_FLOATN (ROUNDEVEN)		\
  CASE_MATHFN (SCALB)				\
  CASE_MATHFN (SCALBLN)				\
  CASE_MATHFN (SCALBN)				\
  CASE_MATHFN (SIGNBIT)				\
  CASE_MATHFN (SIGNIFICAND)			\
  CASE_MATHFN (SIN)				\
  CASE_MATHFN (SINCOS)				\
  CASE_MATHFN (SINH)				\
  CASE_MATHFN_FLOATN (SQRT)			\
  CASE_MATHFN (TAN)				\
  CASE_MATHFN (TANH)				\
  CASE_MATHFN (TGAMMA)				\
  CASE_MATHFN_FLOATN (TRUNC)			\
  CASE_MATHFN (Y0)				\
  CASE_MATHFN (Y1)				\
  CASE_MATHFN (YN)

  switch (fn)
    {
    SEQ_OF_CASE_MATHFN

    default:
      return END_BUILTINS;
    }

  mtype = TYPE_MAIN_VARIANT (type);
  if (mtype == double_type_node)
    return fcode;
  else if (mtype == float_type_node)
    return fcodef;
  else if (mtype == long_double_type_node)
    return fcodel;
  else if (mtype == float16_type_node)
    return fcodef16;
  else if (mtype == float32_type_node)
    return fcodef32;
  else if (mtype == float64_type_node)
    return fcodef64;
  else if (mtype == float128_type_node)
    return fcodef128;
  else if (mtype == float32x_type_node)
    return fcodef32x;
  else if (mtype == float64x_type_node)
    return fcodef64x;
  else if (mtype == float128x_type_node)
    return fcodef128x;
  else
    return END_BUILTINS;
}

#undef CASE_MATHFN
#undef CASE_MATHFN_FLOATN
#undef CASE_MATHFN_REENT
/* Return the mathematical function equivalent to FN but operating directly
   on TYPE, if available.  If IMPLICIT_P is true use the implicit builtin
   declaration, otherwise use the explicit declaration.  If we can't do the
   conversion, return null.  */

static tree
mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
{
  built_in_function fcode2 = mathfn_built_in_2 (type, fn);
  if (fcode2 == END_BUILTINS)
    return NULL_TREE;

  if (implicit_p && !builtin_decl_implicit_p (fcode2))
    return NULL_TREE;

  return builtin_decl_explicit (fcode2);
}

/* Like mathfn_built_in_1, but always use the implicit array.  */

tree
mathfn_built_in (tree type, combined_fn fn)
{
  return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
}

/* Like mathfn_built_in_1, but take a built_in_function and
   always use the implicit array.  */

tree
mathfn_built_in (tree type, enum built_in_function fn)
{
  return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
}
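
/* Usage sketch (hypothetical caller): retargeting a double builtin to
   float:

     tree fn = mathfn_built_in (float_type_node, BUILT_IN_SIN);

   FN is then the decl for sinf, or NULL_TREE if the float variant is not
   implicitly available.  */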
/* Return the type associated with a built in function, i.e., the one
   to be passed to mathfn_built_in to get the type-specific
   function.  */

tree
mathfn_built_in_type (combined_fn fn)
{
#define CASE_MATHFN(MATHFN)			\
  case CFN_BUILT_IN_##MATHFN:			\
    return double_type_node;			\
  case CFN_BUILT_IN_##MATHFN##F:		\
    return float_type_node;			\
  case CFN_BUILT_IN_##MATHFN##L:		\
    return long_double_type_node;

#define CASE_MATHFN_FLOATN(MATHFN)		\
  CASE_MATHFN(MATHFN)				\
  case CFN_BUILT_IN_##MATHFN##F16:		\
    return float16_type_node;			\
  case CFN_BUILT_IN_##MATHFN##F32:		\
    return float32_type_node;			\
  case CFN_BUILT_IN_##MATHFN##F64:		\
    return float64_type_node;			\
  case CFN_BUILT_IN_##MATHFN##F128:		\
    return float128_type_node;			\
  case CFN_BUILT_IN_##MATHFN##F32X:		\
    return float32x_type_node;			\
  case CFN_BUILT_IN_##MATHFN##F64X:		\
    return float64x_type_node;			\
  case CFN_BUILT_IN_##MATHFN##F128X:		\
    return float128x_type_node;

/* Similar to above, but appends _R after any F/L suffix.  */
#define CASE_MATHFN_REENT(MATHFN)		\
  case CFN_BUILT_IN_##MATHFN##_R:		\
    return double_type_node;			\
  case CFN_BUILT_IN_##MATHFN##F_R:		\
    return float_type_node;			\
  case CFN_BUILT_IN_##MATHFN##L_R:		\
    return long_double_type_node;

  switch (fn)
    {
    SEQ_OF_CASE_MATHFN

    default:
      return NULL_TREE;
    }

#undef CASE_MATHFN
#undef CASE_MATHFN_FLOATN
#undef CASE_MATHFN_REENT
#undef SEQ_OF_CASE_MATHFN
}
/* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
   return its code, otherwise return IFN_LAST.  Note that this function
   only tests whether the function is defined in internals.def, not whether
   it is actually available on the target.  */

internal_fn
associated_internal_fn (tree fndecl)
{
  gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
  tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
  switch (DECL_FUNCTION_CODE (fndecl))
    {
#define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
    CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
#define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
    CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
#define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
    CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
#include "internal-fn.def"

    CASE_FLT_FN (BUILT_IN_POW10):
      return IFN_EXP10;

    CASE_FLT_FN (BUILT_IN_DREM):
      return IFN_REMAINDER;

    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
	return IFN_LDEXP;
      return IFN_LAST;

    default:
      return IFN_LAST;
    }
}

/* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
   on the current target by a call to an internal function, return the
   code of that internal function, otherwise return IFN_LAST.  The caller
   is responsible for ensuring that any side-effects of the built-in
   call are dealt with correctly.  E.g. if CALL sets errno, the caller
   must decide that the errno result isn't needed or make it available
   in some other way.  */

internal_fn
replacement_internal_fn (gcall *call)
{
  if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
    {
      internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
      if (ifn != IFN_LAST)
	{
	  tree_pair types = direct_internal_fn_types (ifn, call);
	  optimization_type opt_type = bb_optimization_type (gimple_bb (call));
	  if (direct_internal_fn_supported_p (ifn, types, opt_type))
	    return ifn;
	}
    }
  return IFN_LAST;
}
/* Expand a call to the builtin trinary math functions (fma).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, op2, result;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1, arg2;
  machine_mode mode;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  arg2 = CALL_EXPR_ARG (exp, 2);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_FMA):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
      builtin_optab = fma_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  result = gen_reg_rtx (mode);

  /* Always stabilize the argument list.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
  CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);
  op2 = expand_normal (arg2);

  start_sequence ();

  /* Compute into RESULT.
     Set RESULT to wherever the result comes back.  */
  result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
			      result, 0);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (result == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return result;
}
/* Expand a call to the builtin sin and cos math functions.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      builtin_optab = sincos_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Check if sincos insn is available, otherwise fallback
     to sin or cos insn.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_FLT_FN (BUILT_IN_SIN):
	builtin_optab = sin_optab; break;
      CASE_FLT_FN (BUILT_IN_COS):
	builtin_optab = cos_optab; break;
      default:
	gcc_unreachable ();
      }

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into RESULT.
	 Set RESULT to wherever the result comes back.  */
      if (builtin_optab == sincos_optab)
	{
	  int ok;

	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_SIN):
	      ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_COS):
	      ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  gcc_assert (ok);
	}
      else
	result = expand_unop (mode, builtin_optab, op0, result, 0);

      if (result != 0)
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return result;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  return expand_call (exp, target, target == const0_rtx);
}
/* Given an interclass math builtin decl FNDECL and its argument ARG
   return an RTL instruction code that implements the functionality.
   If that isn't possible or available return CODE_FOR_nothing.  */

static enum insn_code
interclass_mathfn_icode (tree arg, tree fndecl)
{
  bool errno_set = false;
  optab builtin_optab = unknown_optab;
  machine_mode mode;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ILOGB):
      errno_set = true; builtin_optab = ilogb_optab; break;
    CASE_FLT_FN (BUILT_IN_ISINF):
      builtin_optab = isinf_optab; break;
    case BUILT_IN_ISNORMAL:
    case BUILT_IN_ISFINITE:
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      /* These builtins have no optabs (yet).  */
      break;
    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && errno_set)
    return CODE_FOR_nothing;

  /* Optab mode depends on the mode of the input argument.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (builtin_optab)
    return optab_handler (builtin_optab, mode);
  return CODE_FOR_nothing;
}
/* Expand a call to one of the builtin math functions that operate on
   floating point argument and output an integer result (ilogb, isinf,
   isnan, etc).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_interclass_mathfn (tree exp, rtx target)
{
  enum insn_code icode = CODE_FOR_nothing;
  rtx op0;
  tree fndecl = get_callee_fndecl (exp);
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  icode = interclass_mathfn_icode (arg, fndecl);
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (icode != CODE_FOR_nothing)
    {
      class expand_operand ops[1];
      rtx_insn *last = get_last_insn ();
      tree orig_arg = arg;

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      if (mode != GET_MODE (op0))
	op0 = convert_to_mode (mode, op0, 0);

      create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_legitimize_operands (icode, 0, 1, ops)
	  && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
	return ops[0].value;

      delete_insns_since (last);
      CALL_EXPR_ARG (exp, 0) = orig_arg;
    }

  return NULL_RTX;
}
/* Expand a call to the builtin sincos math function.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function.  */

static rtx
expand_builtin_sincos (tree exp)
{
  rtx op0, op1, op2, target1, target2;
  machine_mode mode;
  tree arg, sinp, cosp;
  int result;
  location_t loc = EXPR_LOCATION (exp);
  tree alias_type, alias_off;

  if (!validate_arglist (exp, REAL_TYPE,
			 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  sinp = CALL_EXPR_ARG (exp, 1);
  cosp = CALL_EXPR_ARG (exp, 2);

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Check if sincos insn is available, otherwise emit the call.  */
  if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  target1 = gen_reg_rtx (mode);
  target2 = gen_reg_rtx (mode);

  op0 = expand_normal (arg);
  alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
  alias_off = build_int_cst (alias_type, 0);
  op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
					sinp, alias_off));
  op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
					cosp, alias_off));

  /* Compute into target1 and target2.
     Set TARGET to wherever the result comes back.  */
  result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
  gcc_assert (result);

  /* Move target1 and target2 to the memory locations indicated
     by op1 and op2.  */
  emit_move_insn (op1, target1);
  emit_move_insn (op2, target2);

  return const0_rtx;
}
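
/* Illustration (not from the original source): a call sincos (x, &s, &c)
   is expanded here into a single sincos-pattern insn that computes both
   values at once, followed by two stores:

     (t1, t2) = sincos-insn (x);   t1 = sin (x), t2 = cos (x)
     *sinp = t1;  *cosp = t2;
*/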
/* Expand a call to the internal cexpi builtin to the sincos math function.
   EXP is the expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  */

static rtx
expand_builtin_cexpi (tree exp, rtx target)
{
  tree fndecl = get_callee_fndecl (exp);
  tree arg, type;
  machine_mode mode;
  rtx op0, op1, op2;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Try expanding via a sincos optab, fall back to emitting a libcall
     to sincos or cexp.  We are sure we have sincos or cexp because cexpi
     is only generated from sincos, cexp or if we have either of them.  */
  if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
    {
      op1 = gen_reg_rtx (mode);
      op2 = gen_reg_rtx (mode);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      /* Compute into op1 and op2.  */
      expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
    }
  else if (targetm.libc_has_function (function_sincos, type))
    {
      tree call, fn = NULL_TREE;
      tree top1, top2;
      rtx op1a, op2a;

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = builtin_decl_explicit (BUILT_IN_SINCOS);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
      else
	gcc_unreachable ();

      op1 = assign_temp (TREE_TYPE (arg), 1, 1);
      op2 = assign_temp (TREE_TYPE (arg), 1, 1);
      op1a = copy_addr_to_reg (XEXP (op1, 0));
      op2a = copy_addr_to_reg (XEXP (op2, 0));
      top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
      top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);

      /* Make sure not to fold the sincos call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
				      call, 3, arg, top1, top2));
    }
  else
    {
      tree call, fn = NULL_TREE, narg;
      tree ctype = build_complex_type (type);

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = builtin_decl_explicit (BUILT_IN_CEXPF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = builtin_decl_explicit (BUILT_IN_CEXP);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = builtin_decl_explicit (BUILT_IN_CEXPL);
      else
	gcc_unreachable ();

      /* If we don't have a decl for cexp create one.  This is the
	 friendliest fallback if the user calls __builtin_cexpi
	 without full target C99 function support.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;
	  const char *name = NULL;

	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	    name = "cexpf";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	    name = "cexp";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	    name = "cexpl";

	  fntype = build_function_type_list (ctype, ctype, NULL_TREE);
	  fn = build_fn_decl (name, fntype);
	}

      narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
			      build_real (type, dconst0), arg);

      /* Make sure not to fold the cexp call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      return expand_expr (build_call_nary (ctype, call, 1, narg),
			  target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now build the proper return type.  */
  return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
			      make_tree (TREE_TYPE (arg), op2),
			      make_tree (TREE_TYPE (arg), op1)),
		      target, VOIDmode, EXPAND_NORMAL);
}
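
/* Illustration (not from the original source): since cexpi (x) is
   cos (x) + i*sin (x) = cexp (i*x), __builtin_cexpif (x) is lowered,
   in order of preference, to a sincos insn, to a call
   sincosf (x, &s, &c) with result c + s*i, or to cexpf (0.0f + x*i).  */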
/* Conveniently construct a function call expression.  FNDECL names the
   function to be called, N is the number of arguments, and the "..."
   parameters are the argument expressions.  Unlike build_call_expr
   this doesn't fold the call, hence it will always return a CALL_EXPR.  */

static tree
build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
{
  va_list ap;
  tree fntype = TREE_TYPE (fndecl);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);

  va_start (ap, n);
  fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
  va_end (ap);
  SET_EXPR_LOCATION (fn, loc);
  return fn;
}
/* Expand a call to one of the builtin rounding functions gcc defines
   as an extension (lfloor and lceil).  As these are gcc extensions we
   do not need to worry about setting errno to EDOM.
   If expanding via optab fails, lower expression to (int)(floor(x)).
   EXP is the expression that is a call to the builtin function;
   if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, tmp;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fallback_fn;
  tree fallback_fndecl;
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
      builtin_optab = lceil_optab;
      fallback_fn = BUILT_IN_CEIL;
      break;

    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      builtin_optab = lfloor_optab;
      fallback_fn = BUILT_IN_FLOOR;
      break;

    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  /* Compute into TARGET.  */
  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns).  */
  end_sequence ();

  /* Fall back to floating point rounding optab.  */
  fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);

  /* For non-C99 targets we may end up without a fallback fndecl here
     if the user called __builtin_lfloor directly.  In this case emit
     a call to the floor/ceil variants nevertheless.  This should result
     in the best user experience for not full C99 targets.  */
  if (fallback_fndecl == NULL_TREE)
    {
      tree fntype;
      const char *name = NULL;

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_ICEIL:
	case BUILT_IN_LCEIL:
	case BUILT_IN_LLCEIL:
	  name = "ceil";
	  break;
	case BUILT_IN_ICEILF:
	case BUILT_IN_LCEILF:
	case BUILT_IN_LLCEILF:
	  name = "ceilf";
	  break;
	case BUILT_IN_ICEILL:
	case BUILT_IN_LCEILL:
	case BUILT_IN_LLCEILL:
	  name = "ceill";
	  break;
	case BUILT_IN_IFLOOR:
	case BUILT_IN_LFLOOR:
	case BUILT_IN_LLFLOOR:
	  name = "floor";
	  break;
	case BUILT_IN_IFLOORF:
	case BUILT_IN_LFLOORF:
	case BUILT_IN_LLFLOORF:
	  name = "floorf";
	  break;
	case BUILT_IN_IFLOORL:
	case BUILT_IN_LFLOORL:
	case BUILT_IN_LLFLOORL:
	  name = "floorl";
	  break;
	default:
	  gcc_unreachable ();
	}

      fntype = build_function_type_list (TREE_TYPE (arg),
					 TREE_TYPE (arg), NULL_TREE);
      fallback_fndecl = build_fn_decl (name, fntype);
    }

  exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);

  tmp = expand_normal (exp);
  tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));

  /* Truncate the result of floating point optab to integer
     via expand_fix ().  */
  target = gen_reg_rtx (mode);
  expand_fix (target, tmp, 0);

  return target;
}
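
/* Illustration (not from the original source): on a target without an
   lfloor pattern, __builtin_lfloor (x) for double ends up expanded as
   roughly

     long r = (long) floor (x);

   i.e. a call to the floor variant followed by an expand_fix truncation,
   exactly the fallback path implemented above.  */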
/* Expand a call to one of the builtin math functions doing integer
   conversion (lrint).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn_2 (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg;
  machine_mode mode;
  enum built_in_function fallback_fn = BUILT_IN_NONE;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_IRINT):
      fallback_fn = BUILT_IN_LRINT;
      gcc_fallthrough ();
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      builtin_optab = lrint_optab;
      break;

    CASE_FLT_FN (BUILT_IN_IROUND):
      fallback_fn = BUILT_IN_LROUND;
      gcc_fallthrough ();
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      builtin_optab = lround_optab;
      break;

    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
    return NULL_RTX;

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (!flag_errno_math)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      if (expand_sfix_optab (result, op0, builtin_optab))
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return result;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  if (fallback_fn != BUILT_IN_NONE)
    {
      /* Fall back to rounding to long int.  Use implicit_p 0 - for non-C99
	 targets, (int) round (x) should never be transformed into
	 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
	 a call to lround in the hope that the target provides at least some
	 C99 functions.  This should result in the best user experience for
	 not full C99 targets.  */
      tree fallback_fndecl = mathfn_built_in_1
	(TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);

      exp = build_call_nofold_loc (EXPR_LOCATION (exp),
				   fallback_fndecl, 1, arg);

      target = expand_call (exp, NULL_RTX, target == const0_rtx);
      target = maybe_emit_group_store (target, TREE_TYPE (exp));
      return convert_to_mode (mode, target, 0);
    }

  return expand_call (exp, target, target == const0_rtx);
}
/* Expand a call to the powi built-in mathematical function.  Return NULL_RTX if
   a normal call should be emitted rather than expanding the function
   in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_powi (tree exp, rtx target)
{
  tree arg0, arg1;
  rtx op0, op1;
  machine_mode mode;
  machine_mode mode2;

  if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Emit a libcall to libgcc.  */

  /* Mode of the 2nd argument must match that of an int.  */
  mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();

  if (target == NULL_RTX)
    target = gen_reg_rtx (mode);

  op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != mode)
    op0 = convert_to_mode (mode, op0, 0);
  op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
  if (GET_MODE (op1) != mode2)
    op1 = convert_to_mode (mode2, op1, 0);

  target = emit_library_call_value (optab_libfunc (powi_optab, mode),
				    target, LCT_CONST, mode,
				    op0, mode, op1, mode2);

  return target;
}
/* Expand expression EXP which is a call to the strlen builtin.  Return
   NULL_RTX if we failed and the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strlen (tree exp, rtx target,
		       machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree src = CALL_EXPR_ARG (exp, 0);
  if (!check_read_access (exp, src))
    return NULL_RTX;

  /* If the length can be computed at compile-time, return it.  */
  if (tree len = c_strlen (src, 0))
    return expand_expr (len, target, target_mode, EXPAND_NORMAL);

  /* If the length can be computed at compile-time and is constant
     integer, but there are side-effects in src, evaluate
     src for side-effects, then return len.
     E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
     can be optimized into: i++; x = 3;  */
  tree len = c_strlen (src, 1);
  if (len && TREE_CODE (len) == INTEGER_CST)
    {
      expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (len, target, target_mode, EXPAND_NORMAL);
    }

  unsigned int align = get_pointer_alignment (src) / BITS_PER_UNIT;

  /* If SRC is not a pointer type, don't do this operation inline.  */
  if (align == 0)
    return NULL_RTX;

  /* Bail out if we can't compute strlen in the right mode.  */
  machine_mode insn_mode;
  enum insn_code icode = CODE_FOR_nothing;
  FOR_EACH_MODE_FROM (insn_mode, target_mode)
    {
      icode = optab_handler (strlen_optab, insn_mode);
      if (icode != CODE_FOR_nothing)
	break;
    }
  if (insn_mode == VOIDmode)
    return NULL_RTX;

  /* Make a place to hold the source address.  We will not expand
     the actual source until we are sure that the expansion will
     not fail -- there are trees that cannot be expanded twice.  */
  rtx src_reg = gen_reg_rtx (Pmode);

  /* Mark the beginning of the strlen sequence so we can emit the
     source operand later.  */
  rtx_insn *before_strlen = get_last_insn ();

  class expand_operand ops[4];
  create_output_operand (&ops[0], target, insn_mode);
  create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
  create_integer_operand (&ops[2], 0);
  create_integer_operand (&ops[3], align);
  if (!maybe_expand_insn (icode, 4, ops))
    return NULL_RTX;

  /* Check to see if the argument was declared attribute nonstring
     and if so, issue a warning since at this point it's not known
     to be nul-terminated.  */
  maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);

  /* Now that we are assured of success, expand the source.  */
  start_sequence ();
  rtx pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
  if (pat != src_reg)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      if (GET_MODE (pat) != Pmode)
	pat = convert_to_mode (Pmode, pat,
			       POINTERS_EXTEND_UNSIGNED);
#endif
      emit_move_insn (src_reg, pat);
    }
  pat = get_insns ();
  end_sequence ();

  if (before_strlen)
    emit_insn_after (pat, before_strlen);
  else
    emit_insn_before (pat, get_insns ());

  /* Return the value in the proper mode for this function.  */
  if (GET_MODE (ops[0].value) == target_mode)
    target = ops[0].value;
  else if (target != 0)
    convert_move (target, ops[0].value, 0);
  else
    target = convert_to_mode (target_mode, ops[0].value, 0);

  return target;
}
/* Expand call EXP to the strnlen built-in, returning the result
   and setting it in TARGET.  Otherwise return NULL_RTX on failure.  */

static rtx
expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree src = CALL_EXPR_ARG (exp, 0);
  tree bound = CALL_EXPR_ARG (exp, 1);

  if (!bound)
    return NULL_RTX;

  check_read_access (exp, src, bound);

  location_t loc = UNKNOWN_LOCATION;
  if (EXPR_HAS_LOCATION (exp))
    loc = EXPR_LOCATION (exp);

  /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
     so these conversions aren't necessary.  */
  c_strlen_data lendata = { };
  tree len = c_strlen (src, 0, &lendata, 1);
  if (len)
    len = fold_convert_loc (loc, TREE_TYPE (bound), len);

  if (TREE_CODE (bound) == INTEGER_CST)
    {
      if (!len)
	return NULL_RTX;

      len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
      return expand_expr (len, target, target_mode, EXPAND_NORMAL);
    }

  if (TREE_CODE (bound) != SSA_NAME)
    return NULL_RTX;

  wide_int min, max;
  enum value_range_kind rng = get_range_info (bound, &min, &max);
  if (rng != VR_RANGE)
    return NULL_RTX;

  if (!len || TREE_CODE (len) != INTEGER_CST)
    {
      bool exact;
      lendata.decl = unterminated_array (src, &len, &exact);
      if (!lendata.decl)
	return NULL_RTX;
    }

  if (wi::gtu_p (min, wi::to_wide (len)))
    return expand_expr (len, target, target_mode, EXPAND_NORMAL);

  len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
  return expand_expr (len, target, target_mode, EXPAND_NORMAL);
}
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from bytes at DATA + OFFSET and return it reinterpreted as
   a target constant.  */

static rtx
builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
			 scalar_int_mode mode)
{
  /* The REPresentation pointed to by DATA need not be a nul-terminated
     string but the caller guarantees it's large enough for MODE.  */
  const char *rep = (const char *) data;

  return c_readstr (rep + offset, mode, /*nul_terminated=*/false);
}
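
/* Illustration (not from the original source): for
   memcpy (d, "abcdefgh", 8) expanded by pieces on a 64-bit target,
   store_by_pieces asks this callback for a single DImode constant read
   from offset 0 of the string representation.  */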
/* LEN specifies the length of the block of the memcpy/memset operation.
   Figure out its range and put it into MIN_SIZE/MAX_SIZE.
   In some cases we can make very likely guess on max size, then we
   set it into PROBABLE_MAX_SIZE.  */

static void
determine_block_size (tree len, rtx len_rtx,
		      unsigned HOST_WIDE_INT *min_size,
		      unsigned HOST_WIDE_INT *max_size,
		      unsigned HOST_WIDE_INT *probable_max_size)
{
  if (CONST_INT_P (len_rtx))
    {
      *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
      return;
    }
  else
    {
      wide_int min, max;
      enum value_range_kind range_type = VR_UNDEFINED;

      /* Determine bounds from the type.  */
      if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
	*min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
      else
	*min_size = 0;
      if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
	*probable_max_size = *max_size
	  = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
      else
	*probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));

      if (TREE_CODE (len) == SSA_NAME)
	range_type = get_range_info (len, &min, &max);
      if (range_type == VR_RANGE)
	{
	  if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
	    *min_size = min.to_uhwi ();
	  if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
	    *probable_max_size = *max_size = max.to_uhwi ();
	}
      else if (range_type == VR_ANTI_RANGE)
	{
	  /* Code like

	     int n;
	     if (n < 100)
	       memcpy (a, b, n)

	     Produce anti range allowing negative values of N.  We still
	     can use the information and make a guess that N is not negative.
	     */
	  if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
	    *probable_max_size = min.to_uhwi () - 1;
	}
    }
  gcc_checking_assert (*max_size <=
		       (unsigned HOST_WIDE_INT)
		       GET_MODE_MASK (GET_MODE (len_rtx)));
}
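
/* Worked example (not from the original source): for memcpy (d, s, n)
   with N an SSA name known to lie in the range [4, 32], min_size becomes
   4 and max_size = probable_max_size = 32.  With an anti-range ~[m, M]
   where M is huge (the typical result of a signedness test as in the
   comment above), the code guesses N < m and sets probable_max_size to
   m - 1 while leaving max_size pessimistic.  */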
/* Issue a warning OPT for a bounded call EXP with a bound in RANGE
   accessing an object with SIZE.  */

static bool
maybe_warn_for_bound (int opt, location_t loc, tree exp, tree func,
		      tree bndrng[2], tree size, const access_data *pad = NULL)
{
  if (!bndrng[0] || TREE_NO_WARNING (exp))
    return false;

  tree maxobjsize = max_object_size ();

  bool warned = false;

  if (opt == OPT_Wstringop_overread)
    {
      if (tree_int_cst_lt (maxobjsize, bndrng[0]))
	{
	  if (bndrng[0] == bndrng[1])
	    warned = (func
		      ? warning_at (loc, opt,
				    "%K%qD specified bound %E "
				    "exceeds maximum object size %E",
				    exp, func, bndrng[0], maxobjsize)
		      : warning_at (loc, opt,
				    "%Kspecified bound %E "
				    "exceeds maximum object size %E",
				    exp, bndrng[0], maxobjsize));
	  else
	    warned = (func
		      ? warning_at (loc, opt,
				    "%K%qD specified bound [%E, %E] "
				    "exceeds maximum object size %E",
				    exp, func,
				    bndrng[0], bndrng[1], maxobjsize)
		      : warning_at (loc, opt,
				    "%Kspecified bound [%E, %E] "
				    "exceeds maximum object size %E",
				    exp, bndrng[0], bndrng[1], maxobjsize));
	}
      else if (!size || tree_int_cst_le (bndrng[0], size))
	return false;
      else if (tree_int_cst_equal (bndrng[0], bndrng[1]))
	warned = (func
		  ? warning_at (loc, opt,
				"%K%qD specified bound %E exceeds "
				"source size %E",
				exp, func, bndrng[0], size)
		  : warning_at (loc, opt,
				"%Kspecified bound %E exceeds "
				"source size %E",
				exp, bndrng[0], size));
      else
	warned = (func
		  ? warning_at (loc, opt,
				"%K%qD specified bound [%E, %E] exceeds "
				"source size %E",
				exp, func, bndrng[0], bndrng[1], size)
		  : warning_at (loc, opt,
				"%Kspecified bound [%E, %E] exceeds "
				"source size %E",
				exp, bndrng[0], bndrng[1], size));

      if (warned)
	{
	  if (pad && pad->src.ref)
	    {
	      if (DECL_P (pad->src.ref))
		inform (DECL_SOURCE_LOCATION (pad->src.ref),
			"source object declared here");
	      else if (EXPR_HAS_LOCATION (pad->src.ref))
		inform (EXPR_LOCATION (pad->src.ref),
			"source object allocated here");
	    }
	  TREE_NO_WARNING (exp) = true;
	}

      return warned;
    }

  if (tree_int_cst_lt (maxobjsize, bndrng[0]))
    {
      if (bndrng[0] == bndrng[1])
	warned = (func
		  ? warning_at (loc, opt,
				"%K%qD specified size %E "
				"exceeds maximum object size %E",
				exp, func, bndrng[0], maxobjsize)
		  : warning_at (loc, opt,
				"%Kspecified size %E "
				"exceeds maximum object size %E",
				exp, bndrng[0], maxobjsize));
      else
	warned = (func
		  ? warning_at (loc, opt,
				"%K%qD specified size between %E and %E "
				"exceeds maximum object size %E",
				exp, func,
				bndrng[0], bndrng[1], maxobjsize)
		  : warning_at (loc, opt,
				"%Kspecified size between %E and %E "
				"exceeds maximum object size %E",
				exp, bndrng[0], bndrng[1], maxobjsize));
    }
  else if (!size || tree_int_cst_le (bndrng[0], size))
    return false;
  else if (tree_int_cst_equal (bndrng[0], bndrng[1]))
    warned = (func
	      ? warning_at (loc, OPT_Wstringop_overflow_,
			    "%K%qD specified bound %E exceeds "
			    "destination size %E",
			    exp, func, bndrng[0], size)
	      : warning_at (loc, OPT_Wstringop_overflow_,
			    "%Kspecified bound %E exceeds "
			    "destination size %E",
			    exp, bndrng[0], size));
  else
    warned = (func
	      ? warning_at (loc, OPT_Wstringop_overflow_,
			    "%K%qD specified bound [%E, %E] exceeds "
			    "destination size %E",
			    exp, func, bndrng[0], bndrng[1], size)
	      : warning_at (loc, OPT_Wstringop_overflow_,
			    "%Kspecified bound [%E, %E] exceeds "
			    "destination size %E",
			    exp, bndrng[0], bndrng[1], size));

  if (warned)
    {
      if (pad && pad->dst.ref)
	{
	  if (DECL_P (pad->dst.ref))
	    inform (DECL_SOURCE_LOCATION (pad->dst.ref),
		    "destination object declared here");
	  else if (EXPR_HAS_LOCATION (pad->dst.ref))
	    inform (EXPR_LOCATION (pad->dst.ref),
		    "destination object allocated here");
	}
      TREE_NO_WARNING (exp) = true;
    }

  return warned;
}
/* For an expression EXP issue an access warning controlled by option OPT
   with access to a region SIZE bytes in size in the RANGE of sizes.
   WRITE is true for a write access, READ for a read access, neither for
   a call that may or may not perform an access but for which the range
   is expected to be valid.
   Returns true when a warning has been issued.  */

static bool
warn_for_access (location_t loc, tree func, tree exp, int opt, tree range[2],
		 tree size, bool write, bool read)
{
  bool warned = false;

  if (write && read)
    {
      if (tree_int_cst_equal (range[0], range[1]))
	warned = (func
		  ? warning_n (loc, opt, tree_to_uhwi (range[0]),
			       "%K%qD accessing %E byte in a region "
			       "of size %E",
			       "%K%qD accessing %E bytes in a region "
			       "of size %E",
			       exp, func, range[0], size)
		  : warning_n (loc, opt, tree_to_uhwi (range[0]),
			       "%Kaccessing %E byte in a region "
			       "of size %E",
			       "%Kaccessing %E bytes in a region "
			       "of size %E",
			       exp, range[0], size));
      else if (tree_int_cst_sign_bit (range[1]))
	{
	  /* Avoid printing the upper bound if it's invalid.  */
	  warned = (func
		    ? warning_at (loc, opt,
				  "%K%qD accessing %E or more bytes in "
				  "a region of size %E",
				  exp, func, range[0], size)
		    : warning_at (loc, opt,
				  "%Kaccessing %E or more bytes in "
				  "a region of size %E",
				  exp, range[0], size));
	}
      else
	warned = (func
		  ? warning_at (loc, opt,
				"%K%qD accessing between %E and %E bytes "
				"in a region of size %E",
				exp, func, range[0], range[1], size)
		  : warning_at (loc, opt,
				"%Kaccessing between %E and %E bytes "
				"in a region of size %E",
				exp, range[0], range[1], size));
    }
  else if (write)
    {
      if (tree_int_cst_equal (range[0], range[1]))
	warned = (func
		  ? warning_n (loc, opt, tree_to_uhwi (range[0]),
			       "%K%qD writing %E byte into a region "
			       "of size %E overflows the destination",
			       "%K%qD writing %E bytes into a region "
			       "of size %E overflows the destination",
			       exp, func, range[0], size)
		  : warning_n (loc, opt, tree_to_uhwi (range[0]),
			       "%Kwriting %E byte into a region "
			       "of size %E overflows the destination",
			       "%Kwriting %E bytes into a region "
			       "of size %E overflows the destination",
			       exp, range[0], size));
      else if (tree_int_cst_sign_bit (range[1]))
	{
	  /* Avoid printing the upper bound if it's invalid.  */
	  warned = (func
		    ? warning_at (loc, opt,
				  "%K%qD writing %E or more bytes into "
				  "a region of size %E overflows "
				  "the destination",
				  exp, func, range[0], size)
		    : warning_at (loc, opt,
				  "%Kwriting %E or more bytes into "
				  "a region of size %E overflows "
				  "the destination",
				  exp, range[0], size));
	}
      else
	warned = (func
		  ? warning_at (loc, opt,
				"%K%qD writing between %E and %E bytes "
				"into a region of size %E overflows "
				"the destination",
				exp, func, range[0], range[1], size)
		  : warning_at (loc, opt,
				"%Kwriting between %E and %E bytes "
				"into a region of size %E overflows "
				"the destination",
				exp, range[0], range[1], size));
    }
  else if (read)
    {
      if (tree_int_cst_equal (range[0], range[1]))
	warned = (func
		  ? warning_n (loc, OPT_Wstringop_overread,
			       tree_to_uhwi (range[0]),
			       "%K%qD reading %E byte from a region of size %E",
			       "%K%qD reading %E bytes from a region of size %E",
			       exp, func, range[0], size)
		  : warning_n (loc, OPT_Wstringop_overread,
			       tree_to_uhwi (range[0]),
			       "%Kreading %E byte from a region of size %E",
			       "%Kreading %E bytes from a region of size %E",
			       exp, range[0], size));
      else if (tree_int_cst_sign_bit (range[1]))
	{
	  /* Avoid printing the upper bound if it's invalid.  */
	  warned = (func
		    ? warning_at (loc, OPT_Wstringop_overread,
				  "%K%qD reading %E or more bytes from "
				  "a region of size %E",
				  exp, func, range[0], size)
		    : warning_at (loc, OPT_Wstringop_overread,
				  "%Kreading %E or more bytes from a region "
				  "of size %E",
				  exp, range[0], size));
	}
      else
	warned = (func
		  ? warning_at (loc, OPT_Wstringop_overread,
				"%K%qD reading between %E and %E bytes from "
				"a region of size %E",
				exp, func, range[0], range[1], size)
		  : warning_at (loc, OPT_Wstringop_overread,
				"%Kreading between %E and %E bytes from "
				"a region of size %E",
				exp, range[0], range[1], size));
    }
  else
    {
      if (tree_int_cst_equal (range[0], range[1]))
	warned = (func
		  ? warning_n (loc, OPT_Wstringop_overread,
			       tree_to_uhwi (range[0]),
			       "%K%qD expecting %E byte in a region of size %E",
			       "%K%qD expecting %E bytes in a region of size %E",
			       exp, func, range[0], size)
		  : warning_n (loc, OPT_Wstringop_overread,
			       tree_to_uhwi (range[0]),
			       "%Kexpecting %E byte in a region of size %E",
			       "%Kexpecting %E bytes in a region of size %E",
			       exp, range[0], size));
      else if (tree_int_cst_sign_bit (range[1]))
	{
	  /* Avoid printing the upper bound if it's invalid.  */
	  warned = (func
		    ? warning_at (loc, OPT_Wstringop_overread,
				  "%K%qD expecting %E or more bytes in a region "
				  "of size %E",
				  exp, func, range[0], size)
		    : warning_at (loc, OPT_Wstringop_overread,
				  "%Kexpecting %E or more bytes in a region "
				  "of size %E",
				  exp, range[0], size));
	}
      else
	warned = (func
		  ? warning_at (loc, OPT_Wstringop_overread,
				"%K%qD expecting between %E and %E bytes in "
				"a region of size %E",
				exp, func, range[0], range[1], size)
		  : warning_at (loc, OPT_Wstringop_overread,
				"%Kexpecting between %E and %E bytes in "
				"a region of size %E",
				exp, range[0], range[1], size));
    }

  if (warned)
    TREE_NO_WARNING (exp) = true;

  return warned;
}
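/* Illustrative example (not from this file; the buffer and the exact
   diagnostic text are made up for exposition): for a call like

     char buf[4];
     memcpy (buf, src, 8);

   the destination check ends up here with WRITE set, RANGE = {8, 8},
   and SIZE = 4, and the first branch of the write case above fires,
   producing a diagnostic along the lines of

     warning: 'memcpy' writing 8 bytes into a region of size 4
     overflows the destination [-Wstringop-overflow=]  */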
/* Issue an inform message describing the target of an access REF.
   MODE identifies the access as a read, a write, or both.  */

static void
inform_access (const access_ref &ref, access_mode mode)
{
  if (!ref.ref)
    return;

  /* Convert offset range and avoid including a zero range since it
     isn't necessarily meaningful.  */
  HOST_WIDE_INT diff_min = tree_to_shwi (TYPE_MIN_VALUE (ptrdiff_type_node));
  HOST_WIDE_INT diff_max = tree_to_shwi (TYPE_MAX_VALUE (ptrdiff_type_node));
  HOST_WIDE_INT minoff;
  HOST_WIDE_INT maxoff = diff_max;
  if (wi::fits_shwi_p (ref.offrng[0]))
    minoff = ref.offrng[0].to_shwi ();
  else
    minoff = ref.offrng[0] < 0 ? diff_min : diff_max;

  if (wi::fits_shwi_p (ref.offrng[1]))
    maxoff = ref.offrng[1].to_shwi ();

  if (maxoff <= diff_min || maxoff >= diff_max)
    /* Avoid mentioning an upper bound that's equal to or in excess
       of the maximum of ptrdiff_t.  */
    maxoff = minoff;

  /* Convert size range and always include it since all sizes are
     meaningful.  */
  unsigned long long minsize = 0, maxsize = 0;
  if (wi::fits_shwi_p (ref.sizrng[0])
      && wi::fits_shwi_p (ref.sizrng[1]))
    {
      minsize = ref.sizrng[0].to_shwi ();
      maxsize = ref.sizrng[1].to_shwi ();
    }

  char sizestr[80];
  location_t loc;
  tree allocfn = NULL_TREE;
  if (TREE_CODE (ref.ref) == SSA_NAME)
    {
      gimple *stmt = SSA_NAME_DEF_STMT (ref.ref);
      gcc_assert (is_gimple_call (stmt));
      loc = gimple_location (stmt);
      allocfn = gimple_call_fndecl (stmt);
      if (!allocfn)
	/* Handle calls through pointers to functions.  */
	allocfn = gimple_call_fn (stmt);

      /* SIZRNG doesn't necessarily have the same range as the allocation
	 size determined by gimple_call_alloc_size ().  */
      if (minsize == maxsize)
	sprintf (sizestr, "%llu", minsize);
      else
	sprintf (sizestr, "[%llu, %llu]", minsize, maxsize);
    }
  else if (DECL_P (ref.ref))
    loc = DECL_SOURCE_LOCATION (ref.ref);
  else if (EXPR_P (ref.ref) && EXPR_HAS_LOCATION (ref.ref))
    loc = EXPR_LOCATION (ref.ref);
  else
    return;

  if (mode == access_read_write || mode == access_write_only)
    {
      if (allocfn == NULL_TREE)
	{
	  if (minoff == maxoff)
	    {
	      if (minoff == 0)
		inform (loc, "destination object %qE", ref.ref);
	      else
		inform (loc, "at offset %wi into destination object %qE",
			minoff, ref.ref);
	    }
	  else
	    inform (loc, "at offset [%wi, %wi] into destination object %qE",
		    minoff, maxoff, ref.ref);
	  return;
	}

      if (minoff == maxoff)
	{
	  if (minoff == 0)
	    inform (loc, "destination object of size %s allocated by %qE",
		    sizestr, allocfn);
	  else
	    inform (loc,
		    "at offset %wi into destination object of size %s "
		    "allocated by %qE", minoff, sizestr, allocfn);
	}
      else
	inform (loc,
		"at offset [%wi, %wi] into destination object of size %s "
		"allocated by %qE",
		minoff, maxoff, sizestr, allocfn);
      return;
    }

  if (DECL_P (ref.ref))
    {
      if (minoff == maxoff)
	{
	  if (minoff == 0)
	    inform (loc, "source object %qD", ref.ref);
	  else
	    inform (loc, "at offset %wi into source object %qD",
		    minoff, ref.ref);
	}
      else
	inform (loc, "at offset [%wi, %wi] into source object %qD",
		minoff, maxoff, ref.ref);
      return;
    }

  if (minoff == maxoff)
    {
      if (minoff == 0)
	inform (loc, "source object of size %s allocated by %qE",
		sizestr, allocfn);
      else
	inform (loc,
		"at offset %wi into source object of size %s "
		"allocated by %qE", minoff, sizestr, allocfn);
    }
  else
    inform (loc,
	    "at offset [%wi, %wi] into source object of size %s "
	    "allocated by %qE",
	    minoff, maxoff, sizestr, allocfn);
}
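/* Illustrative example (not from this file): after a warning for

     char *p = (char *) malloc (4);
     memset (p, 0, 8);

   inform_access follows up with a note identifying the object, along
   the lines of

     note: destination object of size 4 allocated by 'malloc'

   anchored at the allocation statement's location.  */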
/* Helper to set RANGE to the range of BOUND if it's nonnull, bounded
   by BNDRNG if nonnull and valid.  */

static void
get_size_range (tree bound, tree range[2], const offset_int bndrng[2])
{
  if (bound)
    get_size_range (bound, range);

  if (!bndrng || (bndrng[0] == 0 && bndrng[1] == HOST_WIDE_INT_M1U))
    return;

  if (range[0] && TREE_CODE (range[0]) == INTEGER_CST)
    {
      offset_int r[] =
	{ wi::to_offset (range[0]), wi::to_offset (range[1]) };
      if (r[0] < bndrng[0])
	range[0] = wide_int_to_tree (sizetype, bndrng[0]);
      if (bndrng[1] < r[1])
	range[1] = wide_int_to_tree (sizetype, bndrng[1]);
    }
  else
    {
      range[0] = wide_int_to_tree (sizetype, bndrng[0]);
      range[1] = wide_int_to_tree (sizetype, bndrng[1]);
    }
}
/* Try to verify that the sizes and lengths of the arguments to a string
   manipulation function given by EXP are within valid bounds and that
   the operation does not lead to buffer overflow or read past the end.
   Arguments other than EXP may be null.  When non-null, the arguments
   have the following meaning:
   DSTWRITE is the number of bytes written into the destination obtained
   from the user-supplied size argument to the function (such as in
   memcpy(DST, SRC, DSTWRITE) or strncpy(DST, SRC, DSTWRITE)).
   MAXREAD is the user-supplied bound on the length of the source sequence
   (such as in strncat(d, s, N)).  It specifies the upper limit on the number
   of bytes to write.  If NULL, it's taken to be the same as DSTWRITE.
   SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
   expression EXP is a string function call (as opposed to a memory call
   like memcpy).  As an exception, SRCSTR can also be an integer denoting
   the precomputed size of the source string or object (for functions like
   memcpy).
   DSTSIZE is the size of the destination object.

   When DSTWRITE is null the number of bytes to write is checked only
   against the maximum object size.

   MODE identifies the access as read-only, write-only, or read-write;
   it describes neither for simple size checks in calls to functions that
   neither read from nor write to the region.

   When nonnull, PAD points to a more detailed description of the access.

   If the call is successfully verified as safe return true, otherwise
   return false.  */

static bool
check_access (tree exp, tree dstwrite,
	      tree maxread, tree srcstr, tree dstsize,
	      access_mode mode, const access_data *pad /* = NULL */)
{
  /* The size of the largest object is half the address space, or
     PTRDIFF_MAX.  (This is way too permissive.)  */
  tree maxobjsize = max_object_size ();

  /* Either an approximate/minimum length of the source string for
     string functions or the size of the source object for raw memory
     functions.  */
  tree slen = NULL_TREE;

  /* The range of the access in bytes; first set to the write access
     for functions that write and then read for those that also (or
     just) read.  */
  tree range[2] = { NULL_TREE, NULL_TREE };

  /* Set to true when the exact number of bytes written by a string
     function like strcpy is not known and the only thing that is
     known is that it must be at least one (for the terminating nul).  */
  bool at_least_one = false;
  if (srcstr)
    {
      /* SRCSTR is normally a pointer to string but as a special case
	 it can be an integer denoting the length of a string.  */
      if (POINTER_TYPE_P (TREE_TYPE (srcstr)))
	{
	  if (!check_nul_terminated_array (exp, srcstr, maxread))
	    return false;
	  /* Try to determine the range of lengths the source string
	     refers to.  If it can be determined and is less than
	     the upper bound given by MAXREAD add one to it for
	     the terminating nul.  Otherwise, set it to one for
	     the same reason, or to MAXREAD as appropriate.  */
	  c_strlen_data lendata = { };
	  get_range_strlen (srcstr, &lendata, /* eltsize = */ 1);
	  range[0] = lendata.minlen;
	  range[1] = lendata.maxbound ? lendata.maxbound : lendata.maxlen;
	  slen = range[0];
	  if (range[0]
	      && TREE_CODE (range[0]) == INTEGER_CST
	      && TREE_CODE (range[1]) == INTEGER_CST
	      && (!maxread || TREE_CODE (maxread) == INTEGER_CST))
	    {
	      if (maxread && tree_int_cst_le (maxread, range[0]))
		range[0] = range[1] = maxread;
	      else
		range[0] = fold_build2 (PLUS_EXPR, size_type_node,
					range[0], size_one_node);

	      if (maxread && tree_int_cst_le (maxread, range[1]))
		range[1] = maxread;
	      else if (!integer_all_onesp (range[1]))
		range[1] = fold_build2 (PLUS_EXPR, size_type_node,
					range[1], size_one_node);
	    }
	  else
	    {
	      at_least_one = true;
	      slen = size_one_node;
	    }
	}
      else
	slen = srcstr;
    }

  if (!dstwrite && !maxread)
    {
      /* When the only available piece of data is the object size
	 there is nothing to do.  */
      if (!slen)
	return true;

      /* Otherwise, when the length of the source sequence is known
	 (as with strlen), set DSTWRITE to it.  */
      if (!range[0])
	dstwrite = slen;
    }

  if (!dstsize)
    dstsize = maxobjsize;

  /* Set RANGE to that of DSTWRITE if non-null, bounded by PAD->DST.BNDRNG
     if valid.  */
  get_size_range (dstwrite, range, pad ? pad->dst.bndrng : NULL);

  tree func = get_callee_fndecl (exp);
  /* Read vs write access by built-ins can be determined from the const
     qualifiers on the pointer argument.  In the absence of attribute
     access, non-const qualified pointer arguments to user-defined
     functions are assumed to both read and write the objects.  */
  const bool builtin = func ? fndecl_built_in_p (func) : false;

  /* First check the number of bytes to be written against the maximum
     object size.  */
  if (range[0]
      && TREE_CODE (range[0]) == INTEGER_CST
      && tree_int_cst_lt (maxobjsize, range[0]))
    {
      location_t loc = tree_nonartificial_location (exp);
      loc = expansion_point_location_if_in_system_header (loc);

      maybe_warn_for_bound (OPT_Wstringop_overflow_, loc, exp, func, range,
			    NULL_TREE, pad);
      return false;
    }

  /* The number of bytes to write is "exact" if DSTWRITE is non-null,
     constant, and in range of unsigned HOST_WIDE_INT.  */
  bool exactwrite = dstwrite && tree_fits_uhwi_p (dstwrite);

  /* Next check the number of bytes to be written against the destination
     object size.  */
  if (range[0] || !exactwrite || integer_all_onesp (dstwrite))
    {
      if (range[0]
	  && TREE_CODE (range[0]) == INTEGER_CST
	  && ((tree_fits_uhwi_p (dstsize)
	       && tree_int_cst_lt (dstsize, range[0]))
	      || (dstwrite
		  && tree_fits_uhwi_p (dstwrite)
		  && tree_int_cst_lt (dstwrite, range[0]))))
	{
	  if (TREE_NO_WARNING (exp)
	      || (pad && pad->dst.ref && TREE_NO_WARNING (pad->dst.ref)))
	    return false;

	  location_t loc = tree_nonartificial_location (exp);
	  loc = expansion_point_location_if_in_system_header (loc);

	  bool warned = false;
	  if (dstwrite == slen && at_least_one)
	    {
	      /* This is a call to strcpy with a destination of 0 size
		 and a source of unknown length.  The call will write
		 at least one byte past the end of the destination.  */
	      warned = (func
			? warning_at (loc, OPT_Wstringop_overflow_,
				      "%K%qD writing %E or more bytes into "
				      "a region of size %E overflows "
				      "the destination",
				      exp, func, range[0], dstsize)
			: warning_at (loc, OPT_Wstringop_overflow_,
				      "%Kwriting %E or more bytes into "
				      "a region of size %E overflows "
				      "the destination",
				      exp, range[0], dstsize));
	    }
	  else
	    {
	      const bool read
		= mode == access_read_only || mode == access_read_write;
	      const bool write
		= mode == access_write_only || mode == access_read_write;
	      warned = warn_for_access (loc, func, exp,
					OPT_Wstringop_overflow_,
					range, dstsize,
					write, read && !builtin);
	    }

	  if (warned)
	    {
	      TREE_NO_WARNING (exp) = true;
	      if (pad)
		inform_access (pad->dst, pad->mode);
	    }

	  /* Return error when an overflow has been detected.  */
	  return false;
	}
    }

  /* Check the maximum length of the source sequence against the size
     of the destination object if known, or against the maximum size
     of an object.  */
  if (maxread)
    {
      /* Set RANGE to that of MAXREAD, bounded by PAD->SRC.BNDRNG if
	 PAD is nonnull and BNDRNG is valid.  */
      get_size_range (maxread, range, pad ? pad->src.bndrng : NULL);

      location_t loc = tree_nonartificial_location (exp);
      loc = expansion_point_location_if_in_system_header (loc);

      tree size = dstsize;
      if (pad && pad->mode == access_read_only)
	size = wide_int_to_tree (sizetype, pad->src.sizrng[1]);

      if (range[0] && maxread && tree_fits_uhwi_p (size))
	{
	  if (tree_int_cst_lt (maxobjsize, range[0]))
	    {
	      maybe_warn_for_bound (OPT_Wstringop_overread, loc, exp, func,
				    range, size, pad);
	      return false;
	    }

	  if (size != maxobjsize && tree_int_cst_lt (size, range[0]))
	    {
	      int opt = (dstwrite || mode != access_read_only
			 ? OPT_Wstringop_overflow_
			 : OPT_Wstringop_overread);
	      maybe_warn_for_bound (opt, loc, exp, func, range, size, pad);
	      return false;
	    }
	}

      maybe_warn_nonstring_arg (func, exp);
    }

  /* Check for reading past the end of SRC.  */
  bool overread = (slen
		   && slen == srcstr
		   && dstwrite
		   && range[0]
		   && TREE_CODE (slen) == INTEGER_CST
		   && tree_int_cst_lt (slen, range[0]));
  /* If none is determined try to get a better answer based on the details
     in PAD.  */
  if (!overread
      && pad
      && pad->src.sizrng[1] >= 0
      && pad->src.offrng[0] >= 0
      && (pad->src.offrng[1] < 0
	  || pad->src.offrng[0] <= pad->src.offrng[1]))
    {
      /* Set RANGE to that of MAXREAD, bounded by PAD->SRC.BNDRNG if
	 PAD is nonnull and BNDRNG is valid.  */
      get_size_range (maxread, range, pad ? pad->src.bndrng : NULL);
      /* Set OVERREAD for reads starting just past the end of an object.  */
      overread = pad->src.sizrng[1] - pad->src.offrng[0] < pad->src.bndrng[0];
      range[0] = wide_int_to_tree (sizetype, pad->src.bndrng[0]);
      slen = size_zero_node;
    }

  if (overread)
    {
      if (TREE_NO_WARNING (exp)
	  || (srcstr && TREE_NO_WARNING (srcstr))
	  || (pad && pad->src.ref && TREE_NO_WARNING (pad->src.ref)))
	return false;

      location_t loc = tree_nonartificial_location (exp);
      loc = expansion_point_location_if_in_system_header (loc);

      const bool read
	= mode == access_read_only || mode == access_read_write;
      if (warn_for_access (loc, func, exp, OPT_Wstringop_overread, range,
			   slen, false, read))
	{
	  TREE_NO_WARNING (exp) = true;
	  if (pad)
	    inform_access (pad->src, access_read_only);
	}
      return false;
    }

  return true;
}
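/* Illustrative example (not from this file): for

     char d[3];
     strncpy (d, "abcdef", 5);

   the expander calls check_access with DSTWRITE = 5 and DSTSIZE = 3;
   the destination check above detects DSTSIZE < RANGE[0] and issues
   a warning roughly of the form

     warning: 'strncpy' writing 5 bytes into a region of size 3
     overflows the destination [-Wstringop-overflow=]  */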
/* A convenience wrapper for check_access above to check access
   by a read-only function like puts.  */

static bool
check_read_access (tree exp, tree src, tree bound /* = NULL_TREE */,
		   int ost /* = 1 */)
{
  if (!warn_stringop_overread)
    return true;

  access_data data (exp, access_read_only, NULL_TREE, false, bound, true);
  compute_objsize (src, ost, &data.src);
  return check_access (exp, /*dstwrite=*/ NULL_TREE, /*maxread=*/ bound,
		       /*srcstr=*/ src, /*dstsize=*/ NULL_TREE, data.mode,
		       &data);
}
/* If STMT is a call to an allocation function, returns the constant
   maximum size of the object allocated by the call represented as
   sizetype.  If nonnull, sets RNG1[] to the range of the size.
   When nonnull, uses RVALS for range information, otherwise calls
   get_range_info to get it.
   Returns null when STMT is not a call to a valid allocation function.  */

tree
gimple_call_alloc_size (gimple *stmt, wide_int rng1[2] /* = NULL */,
			range_query * /* = NULL */)
{
  if (!stmt)
    return NULL_TREE;

  tree allocfntype;
  if (tree fndecl = gimple_call_fndecl (stmt))
    allocfntype = TREE_TYPE (fndecl);
  else
    allocfntype = gimple_call_fntype (stmt);

  if (!allocfntype)
    return NULL_TREE;

  unsigned argidx1 = UINT_MAX, argidx2 = UINT_MAX;
  tree at = lookup_attribute ("alloc_size", TYPE_ATTRIBUTES (allocfntype));
  if (!at)
    {
      if (!gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
	return NULL_TREE;

      argidx1 = 0;
    }

  unsigned nargs = gimple_call_num_args (stmt);

  if (argidx1 == UINT_MAX)
    {
      tree atval = TREE_VALUE (at);
      if (!atval)
	return NULL_TREE;

      argidx1 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
      if (nargs <= argidx1)
	return NULL_TREE;

      atval = TREE_CHAIN (atval);
      if (atval)
	{
	  argidx2 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
	  if (nargs <= argidx2)
	    return NULL_TREE;
	}
    }

  tree size = gimple_call_arg (stmt, argidx1);

  wide_int rng1_buf[2];
  /* If RNG1 is not set, use the buffer.  */
  if (!rng1)
    rng1 = rng1_buf;

  /* Use maximum precision to avoid overflow below.  */
  const int prec = ADDR_MAX_PRECISION;

  {
    tree r[2];
    /* Determine the largest valid range size, including zero.  */
    if (!get_size_range (size, r, SR_ALLOW_ZERO | SR_USE_LARGEST))
      return NULL_TREE;
    rng1[0] = wi::to_wide (r[0], prec);
    rng1[1] = wi::to_wide (r[1], prec);
  }

  if (argidx2 > nargs && TREE_CODE (size) == INTEGER_CST)
    return fold_convert (sizetype, size);

  /* To handle ranges do the math in wide_int and return the product
     of the upper bounds as a constant.  Ignore anti-ranges.  */
  tree n = argidx2 < nargs ? gimple_call_arg (stmt, argidx2) : integer_one_node;
  wide_int rng2[2];
  {
    tree r[2];
    /* As above, use the full non-negative range on failure.  */
    if (!get_size_range (n, r, SR_ALLOW_ZERO | SR_USE_LARGEST))
      return NULL_TREE;
    rng2[0] = wi::to_wide (r[0], prec);
    rng2[1] = wi::to_wide (r[1], prec);
  }

  /* Compute products of both bounds for the caller but return the lesser
     of SIZE_MAX and the product of the upper bounds as a constant.  */
  rng1[0] = rng1[0] * rng2[0];
  rng1[1] = rng1[1] * rng2[1];

  const tree size_max = TYPE_MAX_VALUE (sizetype);
  if (wi::gtu_p (rng1[1], wi::to_wide (size_max, prec)))
    {
      rng1[1] = wi::to_wide (size_max, prec);
      return size_max;
    }

  return wide_int_to_tree (sizetype, rng1[1]);
}
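/* Illustrative example (not from this file; my_calloc is a made-up
   user function):

     __attribute__ ((alloc_size (1, 2)))
     void *my_calloc (size_t, size_t);

     void *p = my_calloc (4, n);   // with n known to be in [0, 10]

   ARGIDX1 and ARGIDX2 identify the two alloc_size operands; the
   function sets RNG1 to the product range {0, 40} and returns the
   upper bound 40 as a sizetype constant.  */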
/* For a function parameter PTR of pointer type, set RNG[] to the range
   of sizes of the object it can point into, obtained from the attribute
   access specification for the current function.
   Return the function parameter on success and null otherwise.  */

static tree
gimple_parm_array_size (tree ptr, wide_int rng[2],
			range_query * /* = NULL */)
{
  /* For a function argument try to determine the byte size of the array
     from the current function declaration (e.g., attribute access or
     related).  */
  tree var = SSA_NAME_VAR (ptr);
  if (!var || TREE_CODE (var) != PARM_DECL)
    return NULL_TREE;

  const unsigned prec = TYPE_PRECISION (sizetype);

  rdwr_map rdwr_idx;
  attr_access *access = get_parm_access (rdwr_idx, var);
  if (!access)
    return NULL_TREE;

  if (access->sizarg != UINT_MAX)
    {
      /* TODO: Try to extract the range from the argument based on
	 those of subsequent assertions or based on known calls to
	 the current function.  */
      return NULL_TREE;
    }

  if (!access->minsize)
    return NULL_TREE;

  /* Only consider ordinary array bound at level 2 (or above if it's
     ever added).  */
  if (warn_array_parameter < 2 && !access->static_p)
    return NULL_TREE;

  rng[0] = wi::zero (prec);
  rng[1] = wi::uhwi (access->minsize, prec);
  /* Multiply the array bound encoded in the attribute by the size
     of what the pointer argument to which it decays points to.  */
  tree eltype = TREE_TYPE (TREE_TYPE (ptr));
  tree size = TYPE_SIZE_UNIT (eltype);
  if (!size || TREE_CODE (size) != INTEGER_CST)
    return NULL_TREE;

  rng[1] *= wi::to_wide (size, prec);
  return var;
}
/* Wrapper around the wide_int overload of get_range that accepts
   offset_int instead.  For middle end expressions returns the same
   result.  For a subset of nonconstant expressions emitted by the front
   end determines a more precise range than would be possible otherwise.  */

static bool
get_offset_range (tree x, gimple *stmt, offset_int r[2], range_query *rvals)
{
  offset_int add = 0;
  if (TREE_CODE (x) == PLUS_EXPR)
    {
      /* Handle constant offsets in pointer addition expressions seen
	 in the front end IL.  */
      tree op = TREE_OPERAND (x, 1);
      if (TREE_CODE (op) == INTEGER_CST)
	{
	  op = fold_convert (signed_type_for (TREE_TYPE (op)), op);
	  add = wi::to_offset (op);
	  x = TREE_OPERAND (x, 0);
	}
    }

  if (TREE_CODE (x) == NOP_EXPR)
    /* Also handle conversions to sizetype seen in the front end IL.  */
    x = TREE_OPERAND (x, 0);

  tree type = TREE_TYPE (x);

  if (TREE_CODE (x) != INTEGER_CST
      && TREE_CODE (x) != SSA_NAME)
    {
      if (TYPE_UNSIGNED (type)
	  && TYPE_PRECISION (type) == TYPE_PRECISION (sizetype))
	type = signed_type_for (type);

      r[0] = wi::to_offset (TYPE_MIN_VALUE (type)) + add;
      r[1] = wi::to_offset (TYPE_MAX_VALUE (type)) + add;
      return true;
    }

  wide_int wr[2];
  if (!get_range (x, stmt, wr, rvals))
    return false;

  signop sgn = SIGNED;
  /* Only convert signed integers or unsigned sizetype to a signed
     offset and avoid converting large positive values in narrower
     types to negative offsets.  */
  if (TYPE_UNSIGNED (type)
      && wr[0].get_precision () < TYPE_PRECISION (sizetype))
    sgn = UNSIGNED;

  r[0] = offset_int::from (wr[0], sgn);
  r[1] = offset_int::from (wr[1], sgn);
  return true;
}
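/* Illustrative example (not from this file): for a front-end expression
   of the form (sizetype) i + 4 with i of type int, the PLUS_EXPR and
   NOP_EXPR handling above strips the conversion and records ADD = 4;
   when no better range for i is available the result is the full type
   range shifted by the constant, roughly {INT_MIN + 4, INT_MAX + 4}.  */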
/* Return the argument that the call STMT to a built-in function returns
   or null if it doesn't.  On success, set OFFRNG[] to the range of offsets
   from the argument reflected in the value returned by the built-in if it
   can be determined, otherwise to 0 and HWI_M1U respectively.  */

static tree
gimple_call_return_array (gimple *stmt, offset_int offrng[2],
			  range_query *rvals)
{
  if (!gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
      || gimple_call_num_args (stmt) < 1)
    return NULL_TREE;

  tree fn = gimple_call_fndecl (stmt);
  switch (DECL_FUNCTION_CODE (fn))
    {
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMMOVE:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET:
    case BUILT_IN_STPCPY:
    case BUILT_IN_STPCPY_CHK:
    case BUILT_IN_STPNCPY:
    case BUILT_IN_STPNCPY_CHK:
    case BUILT_IN_STRCAT:
    case BUILT_IN_STRCAT_CHK:
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STRNCAT:
    case BUILT_IN_STRNCAT_CHK:
    case BUILT_IN_STRNCPY:
    case BUILT_IN_STRNCPY_CHK:
      offrng[0] = offrng[1] = 0;
      return gimple_call_arg (stmt, 0);

    case BUILT_IN_MEMPCPY:
    case BUILT_IN_MEMPCPY_CHK:
      {
	tree off = gimple_call_arg (stmt, 2);
	if (!get_offset_range (off, stmt, offrng, rvals))
	  {
	    offrng[0] = 0;
	    offrng[1] = HOST_WIDE_INT_M1U;
	  }
	return gimple_call_arg (stmt, 0);
      }

    case BUILT_IN_MEMCHR:
      {
	tree off = gimple_call_arg (stmt, 2);
	if (get_offset_range (off, stmt, offrng, rvals))
	  /* A nonnull result points no farther than one byte before
	     the end of the searched region.  */
	  offrng[1] -= 1;
	else
	  offrng[1] = HOST_WIDE_INT_M1U;

	offrng[0] = 0;
	return gimple_call_arg (stmt, 0);
      }

    case BUILT_IN_STRCHR:
    case BUILT_IN_STRRCHR:
    case BUILT_IN_STRSTR:
      offrng[0] = 0;
      offrng[1] = HOST_WIDE_INT_M1U;
      return gimple_call_arg (stmt, 0);

    default:
      break;
    }

  return NULL_TREE;
}
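/* Illustrative example (not from this file): for

     char *q = (char *) mempcpy (d, s, n);   // with n known in [3, 5]

   this function returns D with OFFRNG = {3, 5}, reflecting that
   mempcpy yields a pointer N bytes past the start of the
   destination.  */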
/* Helper to compute the size of the object referenced by the PTR
   expression which must have pointer type, using Object Size type
   OSTYPE (only the least significant 2 bits are used).
   On success, sets PREF->REF to the DECL of the referenced object
   if it's unique, otherwise to null, PREF->OFFRNG to the range of
   offsets into it, and PREF->SIZRNG to the range of sizes of
   the object(s).
   VISITED is used to avoid visiting the same PHI operand multiple
   times, and, when nonnull, RVALS to determine range information.
   Returns true on success, false when a meaningful size (or range)
   cannot be determined.

   The function is intended for diagnostics and should not be used
   to influence code generation or optimization.  */

static bool
compute_objsize (tree ptr, int ostype, access_ref *pref, bitmap *visited,
		 range_query *rvals)
{
  STRIP_NOPS (ptr);

  const bool addr = TREE_CODE (ptr) == ADDR_EXPR;
  if (addr)
    ptr = TREE_OPERAND (ptr, 0);

  if (DECL_P (ptr))
    {
      pref->ref = ptr;

      if (!addr && POINTER_TYPE_P (TREE_TYPE (ptr)))
	{
	  /* Set the maximum size if the reference is to the pointer
	     itself (as opposed to what it points to).  */
	  pref->set_max_size_range ();
	  return true;
	}

      if (tree size = decl_init_size (ptr, false))
	if (TREE_CODE (size) == INTEGER_CST)
	  {
	    pref->sizrng[0] = pref->sizrng[1] = wi::to_offset (size);
	    return true;
	  }

      pref->set_max_size_range ();
      return true;
    }

  const tree_code code = TREE_CODE (ptr);

  if (code == BIT_FIELD_REF)
    {
      tree ref = TREE_OPERAND (ptr, 0);
      if (!compute_objsize (ref, ostype, pref, visited, rvals))
	return false;

      offset_int off = wi::to_offset (pref->eval (TREE_OPERAND (ptr, 2)));
      pref->add_offset (off / BITS_PER_UNIT);
      return true;
    }

  if (code == COMPONENT_REF)
    {
      tree ref = TREE_OPERAND (ptr, 0);
      tree field = TREE_OPERAND (ptr, 1);

      if (ostype == 0)
	{
	  /* In OSTYPE zero (for raw memory functions like memcpy), use
	     the maximum size instead if the identity of the enclosing
	     object cannot be determined.  */
	  if (!compute_objsize (ref, ostype, pref, visited, rvals))
	    return false;

	  /* Otherwise, use the size of the enclosing object and add
	     the offset of the member to the offset computed so far.  */
	  tree offset = byte_position (field);
	  if (TREE_CODE (offset) == INTEGER_CST)
	    pref->add_offset (wi::to_offset (offset));
	  else
	    pref->add_max_offset ();
	  return true;
	}

      pref->ref = field;

      if (!addr && POINTER_TYPE_P (TREE_TYPE (field)))
	{
	  /* Set maximum size if the reference is to the pointer member
	     itself (as opposed to what it points to).  */
	  pref->set_max_size_range ();
	  return true;
	}

      /* SAM is set for array members that might need special treatment.  */
      special_array_member sam;
      tree size = component_ref_size (ptr, &sam);
      if (sam == special_array_member::int_0)
	pref->sizrng[0] = pref->sizrng[1] = 0;
      else if (!pref->trail1special && sam == special_array_member::trail_1)
	pref->sizrng[0] = pref->sizrng[1] = 1;
      else if (size && TREE_CODE (size) == INTEGER_CST)
	pref->sizrng[0] = pref->sizrng[1] = wi::to_offset (size);
      else
	{
	  /* When the size of the member is unknown it's either a flexible
	     array member or a trailing special array member (either zero
	     length or one-element).  Set the size to the maximum minus
	     the constant size of the type.  */
	  pref->sizrng[0] = 0;
	  pref->sizrng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
	  if (tree recsize = TYPE_SIZE_UNIT (TREE_TYPE (ref)))
	    if (TREE_CODE (recsize) == INTEGER_CST)
	      pref->sizrng[1] -= wi::to_offset (recsize);
	}
      return true;
    }

  if (code == ARRAY_REF || code == MEM_REF)
    {
      tree ref = TREE_OPERAND (ptr, 0);
      tree reftype = TREE_TYPE (ref);
      if (code == ARRAY_REF
	  && TREE_CODE (TREE_TYPE (reftype)) == POINTER_TYPE)
	/* Avoid arrays of pointers.  FIXME: Handle pointers to arrays
	   of known bound.  */
	return false;

      if (code == MEM_REF && TREE_CODE (reftype) == POINTER_TYPE)
	{
	  /* Give up for MEM_REFs of vector types; those may be synthesized
	     from multiple assignments to consecutive data members.  See PR
	     93200.
	     FIXME: Deal with this more generally, e.g., by marking up such
	     MEM_REFs at the time they're created.  */
	  reftype = TREE_TYPE (reftype);
	  if (TREE_CODE (reftype) == VECTOR_TYPE)
	    return false;
	}

      if (!compute_objsize (ref, ostype, pref, visited, rvals))
	return false;

      offset_int orng[2];
      tree off = pref->eval (TREE_OPERAND (ptr, 1));
      if (!get_offset_range (off, NULL, orng, rvals))
	{
	  /* Set ORNG to the maximum offset representable in ptrdiff_t.  */
	  orng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
	  orng[0] = -orng[1] - 1;
	}

      if (TREE_CODE (ptr) == ARRAY_REF)
	{
	  /* Convert the array index range determined above to a byte
	     offset.  */
	  tree lowbnd = array_ref_low_bound (ptr);
	  if (!integer_zerop (lowbnd) && tree_fits_uhwi_p (lowbnd))
	    {
	      /* Adjust the index by the low bound of the array domain
		 (normally zero but 1 in Fortran).  */
	      unsigned HOST_WIDE_INT lb = tree_to_uhwi (lowbnd);
	      orng[0] -= lb;
	      orng[1] -= lb;
	    }

	  tree eltype = TREE_TYPE (ptr);
	  tree tpsize = TYPE_SIZE_UNIT (eltype);
	  if (!tpsize || TREE_CODE (tpsize) != INTEGER_CST)
	    {
	      pref->add_max_offset ();
	      return true;
	    }

	  offset_int sz = wi::to_offset (tpsize);
	  orng[0] *= sz;
	  orng[1] *= sz;

	  if (ostype && TREE_CODE (eltype) == ARRAY_TYPE)
	    {
	      /* Except for the permissive raw memory functions which use
		 the size of the whole object determined above, use the size
		 of the referenced array.  Because the overall offset is from
		 the beginning of the complete array object add this overall
		 offset to the size of array.  */
	      offset_int sizrng[2] =
		{
		  pref->offrng[0] + orng[0] + sz,
		  pref->offrng[1] + orng[1] + sz
		};
	      if (sizrng[1] < sizrng[0])
		std::swap (sizrng[0], sizrng[1]);
	      if (sizrng[0] >= 0 && sizrng[0] <= pref->sizrng[0])
		pref->sizrng[0] = sizrng[0];
	      if (sizrng[1] >= 0 && sizrng[1] <= pref->sizrng[1])
		pref->sizrng[1] = sizrng[1];
	    }
	}

      pref->add_offset (orng[0], orng[1]);
      return true;
    }

  if (code == TARGET_MEM_REF)
    {
      tree ref = TREE_OPERAND (ptr, 0);
      if (!compute_objsize (ref, ostype, pref, visited, rvals))
	return false;

      /* TODO: Handle remaining operands.  Until then, add maximum offset.  */
      pref->ref = ptr;
      pref->add_max_offset ();
      return true;
    }

  if (code == INTEGER_CST)
    {
      /* Pointer constants other than null are most likely the result
	 of erroneous null pointer addition/subtraction.  Set size to
	 zero.  For null pointers, set size to the maximum for now
	 since those may be the result of jump threading.  */
      if (integer_zerop (ptr))
	pref->set_max_size_range ();
      else
	pref->sizrng[0] = pref->sizrng[1] = 0;
      pref->ref = ptr;

      return true;
    }

  if (code == STRING_CST)
    {
      pref->sizrng[0] = pref->sizrng[1] = TREE_STRING_LENGTH (ptr);
      pref->ref = ptr;
      return true;
    }

  if (code == POINTER_PLUS_EXPR)
    {
      tree ref = TREE_OPERAND (ptr, 0);
      if (!compute_objsize (ref, ostype, pref, visited, rvals))
	return false;

      offset_int orng[2];
      tree off = pref->eval (TREE_OPERAND (ptr, 1));
      if (get_offset_range (off, NULL, orng, rvals))
	pref->add_offset (orng[0], orng[1]);
      else
	pref->add_max_offset ();
      return true;
    }

  if (code == VIEW_CONVERT_EXPR)
    {
      ptr = TREE_OPERAND (ptr, 0);
      return compute_objsize (ptr, ostype, pref, visited, rvals);
    }

  if (TREE_CODE (ptr) == SSA_NAME)
    {
      gimple *stmt = SSA_NAME_DEF_STMT (ptr);
      if (is_gimple_call (stmt))
	{
	  /* If STMT is a call to an allocation function get the size
	     from its argument(s).  If successful, also set *PREF->REF
	     to PTR for the caller to include in diagnostics.  */
	  wide_int wr[2];
	  if (gimple_call_alloc_size (stmt, wr, rvals))
	    {
	      pref->ref = ptr;
	      pref->sizrng[0] = offset_int::from (wr[0], UNSIGNED);
	      pref->sizrng[1] = offset_int::from (wr[1], UNSIGNED);
	      /* Constrain both bounds to a valid size.  */
	      offset_int maxsize = wi::to_offset (max_object_size ());
	      if (pref->sizrng[0] > maxsize)
		pref->sizrng[0] = maxsize;
	      if (pref->sizrng[1] > maxsize)
		pref->sizrng[1] = maxsize;
	    }
	  else
	    {
	      /* For functions known to return one of their pointer arguments
		 try to determine what the returned pointer points to, and on
		 success add OFFRNG which was set to the offset added by
		 the function (e.g., memchr) to the overall offset.  */
	      offset_int offrng[2];
	      if (tree ret = gimple_call_return_array (stmt, offrng, rvals))
		{
		  if (!compute_objsize (ret, ostype, pref, visited, rvals))
		    return false;

		  /* Cap OFFRNG[1] to at most the remaining size of
		     the object.  */
		  offset_int remrng[2];
		  remrng[1] = pref->size_remaining (remrng);
		  if (remrng[1] < offrng[1])
		    offrng[1] = remrng[1];
		  pref->add_offset (offrng[0], offrng[1]);
		}
	      else
		{
		  /* For other calls that might return arbitrary pointers
		     including into the middle of objects set the size
		     range to maximum, clear PREF->BASE0, and also set
		     PREF->REF to include in diagnostics.  */
		  pref->set_max_size_range ();
		  pref->base0 = false;
		  pref->ref = ptr;
		}
	    }
	  return true;
	}

      if (gimple_nop_p (stmt))
	{
	  /* For a function argument try to determine the byte size
	     of the array from the current function declaration
	     (e.g., attribute access or related).  */
	  wide_int wr[2];
	  if (tree ref = gimple_parm_array_size (ptr, wr, rvals))
	    {
	      pref->ref = ref;
	      pref->sizrng[0] = offset_int::from (wr[0], UNSIGNED);
	      pref->sizrng[1] = offset_int::from (wr[1], UNSIGNED);
	      return true;
	    }

	  pref->set_max_size_range ();
	  pref->base0 = false;
	  if (tree var = SSA_NAME_VAR (ptr))
	    if (TREE_CODE (var) == PARM_DECL)
	      pref->ref = var;

	  return true;
	}

      /* TODO: Handle PHI.  */

      if (!is_gimple_assign (stmt))
	{
	  /* Clear BASE0 since the assigned pointer might point into
	     the middle of the object, set the maximum size range and,
	     if the SSA_NAME refers to a function argument, set
	     PREF->REF to it.  */
	  pref->base0 = false;
	  pref->set_max_size_range ();
	  if (tree var = SSA_NAME_VAR (ptr))
	    if (TREE_CODE (var) == PARM_DECL)
	      pref->ref = var;
	  return true;
	}

      ptr = gimple_assign_rhs1 (stmt);

      tree_code code = gimple_assign_rhs_code (stmt);

      if (code == POINTER_PLUS_EXPR
	  && TREE_CODE (TREE_TYPE (ptr)) == POINTER_TYPE)
	{
	  /* Compute the size of the object first.  */
	  if (!compute_objsize (ptr, ostype, pref, visited, rvals))
	    return false;

	  offset_int orng[2];
	  tree off = gimple_assign_rhs2 (stmt);
	  if (get_offset_range (off, stmt, orng, rvals))
	    pref->add_offset (orng[0], orng[1]);
	  else
	    pref->add_max_offset ();
	  return true;
	}

      if (code == ADDR_EXPR)
	return compute_objsize (ptr, ostype, pref, visited, rvals);

      /* This could be an assignment from a nonlocal pointer.  Save PTR
	 to mention in diagnostics but otherwise treat it as a pointer
	 to an unknown object.  */
      pref->ref = ptr;
    }

  /* Assume all other expressions point into an unknown object
     of the maximum valid size.  */
  pref->base0 = false;
  pref->set_max_size_range ();
  return true;
}
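/* Illustrative example (not from this file; the numbers assume the
   obvious layout): for

     struct S { char a[4]; char b[6]; } s;
     char *p = &s.b[2];

   computing the size of *P with OSTYPE 1 follows the COMPONENT_REF
   and ARRAY_REF cases above, yielding roughly SIZRNG = {6, 6} for
   the member array and OFFRNG = {2, 2}, i.e. 4 bytes remaining;
   with the permissive OSTYPE 0 the size of the whole object S is
   used instead.  */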
/* A "public" wrapper around the above.  Clients should use this overload
   instead.  */

tree
compute_objsize (tree ptr, int ostype, access_ref *pref,
		 range_query *rvals /* = NULL */)
{
  bitmap visited = NULL;

  bool success
    = compute_objsize (ptr, ostype, pref, &visited, rvals);

  if (visited)
    BITMAP_FREE (visited);

  if (!success)
    return NULL_TREE;

  offset_int maxsize = pref->size_remaining ();
  if (pref->base0 && pref->offrng[0] < 0 && pref->offrng[1] >= 0)
    pref->offrng[0] = 0;
  return wide_int_to_tree (sizetype, maxsize);
}
/* Transitional wrapper around the above.  The function should be removed
   once callers transition to one of the two above.  */

tree
compute_objsize (tree ptr, int ostype, tree *pdecl /* = NULL */,
		 tree *poff /* = NULL */, range_query *rvals /* = NULL */)
{
  /* Set the initial offsets to zero and size to negative to indicate
     none has been computed yet.  */
  access_ref ref;
  tree size = compute_objsize (ptr, ostype, &ref, rvals);
  if (!size || !ref.base0)
    return NULL_TREE;

  if (pdecl)
    *pdecl = ref.ref;

  if (poff)
    *poff = wide_int_to_tree (ptrdiff_type_node, ref.offrng[ref.offrng[0] < 0]);

  return size;
}
/* Helper to determine and check the sizes of the source and the destination
   of calls to __builtin_{bzero,memcpy,mempcpy,memset} calls.  EXP is the
   call expression, DEST is the destination argument, SRC is the source
   argument or null, and LEN is the number of bytes.  Use Object Size type-0
   regardless of the OPT_Wstringop_overflow_ setting.  Return true on success
   (no overflow or invalid sizes), false otherwise.  */

static bool
check_memop_access (tree exp, tree dest, tree src, tree size)
{
  /* For functions like memset and memcpy that operate on raw memory
     try to determine the size of the largest source and destination
     object using type-0 Object Size regardless of the object size
     type specified by the option.  */
  access_data data (exp, access_read_write);
  tree srcsize = src ? compute_objsize (src, 0, &data.src) : NULL_TREE;
  tree dstsize = compute_objsize (dest, 0, &data.dst);

  return check_access (exp, size, /*maxread=*/NULL_TREE,
		       srcsize, dstsize, data.mode, &data);
}
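/* Illustrative example (not from this file): for

     char a[2], b[8];
     memcpy (a, b, 8);

   check_memop_access computes DSTSIZE = 2 and SRCSIZE = 8 using
   type-0 Object Size, and check_access then diagnoses the 8-byte
   write into the 2-byte destination.  */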
/* Validate memchr arguments without performing any expansion.
   Return NULL_RTX.  */

static rtx
expand_builtin_memchr (tree exp, rtx)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree len = CALL_EXPR_ARG (exp, 2);

  check_read_access (exp, arg1, len, 0);

  return NULL_RTX;
}
/* Expand a call EXP to the memcpy builtin.
   Return NULL_RTX if we failed, the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memcpy (tree exp, rtx target)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);

  check_memop_access (exp, dest, src, len);

  return expand_builtin_memory_copy_args (dest, src, len, target, exp,
					  /*retmode=*/ RETURN_BEGIN, false);
}
/* Check a call EXP to the memmove built-in for validity.
   Return NULL_RTX on both success and failure.  */

static rtx
expand_builtin_memmove (tree exp, rtx target)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);

  check_memop_access (exp, dest, src, len);

  return expand_builtin_memory_copy_args (dest, src, len, target, exp,
					  /*retmode=*/ RETURN_BEGIN, true);
}
/* Expand a call EXP to the mempcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_mempcpy (tree exp, rtx target)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);

  /* Policy does not generally allow using compute_objsize (which
     is used internally by check_memop_access) to change code generation
     or drive optimization decisions.

     In this instance it is safe because the code we generate has
     the same semantics regardless of the return value of
     check_memop_access.  Exactly the same amount of data is copied
     and the return value is exactly the same in both cases.

     Furthermore, check_memop_access always uses mode 0 for the call to
     compute_objsize, so the imprecise nature of compute_objsize is
     avoided.  */

  /* Avoid expanding mempcpy into memcpy when the call is determined
     to overflow the buffer.  This also prevents the same overflow
     from being diagnosed again when expanding memcpy.  */
  if (!check_memop_access (exp, dest, src, len))
    return NULL_RTX;

  return expand_builtin_mempcpy_args (dest, src, len,
				      target, exp, /*retmode=*/ RETURN_END);
}
/* Helper function to do the actual work for expand of memory copy family
   functions (memcpy, mempcpy, stpcpy).  Expansion should assign LEN bytes
   of memory from SRC to DEST and assign to TARGET if convenient.  Return
   value is based on RETMODE argument.  */

static rtx
expand_builtin_memory_copy_args (tree dest, tree src, tree len,
				 rtx target, tree exp, memop_ret retmode,
				 bool might_overlap)
{
  unsigned int src_align = get_pointer_alignment (src);
  unsigned int dest_align = get_pointer_alignment (dest);
  rtx dest_mem, src_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;
  unsigned HOST_WIDE_INT min_size;
  unsigned HOST_WIDE_INT max_size;
  unsigned HOST_WIDE_INT probable_max_size;

  bool is_move_done;

  /* If DEST is not a pointer type, call the normal function.  */
  if (dest_align == 0)
    return NULL_RTX;

  /* If either SRC is not a pointer type, don't do this
     operation in-line.  */
  if (src_align == 0)
    return NULL_RTX;

  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
			    &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;
  dest_mem = get_memory_rtx (dest, len);
  set_mem_align (dest_mem, dest_align);
  len_rtx = expand_normal (len);
  determine_block_size (len, len_rtx, &min_size, &max_size,
			&probable_max_size);

  /* Try to get the byte representation of the constant SRC points to,
     with its byte size in NBYTES.  */
  unsigned HOST_WIDE_INT nbytes;
  const char *rep = getbyterep (src, &nbytes);

  /* If the function's constant bound LEN_RTX is less than or equal
     to the byte size of the representation of the constant argument,
     and if block move would be done by pieces, we can avoid loading
     the bytes from memory and only store the computed constant.
     This works in the overlap (memmove) case as well because
     store_by_pieces just generates a series of stores of constants
     from the representation returned by getbyterep().  */
  if (rep
      && CONST_INT_P (len_rtx)
      && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes
      && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
			      CONST_CAST (char *, rep),
			      dest_align, false))
    {
      dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				  builtin_memcpy_read_str,
				  CONST_CAST (char *, rep),
				  dest_align, false, retmode);
      dest_mem = force_operand (XEXP (dest_mem, 0), target);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  src_mem = get_memory_rtx (src, len);
  set_mem_align (src_mem, src_align);

  /* Copy word part most expediently.  */
  enum block_op_methods method = BLOCK_OP_NORMAL;
  if (CALL_EXPR_TAILCALL (exp)
      && (retmode == RETURN_BEGIN || target == const0_rtx))
    method = BLOCK_OP_TAILCALL;
  bool use_mempcpy_call = (targetm.libc_has_fast_function (BUILT_IN_MEMPCPY)
			   && retmode == RETURN_END
			   && !might_overlap
			   && target != const0_rtx);
  if (use_mempcpy_call)
    method = BLOCK_OP_NO_LIBCALL_RET;
  dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
				     expected_align, expected_size,
				     min_size, max_size, probable_max_size,
				     use_mempcpy_call, &is_move_done,
				     might_overlap);

  /* Bail out when a mempcpy call would be expanded as libcall and when
     we have a target that provides a fast implementation
     of mempcpy routine.  */
  if (!is_move_done)
    return NULL_RTX;

  if (dest_addr == pc_rtx)
    return NULL_RTX;

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), target);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  if (retmode != RETURN_BEGIN && target != const0_rtx)
    {
      dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
      /* stpcpy pointer to last byte.  */
      if (retmode == RETURN_END_MINUS_ONE)
	dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
    }

  return dest_addr;
}
static rtx
expand_builtin_mempcpy_args (tree dest, tree src, tree len,
			     rtx target, tree orig_exp, memop_ret retmode)
{
  return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
					  retmode, false);
}
/* Expand into a movstr instruction, if one is available.  Return NULL_RTX if
   we failed, the caller should emit a normal call, otherwise try to
   get the result in TARGET, if convenient.
   Return value is based on RETMODE argument.  */

static rtx
expand_movstr (tree dest, tree src, rtx target, memop_ret retmode)
{
  class expand_operand ops[3];
  rtx dest_mem;
  rtx src_mem;

  if (!targetm.have_movstr ())
    return NULL_RTX;

  dest_mem = get_memory_rtx (dest, NULL);
  src_mem = get_memory_rtx (src, NULL);
  if (retmode == RETURN_BEGIN)
    {
      target = force_reg (Pmode, XEXP (dest_mem, 0));
      dest_mem = replace_equiv_address (dest_mem, target);
    }

  create_output_operand (&ops[0],
			 retmode != RETURN_BEGIN ? target : NULL_RTX, Pmode);
  create_fixed_operand (&ops[1], dest_mem);
  create_fixed_operand (&ops[2], src_mem);
  if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
    return NULL_RTX;

  if (retmode != RETURN_BEGIN && target != const0_rtx)
    {
      target = ops[0].value;
      /* movstr is supposed to set end to the address of the NUL
	 terminator.  If the caller requested a mempcpy-like return value,
	 adjust it.  */
      if (retmode == RETURN_END)
	{
	  rtx tem = plus_constant (GET_MODE (target),
				   gen_lowpart (GET_MODE (target), target), 1);
	  emit_move_insn (target, force_operand (tem, NULL_RTX));
	}
    }
  return target;
}
/* Do some very basic size validation of a call to the strcat builtin
   given by EXP.  Return NULL_RTX to have the built-in expand to a call
   to the library function.  */

static rtx
expand_builtin_strcat (tree exp)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
      || !warn_stringop_overflow)
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);

  /* There is no way here to determine the length of the string in
     the destination to which the SRC string is being appended so
     just diagnose cases when the source string is longer than
     the destination object.  */
  access_data data (exp, access_read_write, NULL_TREE, true,
		    NULL_TREE, true);
  const int ost = warn_stringop_overflow ? warn_stringop_overflow - 1 : 1;
  compute_objsize (src, ost, &data.src);
  tree destsize = compute_objsize (dest, ost, &data.dst);

  check_access (exp, /*dstwrite=*/NULL_TREE, /*maxread=*/NULL_TREE,
		src, destsize, data.mode, &data);

  return NULL_RTX;
}
/* Expand expression EXP, which is a call to the strcpy builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient (and in mode MODE if that's
   convenient).  */

static rtx
expand_builtin_strcpy (tree exp, rtx target)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);

  if (warn_stringop_overflow)
    {
      access_data data (exp, access_read_write, NULL_TREE, true,
			NULL_TREE, true);
      const int ost = warn_stringop_overflow ? warn_stringop_overflow - 1 : 1;
      compute_objsize (src, ost, &data.src);
      tree dstsize = compute_objsize (dest, ost, &data.dst);
      check_access (exp, /*dstwrite=*/ NULL_TREE,
		    /*maxread=*/ NULL_TREE, /*srcstr=*/ src,
		    dstsize, data.mode, &data);
    }

  if (rtx ret = expand_builtin_strcpy_args (exp, dest, src, target))
    {
      /* Check to see if the argument was declared attribute nonstring
	 and if so, issue a warning since at this point it's not known
	 to be nul-terminated.  */
      tree fndecl = get_callee_fndecl (exp);
      maybe_warn_nonstring_arg (fndecl, exp);
      return ret;
    }

  return NULL_RTX;
}
/* Helper function to do the actual work for expand_builtin_strcpy.  The
   arguments to the builtin_strcpy call DEST and SRC are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_strcpy.  */

static rtx
expand_builtin_strcpy_args (tree exp, tree dest, tree src, rtx target)
{
  /* Detect strcpy calls with unterminated arrays.  */
  tree size;
  bool exact;
  if (tree nonstr = unterminated_array (src, &size, &exact))
    {
      /* NONSTR refers to the non-nul terminated constant array.  */
      warn_string_no_nul (EXPR_LOCATION (exp), exp, NULL, src, nonstr,
			  size, exact);
      return NULL_RTX;
    }

  return expand_movstr (dest, src, target, /*retmode=*/ RETURN_BEGIN);
}
/* Expand a call EXP to the stpcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
{
  tree dst, src;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  if (warn_stringop_overflow)
    {
      access_data data (exp, access_read_write);
      tree destsize = compute_objsize (dst, warn_stringop_overflow - 1,
				       &data.dst);
      check_access (exp, /*dstwrite=*/NULL_TREE, /*maxread=*/NULL_TREE,
		    src, destsize, data.mode, &data);
    }

  /* If return value is ignored, transform stpcpy into strcpy.  */
  if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      tree len, lenp1;
      rtx ret;

      /* Ensure we get an actual string whose length can be evaluated at
	 compile-time, not an expression containing a string.  This is
	 because the latter will potentially produce pessimized code
	 when used to produce the return value.  */
      c_strlen_data lendata = { };
      if (!c_getstr (src)
	  || !(len = c_strlen (src, 0, &lendata, 1)))
	return expand_movstr (dst, src, target,
			      /*retmode=*/ RETURN_END_MINUS_ONE);

      if (lendata.decl)
	warn_string_no_nul (EXPR_LOCATION (exp), exp, NULL, src, lendata.decl);

      lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
      ret = expand_builtin_mempcpy_args (dst, src, lenp1,
					 target, exp,
					 /*retmode=*/ RETURN_END_MINUS_ONE);

      if (ret)
	return ret;

      if (TREE_CODE (len) == INTEGER_CST)
	{
	  rtx len_rtx = expand_normal (len);

	  if (CONST_INT_P (len_rtx))
	    {
	      ret = expand_builtin_strcpy_args (exp, dst, src, target);

	      if (ret)
		{
		  if (! target)
		    {
		      if (mode != VOIDmode)
			target = gen_reg_rtx (mode);
		      else
			target = gen_reg_rtx (GET_MODE (ret));
		    }
		  if (GET_MODE (target) != GET_MODE (ret))
		    ret = gen_lowpart (GET_MODE (target), ret);

		  ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
		  ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
		  gcc_assert (ret);

		  return target;
		}
	    }
	}

      return expand_movstr (dst, src, target,
			    /*retmode=*/ RETURN_END_MINUS_ONE);
    }
}
/* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
   arguments while being careful to avoid duplicate warnings (which could
   be issued if the expander were to expand the call, resulting in it
   being emitted in expand_call()).  */

static rtx
expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
{
  if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
    {
      /* The call has been successfully expanded.  Check for nonstring
	 arguments and issue warnings as appropriate.  */
      maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
      return ret;
    }

  return NULL_RTX;
}
/* Check a call EXP to the stpncpy built-in for validity.
   Return NULL_RTX on both success and failure.  */

static rtx
expand_builtin_stpncpy (tree exp, rtx)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
      || !warn_stringop_overflow)
    return NULL_RTX;

  /* The source and destination of the call.  */
  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);

  /* The exact number of bytes to write (not the maximum).  */
  tree len = CALL_EXPR_ARG (exp, 2);
  access_data data (exp, access_read_write);
  /* The size of the destination object.  */
  tree destsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);
  check_access (exp, len, /*maxread=*/len, src, destsize, data.mode, &data);
  return NULL_RTX;
}
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

static rtx
builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
			  scalar_int_mode mode)
{
  const char *str = (const char *) data;

  if ((unsigned HOST_WIDE_INT) offset > strlen (str))
    return const0_rtx;

  return c_readstr (str + offset, mode);
}
/* Helper to check the sizes of sequences and the destination of calls
   to __builtin_strncat and __builtin___strncat_chk.  Returns true on
   success (no overflow or invalid sizes), false otherwise.  */

static bool
check_strncat_sizes (tree exp, tree objsize)
{
  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  tree maxread = CALL_EXPR_ARG (exp, 2);

  /* Try to determine the range of lengths that the source expression
     refers to.  */
  c_strlen_data lendata = { };
  get_range_strlen (src, &lendata, /* eltsize = */ 1);

  /* Try to verify that the destination is big enough for the shortest
     string.  */

  access_data data (exp, access_read_write, maxread, true);
  if (!objsize && warn_stringop_overflow)
    {
      /* If it hasn't been provided by __strncat_chk, try to determine
	 the size of the destination object into which the source is
	 being copied.  */
      objsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);
    }

  /* Add one for the terminating nul.  */
  tree srclen = (lendata.minlen
		 ? fold_build2 (PLUS_EXPR, size_type_node, lendata.minlen,
				size_one_node)
		 : NULL_TREE);

  /* The strncat function copies at most MAXREAD bytes and always appends
     the terminating nul so the specified upper bound should never be equal
     to (or greater than) the size of the destination.  */
  if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
      && tree_int_cst_equal (objsize, maxread))
    {
      location_t loc = tree_nonartificial_location (exp);
      loc = expansion_point_location_if_in_system_header (loc);

      warning_at (loc, OPT_Wstringop_overflow_,
		  "%K%qD specified bound %E equals destination size",
		  exp, get_callee_fndecl (exp), maxread);

      return false;
    }

  if (!srclen
      || (maxread && tree_fits_uhwi_p (maxread)
	  && tree_fits_uhwi_p (srclen)
	  && tree_int_cst_lt (maxread, srclen)))
    srclen = maxread;

  /* The number of bytes to write is LEN but check_access will also
     check SRCLEN if LEN's value isn't known.  */
  return check_access (exp, /*dstwrite=*/NULL_TREE, maxread, srclen,
		       objsize, data.mode, &data);
}
/* Similar to expand_builtin_strcat, do some very basic size validation
   of a call to the strncat builtin given by EXP.  Return NULL_RTX to have
   the built-in expand to a call to the library function.  */

static rtx
expand_builtin_strncat (tree exp, rtx)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
      || !warn_stringop_overflow)
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  /* The upper bound on the number of bytes to write.  */
  tree maxread = CALL_EXPR_ARG (exp, 2);

  /* Detect unterminated source (only).  */
  if (!check_nul_terminated_array (exp, src, maxread))
    return NULL_RTX;

  /* The length of the source sequence.  */
  tree slen = c_strlen (src, 1);

  /* Try to determine the range of lengths that the source expression
     refers to.  Since the lengths are only used for warning and not
     for code generation disable strict mode below.  */
  tree maxlen = slen;
  if (!maxlen)
    {
      c_strlen_data lendata = { };
      get_range_strlen (src, &lendata, /* eltsize = */ 1);
      maxlen = lendata.maxbound;
    }

  access_data data (exp, access_read_write);
  /* Try to verify that the destination is big enough for the shortest
     string.  First try to determine the size of the destination object
     into which the source is being copied.  */
  tree destsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);

  /* Add one for the terminating nul.  */
  tree srclen = (maxlen
		 ? fold_build2 (PLUS_EXPR, size_type_node, maxlen,
				size_one_node)
		 : NULL_TREE);

  /* The strncat function copies at most MAXREAD bytes and always appends
     the terminating nul so the specified upper bound should never be equal
     to (or greater than) the size of the destination.  */
  if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize)
      && tree_int_cst_equal (destsize, maxread))
    {
      location_t loc = tree_nonartificial_location (exp);
      loc = expansion_point_location_if_in_system_header (loc);

      warning_at (loc, OPT_Wstringop_overflow_,
		  "%K%qD specified bound %E equals destination size",
		  exp, get_callee_fndecl (exp), maxread);

      return NULL_RTX;
    }

  if (!srclen
      || (maxread && tree_fits_uhwi_p (maxread)
	  && tree_fits_uhwi_p (srclen)
	  && tree_int_cst_lt (maxread, srclen)))
    srclen = maxread;

  check_access (exp, /*dstwrite=*/NULL_TREE, maxread, srclen,
		destsize, data.mode, &data);

  return NULL_RTX;
}
/* Expand expression EXP, which is a call to the strncpy builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  */

static rtx
expand_builtin_strncpy (tree exp, rtx target)
{
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  /* The number of bytes to write (not the maximum).  */
  tree len = CALL_EXPR_ARG (exp, 2);

  /* The length of the source sequence.  */
  tree slen = c_strlen (src, 1);

  if (warn_stringop_overflow)
    {
      access_data data (exp, access_read_write, len, true, len, true);
      const int ost = warn_stringop_overflow ? warn_stringop_overflow - 1 : 1;
      compute_objsize (src, ost, &data.src);
      tree dstsize = compute_objsize (dest, ost, &data.dst);
      /* The number of bytes to write is LEN but check_access will also
	 check SLEN if LEN's value isn't known.  */
      check_access (exp, /*dstwrite=*/len,
		    /*maxread=*/len, src, dstsize, data.mode, &data);
    }

  /* We must be passed a constant len and src parameter.  */
  if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
    return NULL_RTX;

  slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We're required to pad with trailing zeros if the requested
     len is greater than strlen(s2)+1.  In that case try to
     use store_by_pieces, if it fails, punt.  */
  if (tree_int_cst_lt (slen, len))
    {
      unsigned int dest_align = get_pointer_alignment (dest);
      const char *p = c_getstr (src);
      rtx dest_mem;

      if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
	  || !can_store_by_pieces (tree_to_uhwi (len),
				   builtin_strncpy_read_str,
				   CONST_CAST (char *, p),
				   dest_align, false))
	return NULL_RTX;

      dest_mem = get_memory_rtx (dest, len);
      store_by_pieces (dest_mem, tree_to_uhwi (len),
		       builtin_strncpy_read_str,
		       CONST_CAST (char *, p), dest_align, false,
		       RETURN_BEGIN);
      dest_mem = force_operand (XEXP (dest_mem, 0), target);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  return NULL_RTX;
}
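
/* Editorial note (illustration, not GCC code): the store_by_pieces path
   above implements the zero padding strncpy requires.  For a constant
   source and length, e.g.

     char d[8];
     strncpy (d, "ab", 8);   // writes 'a', 'b', then six nul bytes

   the expansion emits the padded block directly instead of calling the
   library function.  */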
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

static rtx
builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
			 scalar_int_mode mode)
{
  const char *c = (const char *) data;
  char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));

  memset (p, *c, GET_MODE_SIZE (mode));

  return c_readstr (p, mode);
}
/* Callback routine for store_by_pieces.  Return the RTL of a register
   containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
   char value given in the RTL register data.  For example, if mode is
   4 bytes wide, return the RTL for 0x01010101*data.  */

static rtx
builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
			scalar_int_mode mode)
{
  rtx target, coeff;
  size_t size;
  char *p;

  size = GET_MODE_SIZE (mode);
  if (size == 1)
    return (rtx) data;

  p = XALLOCAVEC (char, size);
  memset (p, 1, size);
  coeff = c_readstr (p, mode);

  target = convert_to_mode (mode, (rtx) data, 1);
  target = expand_mult (mode, target, coeff, NULL_RTX, 1);
  return force_reg (mode, target);
}
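
/* Editorial sketch of the replication trick used above, as standalone C
   (the helper name splat_byte is hypothetical, not part of GCC):

     unsigned int
     splat_byte (unsigned char c)
     {
       // Multiplying by 0x01010101 copies C into every byte of a
       // 32-bit word, e.g. 0xAB -> 0xABABABAB.
       return c * 0x01010101u;
     }

   builtin_memset_gen_str does the same thing in RTL for an arbitrary
   integer mode, multiplying by a constant whose bytes are all 1.  */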
/* Expand expression EXP, which is a call to the memset builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
   try to get the result in TARGET, if convenient (and in mode MODE if
   that's convenient).  */

static rtx
expand_builtin_memset (tree exp, rtx target, machine_mode mode)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree val = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);

  check_memop_access (exp, dest, NULL_TREE, len);

  return expand_builtin_memset_args (dest, val, len, target, mode, exp);
}
/* Helper function to do the actual work for expand_builtin_memset.  The
   arguments to the builtin_memset call DEST, VAL, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_memset.  */

static rtx
expand_builtin_memset_args (tree dest, tree val, tree len,
			    rtx target, machine_mode mode, tree orig_exp)
{
  tree fndecl, fn;
  enum built_in_function fcode;
  machine_mode val_mode;
  char c;
  unsigned int dest_align;
  rtx dest_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;
  unsigned HOST_WIDE_INT min_size;
  unsigned HOST_WIDE_INT max_size;
  unsigned HOST_WIDE_INT probable_max_size;

  dest_align = get_pointer_alignment (dest);

  /* If DEST is not a pointer type, don't do this operation in-line.  */
  if (dest_align == 0)
    return NULL_RTX;

  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
			    &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Evaluate and ignore VAL in case it has side-effects.  */
      expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (dest, target, mode, EXPAND_NORMAL);
    }

  /* Stabilize the arguments in case we fail.  */
  dest = builtin_save_expr (dest);
  val = builtin_save_expr (val);
  len = builtin_save_expr (len);

  len_rtx = expand_normal (len);
  determine_block_size (len, len_rtx, &min_size, &max_size,
			&probable_max_size);
  dest_mem = get_memory_rtx (dest, len);
  val_mode = TYPE_MODE (unsigned_char_type_node);

  if (TREE_CODE (val) != INTEGER_CST)
    {
      rtx val_rtx;

      val_rtx = expand_normal (val);
      val_rtx = convert_to_mode (val_mode, val_rtx, 0);

      /* Assume that we can memset by pieces if we can store
       * the coefficients by pieces (in the required modes).
       * We can't pass builtin_memset_gen_str as that emits RTL.  */
      c = 1;
      if (tree_fits_uhwi_p (len)
	  && can_store_by_pieces (tree_to_uhwi (len),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	{
	  val_rtx = force_reg (val_mode, val_rtx);
	  store_by_pieces (dest_mem, tree_to_uhwi (len),
			   builtin_memset_gen_str, val_rtx, dest_align,
			   true, RETURN_BEGIN);
	}
      else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
					dest_align, expected_align,
					expected_size, min_size, max_size,
					probable_max_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  if (target_char_cast (val, &c))
    goto do_libcall;

  if (c)
    {
      if (tree_fits_uhwi_p (len)
	  && can_store_by_pieces (tree_to_uhwi (len),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	store_by_pieces (dest_mem, tree_to_uhwi (len),
			 builtin_memset_read_str, &c, dest_align, true,
			 RETURN_BEGIN);
      else if (!set_storage_via_setmem (dest_mem, len_rtx,
					gen_int_mode (c, val_mode),
					dest_align, expected_align,
					expected_size, min_size, max_size,
					probable_max_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  set_mem_align (dest_mem, dest_align);
  dest_addr = clear_storage_hints (dest_mem, len_rtx,
				   CALL_EXPR_TAILCALL (orig_exp)
				   ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
				   expected_align, expected_size,
				   min_size, max_size,
				   probable_max_size);

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  return dest_addr;

 do_libcall:
  fndecl = get_callee_fndecl (orig_exp);
  fcode = DECL_FUNCTION_CODE (fndecl);
  if (fcode == BUILT_IN_MEMSET)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
				dest, val, len);
  else if (fcode == BUILT_IN_BZERO)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
				dest, len);
  else
    gcc_unreachable ();
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
  return expand_call (fn, target, target == const0_rtx);
}
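
/* Editorial summary (not GCC code) of the strategy above: a constant
   length first tries store_by_pieces, then the target's setmem pattern,
   and a zero value falls through to clear_storage_hints; any failure
   reaches the do_libcall label, which re-emits memset/bzero as a plain
   library call.  For example, memset (p, 0, 16) with known alignment is
   typically expanded as a few stores rather than a call.  */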
/* Expand expression EXP, which is a call to the bzero builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  */

static rtx
expand_builtin_bzero (tree exp)
{
  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree size = CALL_EXPR_ARG (exp, 1);

  check_memop_access (exp, dest, NULL_TREE, size);

  /* New argument list transforming bzero(ptr x, int y) to
     memset(ptr x, int 0, size_t y).  This is done this way
     so that if it isn't expanded inline, we fall back to
     calling bzero instead of memset.  */

  location_t loc = EXPR_LOCATION (exp);

  return expand_builtin_memset_args (dest, integer_zero_node,
				     fold_convert_loc (loc,
						       size_type_node, size),
				     const0_rtx, VOIDmode, exp);
}
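
/* Editorial note (illustration, not GCC code): the rewrite above treats

     bzero (p, n);

   exactly like

     memset (p, 0, (size_t) n);

   while keeping EXP as the original call, so that a failed inline
   expansion falls back to calling bzero rather than memset.  */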
/* Try to expand cmpstr operation ICODE with the given operands.
   Return the result rtx on success, otherwise return null.  */

static rtx
expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
	       HOST_WIDE_INT align)
{
  machine_mode insn_mode = insn_data[icode].operand[0].mode;

  if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
    target = NULL_RTX;

  class expand_operand ops[4];
  create_output_operand (&ops[0], target, insn_mode);
  create_fixed_operand (&ops[1], arg1_rtx);
  create_fixed_operand (&ops[2], arg2_rtx);
  create_integer_operand (&ops[3], align);
  if (maybe_expand_insn (icode, 4, ops))
    return ops[0].value;
  return NULL_RTX;
}
/* Expand expression EXP, which is a call to the memcmp built-in function.
   Return NULL_RTX if we failed and the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient.
   RESULT_EQ is true if we can relax the returned value to be either zero
   or nonzero, without caring about the sign.  */

static rtx
expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree arg2 = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);

  /* Diagnose calls where the specified length exceeds the size of either
     object.  */
  if (!check_read_access (exp, arg1, len, 0)
      || !check_read_access (exp, arg2, len, 0))
    return NULL_RTX;

  /* Due to the performance benefit, always inline the calls first
     when result_eq is false.  */
  rtx result = NULL_RTX;
  enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
  if (!result_eq && fcode != BUILT_IN_BCMP)
    {
      result = inline_expand_builtin_bytecmp (exp, target);
      if (result)
	return result;
    }

  machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  location_t loc = EXPR_LOCATION (exp);

  unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
  unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

  /* If we don't have POINTER_TYPE, call the function.  */
  if (arg1_align == 0 || arg2_align == 0)
    return NULL_RTX;

  rtx arg1_rtx = get_memory_rtx (arg1, len);
  rtx arg2_rtx = get_memory_rtx (arg2, len);
  rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));

  /* Set MEM_SIZE as appropriate.  */
  if (CONST_INT_P (len_rtx))
    {
      set_mem_size (arg1_rtx, INTVAL (len_rtx));
      set_mem_size (arg2_rtx, INTVAL (len_rtx));
    }

  by_pieces_constfn constfn = NULL;

  /* Try to get the byte representation of the constant ARG2 (or, only
     when the function's result is used for equality to zero, ARG1)
     points to, with its byte size in NBYTES.  */
  unsigned HOST_WIDE_INT nbytes;
  const char *rep = getbyterep (arg2, &nbytes);
  if (result_eq && rep == NULL)
    {
      /* For equality to zero the arguments are interchangeable.  */
      rep = getbyterep (arg1, &nbytes);
      if (rep)
	std::swap (arg1_rtx, arg2_rtx);
    }

  /* If the function's constant bound LEN_RTX is less than or equal
     to the byte size of the representation of the constant argument,
     and if block move would be done by pieces, we can avoid loading
     the bytes from memory and only store the computed constant result.  */
  if (rep
      && CONST_INT_P (len_rtx)
      && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes)
    constfn = builtin_memcpy_read_str;

  result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
				 TREE_TYPE (len), target,
				 result_eq, constfn,
				 CONST_CAST (char *, rep));

  if (result)
    {
      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (result) == mode)
	return result;

      if (target != 0)
	{
	  convert_move (target, result, 0);
	  return target;
	}

      return convert_to_mode (mode, result, 0);
    }

  return NULL_RTX;
}
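
/* Editorial note (illustration, not GCC code): the constfn path above
   lets a bounded comparison against a known byte representation, e.g.

     if (memcmp (buf, "GIF8", 4) == 0) ...

   be expanded as a single 32-bit load compared against the constant
   0x38464947 (on a little-endian target), never touching the string
   literal's memory at run time.  */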
/* Expand expression EXP, which is a call to the strcmp builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree arg2 = CALL_EXPR_ARG (exp, 1);

  if (!check_read_access (exp, arg1)
      || !check_read_access (exp, arg2))
    return NULL_RTX;

  /* Due to the performance benefit, always inline the calls first.  */
  rtx result = NULL_RTX;
  result = inline_expand_builtin_bytecmp (exp, target);
  if (result)
    return result;

  insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
  insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
  if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
    return NULL_RTX;

  unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
  unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

  /* If we don't have POINTER_TYPE, call the function.  */
  if (arg1_align == 0 || arg2_align == 0)
    return NULL_RTX;

  /* Stabilize the arguments in case gen_cmpstr(n)si fails.  */
  arg1 = builtin_save_expr (arg1);
  arg2 = builtin_save_expr (arg2);

  rtx arg1_rtx = get_memory_rtx (arg1, NULL);
  rtx arg2_rtx = get_memory_rtx (arg2, NULL);

  /* Try to call cmpstrsi.  */
  if (cmpstr_icode != CODE_FOR_nothing)
    result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
			    MIN (arg1_align, arg2_align));

  /* Try to determine at least one length and call cmpstrnsi.  */
  if (!result && cmpstrn_icode != CODE_FOR_nothing)
    {
      tree len;
      rtx arg3_rtx;

      tree len1 = c_strlen (arg1, 1);
      tree len2 = c_strlen (arg2, 1);

      if (len1)
	len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
      if (len2)
	len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

      /* If we don't have a constant length for the first, use the length
	 of the second, if we know it.  We don't require a constant for
	 this case; some cost analysis could be done if both are available
	 but neither is constant.  For now, assume they're equally cheap,
	 unless one has side effects.  If both strings have constant lengths,
	 use the smaller.  */

      if (!len1)
	len = len2;
      else if (!len2)
	len = len1;
      else if (TREE_SIDE_EFFECTS (len1))
	len = len2;
      else if (TREE_SIDE_EFFECTS (len2))
	len = len1;
      else if (TREE_CODE (len1) != INTEGER_CST)
	len = len2;
      else if (TREE_CODE (len2) != INTEGER_CST)
	len = len1;
      else if (tree_int_cst_lt (len1, len2))
	len = len1;
      else
	len = len2;

      /* If both arguments have side effects, we cannot optimize.  */
      if (len && !TREE_SIDE_EFFECTS (len))
	{
	  arg3_rtx = expand_normal (len);
	  result = expand_cmpstrn_or_cmpmem
	    (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
	     arg3_rtx, MIN (arg1_align, arg2_align));
	}
    }

  tree fndecl = get_callee_fndecl (exp);
  if (result)
    {
      /* Check to see if the argument was declared attribute nonstring
	 and if so, issue a warning since at this point it's not known
	 to be nul-terminated.  */
      maybe_warn_nonstring_arg (fndecl, exp);

      /* Return the value in the proper mode for this function.  */
      machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
      if (GET_MODE (result) == mode)
	return result;
      if (target == 0)
	return convert_to_mode (mode, result, 0);
      convert_move (target, result, 0);
      return target;
    }

  /* Expand the library call ourselves using a stabilized argument
     list to avoid re-evaluating the function's arguments twice.  */
  tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2,
				   arg1, arg2);
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
  return expand_call (fn, target, target == const0_rtx);
}
/* Expand expression EXP, which is a call to the strncmp builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
			ATTRIBUTE_UNUSED machine_mode mode)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree arg2 = CALL_EXPR_ARG (exp, 1);
  tree arg3 = CALL_EXPR_ARG (exp, 2);

  if (!check_nul_terminated_array (exp, arg1, arg3)
      || !check_nul_terminated_array (exp, arg2, arg3))
    return NULL_RTX;

  location_t loc = tree_nonartificial_location (exp);
  loc = expansion_point_location_if_in_system_header (loc);

  tree len1 = c_strlen (arg1, 1);
  tree len2 = c_strlen (arg2, 1);

  /* Check to see if the argument was declared attribute nonstring
     and if so, issue a warning since at this point it's not known
     to be nul-terminated.  */
  if (!maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp)
      && !len1 && !len2)
    {
      /* A strncmp read is constrained not just by the bound but
	 also by the length of the shorter string.  Specifying
	 a bound that's larger than the size of either array makes
	 no sense and is likely a bug.  When the length of neither
	 of the two strings is known but the sizes of both of
	 the arrays they are stored in are, issue a warning if
	 the bound is larger than the size of the larger
	 of the two arrays.  */

      access_ref ref1 (arg3, true);
      access_ref ref2 (arg3, true);

      tree bndrng[2] = { NULL_TREE, NULL_TREE };
      get_size_range (arg3, bndrng, ref1.bndrng);

      tree size1 = compute_objsize (arg1, 1, &ref1);
      tree size2 = compute_objsize (arg2, 1, &ref2);
      tree func = get_callee_fndecl (exp);

      if (size1 && size2 && bndrng[0] && !integer_zerop (bndrng[0]))
	{
	  offset_int rem1 = ref1.size_remaining ();
	  offset_int rem2 = ref2.size_remaining ();
	  if (rem1 == 0 || rem2 == 0)
	    maybe_warn_for_bound (OPT_Wstringop_overread, loc, exp, func,
				  bndrng, integer_zero_node);
	  else
	    {
	      offset_int maxrem = wi::max (rem1, rem2, UNSIGNED);
	      if (maxrem < wi::to_offset (bndrng[0]))
		maybe_warn_for_bound (OPT_Wstringop_overread, loc, exp,
				      func, bndrng,
				      wide_int_to_tree (sizetype, maxrem));
	    }
	}
      else if (bndrng[0]
	       && !integer_zerop (bndrng[0])
	       && ((size1 && integer_zerop (size1))
		   || (size2 && integer_zerop (size2))))
	maybe_warn_for_bound (OPT_Wstringop_overread, loc, exp, func,
			      bndrng, integer_zero_node);
    }

  /* Due to the performance benefit, always inline the calls first.  */
  rtx result = NULL_RTX;
  result = inline_expand_builtin_bytecmp (exp, target);
  if (result)
    return result;

  /* If c_strlen can determine an expression for one of the string
     lengths, and it doesn't have side effects, then emit cmpstrnsi
     using length MIN(strlen(string)+1, arg3).  */
  insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
  if (cmpstrn_icode == CODE_FOR_nothing)
    return NULL_RTX;

  tree len;

  unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
  unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

  if (len1)
    len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
  if (len2)
    len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);

  tree len3 = fold_convert_loc (loc, sizetype, arg3);

  /* If we don't have a constant length for the first, use the length
     of the second, if we know it.  If neither string is constant length,
     use the given length argument.  We don't require a constant for
     this case; some cost analysis could be done if both are available
     but neither is constant.  For now, assume they're equally cheap,
     unless one has side effects.  If both strings have constant lengths,
     use the smaller.  */

  if (!len1 && !len2)
    len = len3;
  else if (!len1)
    len = len2;
  else if (!len2)
    len = len1;
  else if (TREE_SIDE_EFFECTS (len1))
    len = len2;
  else if (TREE_SIDE_EFFECTS (len2))
    len = len1;
  else if (TREE_CODE (len1) != INTEGER_CST)
    len = len2;
  else if (TREE_CODE (len2) != INTEGER_CST)
    len = len1;
  else if (tree_int_cst_lt (len1, len2))
    len = len1;
  else
    len = len2;

  /* If we are not using the given length, we must incorporate it here.
     The actual new length parameter will be MIN(len,arg3) in this case.  */
  if (len != len3)
    {
      len = fold_convert_loc (loc, sizetype, len);
      len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
    }
  rtx arg1_rtx = get_memory_rtx (arg1, len);
  rtx arg2_rtx = get_memory_rtx (arg2, len);
  rtx arg3_rtx = expand_normal (len);
  result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
				     arg2_rtx, TREE_TYPE (len), arg3_rtx,
				     MIN (arg1_align, arg2_align));

  tree fndecl = get_callee_fndecl (exp);
  if (result)
    {
      /* Return the value in the proper mode for this function.  */
      mode = TYPE_MODE (TREE_TYPE (exp));
      if (GET_MODE (result) == mode)
	return result;
      if (target == 0)
	return convert_to_mode (mode, result, 0);
      convert_move (target, result, 0);
      return target;
    }

  /* Expand the library call ourselves using a stabilized argument
     list to avoid re-evaluating the function's arguments twice.  */
  tree call = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
  if (TREE_NO_WARNING (exp))
    TREE_NO_WARNING (call) = true;
  gcc_assert (TREE_CODE (call) == CALL_EXPR);
  CALL_EXPR_TAILCALL (call) = CALL_EXPR_TAILCALL (exp);
  return expand_call (call, target, target == const0_rtx);
}
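
/* Editorial note (illustration, not GCC code): the MIN computation above
   means a call such as

     strncmp (s, "abc", 100)

   is compared using length MIN (strlen ("abc") + 1, 100) == 4, since the
   comparison can never proceed past the nul of the shorter string.  */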
/* Expand a call to __builtin_saveregs, generating the result in TARGET,
   if that's convenient.  */

rtx
expand_builtin_saveregs (void)
{
  rtx val;
  rtx_insn *seq;

  /* Don't do __builtin_saveregs more than once in a function.
     Save the result of the first call and reuse it.  */
  if (saveregs_value != 0)
    return saveregs_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the call to the
     first insn of this function.  */

  start_sequence ();

  /* Do whatever the machine needs done in this case.  */
  val = targetm.calls.expand_builtin_saveregs ();

  seq = get_insns ();
  end_sequence ();

  saveregs_value = val;

  /* Put the insns after the NOTE that starts the function.  If this
     is inside a start_sequence, make the outer-level insn chain current, so
     the code is placed at the start of the function.  */
  push_topmost_sequence ();
  emit_insn_after (seq, entry_of_function ());
  pop_topmost_sequence ();

  return val;
}
/* Expand a call to __builtin_next_arg.  */

static rtx
expand_builtin_next_arg (void)
{
  /* Checking arguments is already done in fold_builtin_next_arg
     that must be called before this function.  */
  return expand_binop (ptr_mode, add_optab,
		       crtl->args.internal_arg_pointer,
		       crtl->args.arg_offset_rtx,
		       NULL_RTX, 0, OPTAB_LIB_WIDEN);
}
/* Make it easier for the backends by protecting the valist argument
   from multiple evaluations.  */

static tree
stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
{
  tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));

  /* The current way of determining the type of valist is completely
     bogus.  We should have the information on the va builtin instead.  */
  if (!vatype)
    vatype = targetm.fn_abi_va_list (cfun->decl);

  if (TREE_CODE (vatype) == ARRAY_TYPE)
    {
      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);

      /* For this case, the backends will be expecting a pointer to
	 vatype, but it's possible we've actually been given an array
	 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
	 So fix it.  */
      if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	{
	  tree p1 = build_pointer_type (TREE_TYPE (vatype));
	  valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
	}
    }
  else
    {
      tree pt = build_pointer_type (vatype);

      if (! needs_lvalue)
	{
	  if (! TREE_SIDE_EFFECTS (valist))
	    return valist;

	  valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
	  TREE_SIDE_EFFECTS (valist) = 1;
	}

      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);
      valist = fold_build2_loc (loc, MEM_REF,
				vatype, valist, build_int_cst (pt, 0));
    }

  return valist;
}
/* The "standard" definition of va_list is void*.  */

tree
std_build_builtin_va_list (void)
{
  return ptr_type_node;
}

/* The "standard" abi va_list is va_list_type_node.  */

tree
std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
{
  return va_list_type_node;
}
/* The "standard" type of va_list is va_list_type_node.  */

tree
std_canonical_va_list_type (tree type)
{
  tree wtype, htype;

  wtype = va_list_type_node;
  htype = type;

  if (TREE_CODE (wtype) == ARRAY_TYPE)
    {
      /* If va_list is an array type, the argument may have decayed
	 to a pointer type, e.g. by being passed to another function.
	 In that case, unwrap both types so that we can compare the
	 underlying records.  */
      if (TREE_CODE (htype) == ARRAY_TYPE
	  || POINTER_TYPE_P (htype))
	{
	  wtype = TREE_TYPE (wtype);
	  htype = TREE_TYPE (htype);
	}
    }
  if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
    return va_list_type_node;

  return NULL_TREE;
}
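
/* Editorial note (illustration, not GCC code): the unwrapping above
   handles ABIs where va_list is an array type, e.g.

     typedef struct __va_list_tag va_list[1];   // x86-64 style

     void f (va_list ap);   // the parameter decays to __va_list_tag *

   so the pointer type seen in F must be unwrapped to the element record
   before it can be matched against va_list_type_node.  */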
/* The "standard" implementation of va_start: just assign `nextarg' to
   the variable.  */

void
std_expand_builtin_va_start (tree valist, rtx nextarg)
{
  rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
  convert_move (va_r, nextarg, 0);
}
/* Expand EXP, a call to __builtin_va_start.  */

static rtx
expand_builtin_va_start (tree exp)
{
  rtx nextarg;
  tree valist;
  location_t loc = EXPR_LOCATION (exp);

  if (call_expr_nargs (exp) < 2)
    {
      error_at (loc, "too few arguments to function %<va_start%>");
      return const0_rtx;
    }

  if (fold_builtin_next_arg (exp, true))
    return const0_rtx;

  nextarg = expand_builtin_next_arg ();
  valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);

  if (targetm.expand_builtin_va_start)
    targetm.expand_builtin_va_start (valist, nextarg);
  else
    std_expand_builtin_va_start (valist, nextarg);

  return const0_rtx;
}
/* Expand EXP, a call to __builtin_va_end.  */

static rtx
expand_builtin_va_end (tree exp)
{
  tree valist = CALL_EXPR_ARG (exp, 0);

  /* Evaluate for side effects, if needed.  I hate macros that don't
     do that.  */
  if (TREE_SIDE_EFFECTS (valist))
    expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);

  return const0_rtx;
}
/* Expand EXP, a call to __builtin_va_copy.  We do this as a
   builtin rather than just as an assignment in stdarg.h because of the
   nastiness of array-type va_list types.  */

static rtx
expand_builtin_va_copy (tree exp)
{
  tree dst, src, t;
  location_t loc = EXPR_LOCATION (exp);

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  dst = stabilize_va_list_loc (loc, dst, 1);
  src = stabilize_va_list_loc (loc, src, 0);

  gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);

  if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
    {
      t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
  else
    {
      rtx dstb, srcb, size;

      /* Evaluate to pointers.  */
      dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
      srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
      size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
			  NULL_RTX, VOIDmode, EXPAND_NORMAL);

      dstb = convert_memory_address (Pmode, dstb);
      srcb = convert_memory_address (Pmode, srcb);

      /* "Dereference" to BLKmode memories.  */
      dstb = gen_rtx_MEM (BLKmode, dstb);
      set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
      set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
      srcb = gen_rtx_MEM (BLKmode, srcb);
      set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
      set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));

      /* Copy.  */
      emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
    }

  return const0_rtx;
}
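
/* Editorial note (illustration, not GCC code): for an array-type va_list
   the plain assignment form would be ill-formed C, which is why va_copy
   must be a builtin:

     va_list a, b;
     b = a;           // invalid when va_list is an array type
     va_copy (b, a);  // expands to the block copy emitted above
*/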
/* Expand a call to one of the builtin functions __builtin_frame_address or
   __builtin_return_address.  */

static rtx
expand_builtin_frame_address (tree fndecl, tree exp)
{
  /* The argument must be a nonnegative integer constant.
     It counts the number of frames to scan up the stack.
     The value is either the frame pointer value or the return
     address saved in that frame.  */
  if (call_expr_nargs (exp) == 0)
    /* Warning about missing arg was already issued.  */
    return const0_rtx;
  else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
    {
      error ("invalid argument to %qD", fndecl);
      return const0_rtx;
    }
  else
    {
      /* Number of frames to scan up the stack.  */
      unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));

      rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
					    count);

      /* Some ports cannot access arbitrary stack frames.  */
      if (tem == NULL)
	{
	  warning (0, "unsupported argument to %qD", fndecl);
	  return const0_rtx;
	}

      if (count)
	{
	  /* Warn since no effort is made to ensure that any frame
	     beyond the current one exists or can be safely reached.  */
	  warning (OPT_Wframe_address, "calling %qD with "
		   "a nonzero argument is unsafe", fndecl);
	}

      /* For __builtin_frame_address, return what we've got.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	return tem;

      if (!REG_P (tem)
	  && ! CONSTANT_P (tem))
	tem = copy_addr_to_reg (tem);
      return tem;
    }
}
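
/* Editorial note (illustration, not GCC code): typical uses of the two
   builtins expanded above:

     void *fp = __builtin_frame_address (0);   // this frame, always safe
     void *ra = __builtin_return_address (0);  // this call's return addr
     void *up = __builtin_frame_address (1);   // caller; -Wframe-address

   Nonzero arguments trigger the warning because nothing guarantees the
   outer frames exist or can be walked on the target.  */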
/* Expand EXP, a call to the alloca builtin.  Return NULL_RTX if we
   failed and the caller should emit a normal call.  */

static rtx
expand_builtin_alloca (tree exp)
{
  rtx op0;
  rtx result;
  unsigned int align;
  tree fndecl = get_callee_fndecl (exp);
  HOST_WIDE_INT max_size;
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
  bool valid_arglist
    = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
       ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
			   VOID_TYPE)
       : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
	 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
	 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));

  if (!valid_arglist)
    return NULL_RTX;

  if ((alloca_for_var
       && warn_vla_limit >= HOST_WIDE_INT_MAX
       && warn_alloc_size_limit < warn_vla_limit)
      || (!alloca_for_var
	  && warn_alloca_limit >= HOST_WIDE_INT_MAX
	  && warn_alloc_size_limit < warn_alloca_limit))
    {
      /* -Walloca-larger-than and -Wvla-larger-than settings of
	 less than HOST_WIDE_INT_MAX override the more general
	 -Walloc-size-larger-than so unless either of the former
	 options is smaller than the last one (which would imply
	 that the call was already checked), check the alloca
	 arguments for overflow.  */
      tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
      int idx[] = { 0, -1 };
      maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
    }

  /* Compute the argument.  */
  op0 = expand_normal (CALL_EXPR_ARG (exp, 0));

  /* Compute the alignment.  */
  align = (fcode == BUILT_IN_ALLOCA
	   ? BIGGEST_ALIGNMENT
	   : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));

  /* Compute the maximum size.  */
  max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
	      ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
	      : -1);

  /* Allocate the desired space.  If the allocation stems from the
     declaration of a variable-sized object, it cannot accumulate.  */
  result
    = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
  result = convert_memory_address (ptr_mode, result);

  /* Dynamic allocations for variables are recorded during
     gimplification.  */
  if (!alloca_for_var && (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC))
    record_dynamic_alloc (exp);

  return result;
}
/* Emit a call to __asan_allocas_unpoison for EXP.  Add to the second
   argument of the call virtual_stack_dynamic_rtx - stack_pointer_rtx, which
   is the STACK_DYNAMIC_OFFSET value.  See the motivation for this in the
   comment to the handle_builtin_stack_restore function.  */

static rtx
expand_asan_emit_allocas_unpoison (tree exp)
{
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);
  rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
				 stack_pointer_rtx, NULL_RTX, 0,
				 OPTAB_LIB_WIDEN);
  off = convert_modes (ptr_mode, Pmode, off, 0);
  bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
			     OPTAB_LIB_WIDEN);
  rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
  ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
				 top, ptr_mode, bot, ptr_mode);
  return ret;
}
/* Expand a call to bswap builtin in EXP.
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function in-line.  If convenient, the result should be placed in
   TARGET.  SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
		      rtx subtarget)
{
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  op0 = expand_expr (arg,
		     subtarget && GET_MODE (subtarget) == target_mode
		     ? subtarget : NULL_RTX,
		     target_mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != target_mode)
    op0 = convert_to_mode (target_mode, op0, 1);

  target = expand_unop (target_mode, bswap_optab, op0, target, 1);

  gcc_assert (target);

  return convert_to_mode (target_mode, target, 1);
}
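
/* Editorial note (illustration, not GCC code): the unop expansion above
   implements, e.g.,

     __builtin_bswap32 (0x11223344)  ==  0x44332211

   via the target's bswap pattern when one exists, with expand_unop
   falling back to an open-coded shift-and-mask sequence otherwise.  */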
/* Expand a call to a unary builtin in EXP.
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function in-line.  If convenient, the result should be placed in
   TARGET.  SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
		     rtx subtarget, optab op_optab)
{
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Compute the argument.  */
  op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
		     (subtarget
		      && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
			  == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
		     VOIDmode, EXPAND_NORMAL);
  /* Compute op, into TARGET if possible.
     Set TARGET to wherever the result comes back.  */
  target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
			op_optab, op0, target, op_optab != clrsb_optab);
  gcc_assert (target);

  return convert_to_mode (target_mode, target, 0);
}
/* Expand a call to __builtin_expect.  We just return our argument
   as the builtin_expect semantics should have already been handled by
   the tree branch prediction pass.  */

static rtx
expand_builtin_expect (tree exp, rtx target)
{
  tree arg;

  if (call_expr_nargs (exp) < 2)
    return const0_rtx;
  arg = CALL_EXPR_ARG (exp, 0);

  target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
  /* When guessing was done, the hints should be already stripped away.  */
  gcc_assert (!flag_guess_branch_prob
	      || optimize == 0 || seen_error ());
  return target;
}
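
/* Editorial note (illustration, not GCC code): by this point a use such
   as

     if (__builtin_expect (x == 0, 0))   // "unlikely" annotation
       cold_path ();

   has already had its hint consumed by the branch-prediction pass, so
   the expansion above simply evaluates and returns the first operand.  */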
/* Expand a call to __builtin_expect_with_probability.  We just return our
   argument as the builtin_expect semantics should have already been handled
   by the tree branch prediction pass.  */

static rtx
expand_builtin_expect_with_probability (tree exp, rtx target)
{
  tree arg;

  if (call_expr_nargs (exp) < 3)
    return const0_rtx;
  arg = CALL_EXPR_ARG (exp, 0);

  target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
  /* When guessing was done, the hints should be already stripped away.  */
  gcc_assert (!flag_guess_branch_prob
	      || optimize == 0 || seen_error ());
  return target;
}
/* Expand a call to __builtin_assume_aligned.  We just return our first
   argument as the builtin_assume_aligned semantics should have already been
   handled by CCP.  */

static rtx
expand_builtin_assume_aligned (tree exp, rtx target)
{
  if (call_expr_nargs (exp) < 2)
    return const0_rtx;
  target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
			EXPAND_NORMAL);
  gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
	      && (call_expr_nargs (exp) < 3
		  || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
  return target;
}
/* Expand a call to __builtin_trap.  */

static void
expand_builtin_trap (void)
{
  if (targetm.have_trap ())
    {
      rtx_insn *insn = emit_insn (targetm.gen_trap ());
      /* For trap insns when not accumulating outgoing args force
	 REG_ARGS_SIZE note to prevent crossjumping of calls with
	 different args sizes.  */
      if (!ACCUMULATE_OUTGOING_ARGS)
	add_args_size_note (insn, stack_pointer_delta);
    }
  else
    {
      tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
      tree call_expr = build_call_expr (fn, 0);
      expand_call (call_expr, NULL_RTX, false);
    }

  emit_barrier ();
}
/* Expand a call to __builtin_unreachable.  We do nothing except emit
   a barrier saying that control flow will not pass here.

   It is the responsibility of the program being compiled to ensure
   that control flow never reaches __builtin_unreachable.  */

static void
expand_builtin_unreachable (void)
{
  emit_barrier ();
}
/* Expand EXP, a call to fabs, fabsf or fabsl.
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function inline.  If convenient, the result should be placed
   in TARGET.  SUBTARGET may be used as the target for computing
   the operand.  */

static rtx
expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
{
  machine_mode mode;
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
  return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
}
/* Expand EXP, a call to copysign, copysignf, or copysignl.
   Return NULL if a normal call should be emitted rather than expanding the
   function inline.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing the operand.  */

static rtx
expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
{
  rtx op0, op1;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

  arg = CALL_EXPR_ARG (exp, 1);
  op1 = expand_normal (arg);

  return expand_copysign (op0, op1, target);
}
/* Expand a call to __builtin___clear_cache.  */

static rtx
expand_builtin___clear_cache (tree exp)
{
  if (!targetm.code_for_clear_cache)
    {
#ifdef CLEAR_INSN_CACHE
      /* There is no "clear_cache" insn, and __clear_cache() in libgcc
	 does something.  Just do the default expansion to a call to
	 __clear_cache().  */
      return NULL_RTX;
#else
      /* There is no "clear_cache" insn, and __clear_cache() in libgcc
	 does nothing.  There is no need to call it.  Do nothing.  */
      return const0_rtx;
#endif /* CLEAR_INSN_CACHE */
    }

  /* We have a "clear_cache" insn, and it will handle everything.  */
  tree begin, end;
  rtx begin_rtx, end_rtx;

  /* We must not expand to a library call.  If we did, any
     fallback library function in libgcc that might contain a call to
     __builtin___clear_cache() would recurse infinitely.  */
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    {
      error ("both arguments to %<__builtin___clear_cache%> must be pointers");
      return const0_rtx;
    }

  if (targetm.have_clear_cache ())
    {
      class expand_operand ops[2];

      begin = CALL_EXPR_ARG (exp, 0);
      begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);

      end = CALL_EXPR_ARG (exp, 1);
      end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);

      create_address_operand (&ops[0], begin_rtx);
      create_address_operand (&ops[1], end_rtx);
      if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
	return const0_rtx;
    }
  return const0_rtx;
}
/* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT.  */

static rtx
round_trampoline_addr (rtx tramp)
{
  rtx temp, addend, mask;

  /* If we don't need too much alignment, we'll have been guaranteed
     proper alignment by get_trampoline_type.  */
  if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
    return tramp;

  /* Round address up to desired boundary.  */
  temp = gen_reg_rtx (Pmode);
  addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
  mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);

  temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
			      temp, 0, OPTAB_LIB_WIDEN);
  tramp = expand_simple_binop (Pmode, AND, temp, mask,
			       temp, 0, OPTAB_LIB_WIDEN);

  return tramp;
}
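
/* Editorial sketch of the rounding arithmetic above, as standalone C
   (round_up is a hypothetical name; ALIGN must be a power of two):

     #include <stdint.h>

     uintptr_t
     round_up (uintptr_t addr, uintptr_t align)
     {
       // Add align-1, then clear the low bits with the negated alignment.
       return (addr + align - 1) & -align;
     }

   The PLUS adds TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1 and the AND
   masks with the negated alignment, exactly this expression in RTL.  */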
/* Expand a call to the builtin function __builtin_init_trampoline.  */

static rtx
expand_builtin_init_trampoline (tree exp, bool onstack)
{
  tree t_tramp, t_func, t_chain;
  rtx m_tramp, r_tramp, r_chain, tmp;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
			 POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_tramp = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  r_tramp = expand_normal (t_tramp);
  m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
  MEM_NOTRAP_P (m_tramp) = 1;

  /* If ONSTACK, the TRAMP argument should be the address of a field
     within the local function's FRAME decl.  Either way, let's see if
     we can fill in the MEM_ATTRs for this memory.  */
  if (TREE_CODE (t_tramp) == ADDR_EXPR)
    set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);

  /* Creator of a heap trampoline is responsible for making sure the
     address is aligned to at least STACK_BOUNDARY.  Normally malloc
     will ensure this anyhow.  */
  tmp = round_trampoline_addr (r_tramp);
  if (tmp != r_tramp)
    {
      m_tramp = change_address (m_tramp, BLKmode, tmp);
      set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
      set_mem_size (m_tramp, TRAMPOLINE_SIZE);
    }

  /* The FUNC argument should be the address of the nested function.
     Extract the actual function decl to pass to the hook.  */
  gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
  t_func = TREE_OPERAND (t_func, 0);
  gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);

  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the trampoline.  */
  targetm.calls.trampoline_init (m_tramp, t_func, r_chain);

  if (onstack)
    {
      trampolines_created = 1;

      if (targetm.calls.custom_function_descriptors != 0)
	warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
		    "trampoline generated for nested function %qD", t_func);
    }

  return const0_rtx;
}
static rtx
expand_builtin_adjust_trampoline (tree exp)
{
  rtx tramp;

  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
  tramp = round_trampoline_addr (tramp);
  if (targetm.calls.trampoline_adjust_address)
    tramp = targetm.calls.trampoline_adjust_address (tramp);

  return tramp;
}
/* Expand a call to the builtin descriptor initialization routine.
   A descriptor is made up of a couple of pointers to the static
   chain and the code entry in this order.  */

static rtx
expand_builtin_init_descriptor (tree exp)
{
  tree t_descr, t_func, t_chain;
  rtx m_descr, r_descr, r_func, r_chain;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
			 VOID_TYPE))
    return NULL_RTX;

  t_descr = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  r_descr = expand_normal (t_descr);
  m_descr = gen_rtx_MEM (BLKmode, r_descr);
  MEM_NOTRAP_P (m_descr) = 1;
  set_mem_align (m_descr, GET_MODE_ALIGNMENT (ptr_mode));

  r_func = expand_normal (t_func);
  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the descriptor.  */
  emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
  emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
				     POINTER_SIZE / BITS_PER_UNIT), r_func);

  return const0_rtx;
}
/* Expand a call to the builtin descriptor adjustment routine.  */

static rtx
expand_builtin_adjust_descriptor (tree exp)
{
  rtx tramp;

  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tramp = expand_normal (CALL_EXPR_ARG (exp, 0));

  /* Unalign the descriptor to allow runtime identification.  */
  tramp = plus_constant (ptr_mode, tramp,
			 targetm.calls.custom_function_descriptors);

  return force_operand (tramp, NULL_RTX);
}
/* Expand the call EXP to the built-in signbit, signbitf or signbitl
   function.  The function first checks whether the back end provides
   an insn to implement signbit for the respective mode.  If not, it
   checks whether the floating point format of the value is such that
   the sign bit can be extracted.  If that is not the case, error out.
   EXP is the expression that is a call to the builtin function; if
   convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_signbit (tree exp, rtx target)
{
  const struct real_format *fmt;
  scalar_float_mode fmode;
  scalar_int_mode rmode, imode;
  tree arg;
  int word, bitpos;
  enum insn_code icode;
  rtx temp;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
  rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
  fmt = REAL_MODE_FORMAT (fmode);

  arg = builtin_save_expr (arg);

  /* Expand the argument yielding a RTX expression.  */
  temp = expand_normal (arg);

  /* Check if the back end provides an insn that handles signbit for the
     argument's mode.  */
  icode = optab_handler (signbit_optab, fmode);
  if (icode != CODE_FOR_nothing)
    {
      rtx_insn *last = get_last_insn ();
      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
	return target;
      delete_insns_since (last);
    }

  /* For floating point formats without a sign bit, implement signbit
     as "ARG < 0.0".  */
  bitpos = fmt->signbit_ro;
  if (bitpos < 0)
    {
      /* But we can't do this if the format supports signed zero.  */
      gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));

      arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
			     build_real (TREE_TYPE (arg), dconst0));
      return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
    }

  if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
    {
      imode = int_mode_for_mode (fmode).require ();
      temp = gen_lowpart (imode, temp);
    }
  else
    {
      imode = word_mode;
      /* Handle targets with different FP word orders.  */
      if (FLOAT_WORDS_BIG_ENDIAN)
	word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
      else
	word = bitpos / BITS_PER_WORD;
      temp = operand_subword_force (temp, word, fmode);
      bitpos = bitpos % BITS_PER_WORD;
    }

  /* Force the intermediate word_mode (or narrower) result into a
     register.  This avoids attempting to create paradoxical SUBREGs
     of floating point modes below.  */
  temp = force_reg (imode, temp);

  /* If the bitpos is within the "result mode" lowpart, the operation
     can be implemented with a single bitwise AND.  Otherwise, we need
     a right shift and an AND.  */

  if (bitpos < GET_MODE_BITSIZE (rmode))
    {
      wide_int mask = wi::set_bit_in_zero (bitpos,
					   GET_MODE_PRECISION (rmode));

      if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
	temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp,
			   immed_wide_int_const (mask, rmode),
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }
  else
    {
      /* Perform a logical right shift to place the signbit in the least
	 significant bit, then truncate the result to the desired mode
	 and mask just this bit.  */
      temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
      temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp, const1_rtx,
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }

  return temp;
}
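
/* Editorial sketch (not GCC code) of the shift-and-mask fallback above
   for IEEE binary32, where the sign is bit 31:

     #include <stdint.h>
     #include <string.h>

     int
     signbit_float (float f)
     {
       uint32_t bits;
       memcpy (&bits, &f, sizeof bits);   // view the float's bytes
       return (bits >> 31) & 1;           // logical shift, then mask
     }

   The RTL version first picks the containing word so the same scheme
   also works for multi-word modes such as the x86 long double.  */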
/* Expand fork or exec calls.  TARGET is the desired target of the
   call.  EXP is the call.  FN is the
   identifier of the actual function.  IGNORE is nonzero if the
   value is to be ignored.  */

static rtx
expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
{
  tree id, decl;
  tree call;

  if (DECL_FUNCTION_CODE (fn) != BUILT_IN_FORK)
    {
      tree path = CALL_EXPR_ARG (exp, 0);
      /* Detect unterminated path.  */
      if (!check_read_access (exp, path))
	return NULL_RTX;

      /* Also detect unterminated first argument.  */
      switch (DECL_FUNCTION_CODE (fn))
	{
	case BUILT_IN_EXECL:
	case BUILT_IN_EXECLE:
	case BUILT_IN_EXECLP:
	  if (!check_read_access (exp, path))
	    return NULL_RTX;
	default:
	  break;
	}
    }

  /* If we are not profiling, just call the function.  */
  if (!profile_arc_flag)
    return NULL_RTX;

  /* Otherwise call the wrapper.  This should be equivalent for the rest of
     compiler, so the code does not diverge, and the wrapper may run the
     code necessary for keeping the profiling sane.  */

  switch (DECL_FUNCTION_CODE (fn))
    {
    case BUILT_IN_FORK:
      id = get_identifier ("__gcov_fork");
      break;

    case BUILT_IN_EXECL:
      id = get_identifier ("__gcov_execl");
      break;

    case BUILT_IN_EXECV:
      id = get_identifier ("__gcov_execv");
      break;

    case BUILT_IN_EXECLP:
      id = get_identifier ("__gcov_execlp");
      break;

    case BUILT_IN_EXECLE:
      id = get_identifier ("__gcov_execle");
      break;

    case BUILT_IN_EXECVP:
      id = get_identifier ("__gcov_execvp");
      break;

    case BUILT_IN_EXECVE:
      id = get_identifier ("__gcov_execve");
      break;

    default:
      gcc_unreachable ();
    }

  decl = build_decl (DECL_SOURCE_LOCATION (fn),
		     FUNCTION_DECL, id, TREE_TYPE (fn));
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  TREE_NOTHROW (decl) = 1;
  DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
  DECL_VISIBILITY_SPECIFIED (decl) = 1;
  call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
  return expand_call (call, target, ignore);
}
/* Reconstitute a mode for a __sync intrinsic operation.  Since the type of
   the pointer in these functions is void*, the tree optimizers may remove
   casts.  The mode computed in expand_builtin isn't reliable either, due
   to __sync_bool_compare_and_swap.

   FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
   group of builtins.  This gives us log2 of the mode size.  */

static inline machine_mode
get_builtin_sync_mode (int fcode_diff)
{
  /* The size is not negotiable, so ask not to get BLKmode in return
     if the target indicates that a smaller size would be better.  */
  return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
}
/* Expand the memory expression LOC and return the appropriate memory operand
   for the builtin_sync operations.  */

static rtx
get_builtin_sync_mem (tree loc, machine_mode mode)
{
  rtx addr, mem;
  int addr_space = TYPE_ADDR_SPACE (POINTER_TYPE_P (TREE_TYPE (loc))
				    ? TREE_TYPE (TREE_TYPE (loc))
				    : TREE_TYPE (loc));
  scalar_int_mode addr_mode = targetm.addr_space.address_mode (addr_space);

  addr = expand_expr (loc, NULL_RTX, addr_mode, EXPAND_SUM);
  addr = convert_memory_address (addr_mode, addr);

  /* Note that we explicitly do not want any alias information for this
     memory, so that we kill all other live memories.  Otherwise we don't
     satisfy the full barrier semantics of the intrinsic.  */
  mem = gen_rtx_MEM (mode, addr);

  set_mem_addr_space (mem, addr_space);

  mem = validize_mem (mem);

  /* The alignment needs to be at least according to that of the mode.  */
  set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
			   get_pointer_alignment (loc)));
  set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
  MEM_VOLATILE_P (mem) = 1;

  return mem;
}
/* Make sure an argument is in the right mode.
   EXP is the tree argument.
   MODE is the mode it should be in.  */

static rtx
expand_expr_force_mode (tree exp, machine_mode mode)
{
  rtx val;
  machine_mode old_mode;

  if (TREE_CODE (exp) == SSA_NAME
      && TYPE_MODE (TREE_TYPE (exp)) != mode)
    {
      /* Undo argument promotion if possible, as combine might not
	 be able to do it later due to MEM_VOLATILE_P uses in the
	 patterns.  */
      gimple *g = get_gimple_for_ssa_name (exp);
      if (g && gimple_assign_cast_p (g))
	{
	  tree rhs = gimple_assign_rhs1 (g);
	  tree_code code = gimple_assign_rhs_code (g);
	  if (CONVERT_EXPR_CODE_P (code)
	      && TYPE_MODE (TREE_TYPE (rhs)) == mode
	      && INTEGRAL_TYPE_P (TREE_TYPE (exp))
	      && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
	      && (TYPE_PRECISION (TREE_TYPE (exp))
		  > TYPE_PRECISION (TREE_TYPE (rhs))))
	    exp = rhs;
	}
    }

  val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
  /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
     of CONST_INTs, where we know the old_mode only from the call argument.  */

  old_mode = GET_MODE (val);
  if (old_mode == VOIDmode)
    old_mode = TYPE_MODE (TREE_TYPE (exp));
  val = convert_modes (mode, old_mode, val, 1);
  return val;
}
/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
   EXP is the CALL_EXPR.  CODE is the rtx code
   that corresponds to the arithmetic or logical operation from the name;
   an exception here is that NOT actually means NAND.  TARGET is an optional
   place for us to store the results; AFTER is true if this is the
   fetch_and_xxx form.  */

static rtx
expand_builtin_sync_operation (machine_mode mode, tree exp,
			       enum rtx_code code, bool after,
			       rtx target)
{
  rtx val, mem;
  location_t loc = EXPR_LOCATION (exp);

  if (code == NOT && warn_sync_nand)
    {
      tree fndecl = get_callee_fndecl (exp);
      enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

      static bool warned_f_a_n, warned_n_a_f;

      switch (fcode)
	{
	case BUILT_IN_SYNC_FETCH_AND_NAND_1:
	case BUILT_IN_SYNC_FETCH_AND_NAND_2:
	case BUILT_IN_SYNC_FETCH_AND_NAND_4:
	case BUILT_IN_SYNC_FETCH_AND_NAND_8:
	case BUILT_IN_SYNC_FETCH_AND_NAND_16:
	  if (warned_f_a_n)
	    break;

	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_f_a_n = true;
	  break;

	case BUILT_IN_SYNC_NAND_AND_FETCH_1:
	case BUILT_IN_SYNC_NAND_AND_FETCH_2:
	case BUILT_IN_SYNC_NAND_AND_FETCH_4:
	case BUILT_IN_SYNC_NAND_AND_FETCH_8:
	case BUILT_IN_SYNC_NAND_AND_FETCH_16:
	  if (warned_n_a_f)
	    break;

	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_n_a_f = true;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_fetch_op (target, mem, val, code,
				 MEMMODEL_SYNC_SEQ_CST, after);
}
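
/* Editorial note (illustration, not GCC code): the GCC 4.4 change warned
   about above altered the NAND definition.  As documented in the GCC
   manual, for __sync_fetch_and_nand:

     // before GCC 4.4:  *ptr = ~*ptr & val;
     // since  GCC 4.4:  *ptr = ~(*ptr & val);

   which is why code relying on the old behavior gets the note.  */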
/* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
   intrinsics.  EXP is the CALL_EXPR.  IS_BOOL is
   true if this is the boolean form.  TARGET is a place for us to store the
   results; this is NOT optional if IS_BOOL is true.  */

static rtx
expand_builtin_compare_and_swap (machine_mode mode, tree exp,
				 bool is_bool, rtx target)
{
  rtx old_val, new_val, mem;
  rtx *pbool, *poval;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
  new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);

  pbool = poval = NULL;
  if (target != const0_rtx)
    {
      if (is_bool)
	pbool = &target;
      else
	poval = &target;
    }
  if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
				       false, MEMMODEL_SYNC_SEQ_CST,
				       MEMMODEL_SYNC_SEQ_CST))
    return NULL_RTX;

  return target;
}
/* Expand the __sync_lock_test_and_set intrinsic.  Note that the most
   general form is actually an atomic exchange, and some targets only
   support a reduced form with the second argument being a constant 1.
   EXP is the CALL_EXPR; TARGET is an optional place for us to store
   the results.  */

static rtx
expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
				       rtx target)
{
  rtx val, mem;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_sync_lock_test_and_set (target, mem, val);
}
/* Expand the __sync_lock_release intrinsic.  EXP is the CALL_EXPR.  */

static void
expand_builtin_sync_lock_release (machine_mode mode, tree exp)
{
  rtx mem;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
}
/* Given an integer representing an ``enum memmodel'', verify its
   correctness and return the memory model enum.  */

static enum memmodel
get_memmodel (tree exp)
{
  rtx op;
  unsigned HOST_WIDE_INT val;
  location_t loc
    = expansion_point_location_if_in_system_header (input_location);

  /* If the parameter is not a constant, it's a run time value so we'll just
     convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking.  */
  if (TREE_CODE (exp) != INTEGER_CST)
    return MEMMODEL_SEQ_CST;

  op = expand_normal (exp);

  val = INTVAL (op);
  if (targetm.memmodel_check)
    val = targetm.memmodel_check (val);
  else if (val & ~MEMMODEL_MASK)
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "unknown architecture specifier in memory model to builtin");
      return MEMMODEL_SEQ_CST;
    }

  /* Should never see a user explicit SYNC memory model, so >= LAST works.  */
  if (memmodel_base (val) >= MEMMODEL_LAST)
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "invalid memory model argument to builtin");
      return MEMMODEL_SEQ_CST;
    }

  /* Workaround for Bugzilla 59448.  GCC doesn't track consume properly, so
     be conservative and promote consume to acquire.  */
  if (val == MEMMODEL_CONSUME)
    val = MEMMODEL_ACQUIRE;

  return (enum memmodel) val;
}
/* Expand the __atomic_exchange intrinsic:
   	TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
{
  rtx val, mem;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_exchange (target, mem, val, model);
}

/* Expand the __atomic_compare_exchange intrinsic:
       bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
                                       TYPE desired, BOOL weak,
                                       enum memmodel success,
                                       enum memmodel failure)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
                                        rtx target)
{
  rtx expect, desired, mem, oldval;
  rtx_code_label *label;
  enum memmodel success, failure;
  tree weak;
  bool is_weak;
  location_t loc
    = expansion_point_location_if_in_system_header (input_location);

  success = get_memmodel (CALL_EXPR_ARG (exp, 4));
  failure = get_memmodel (CALL_EXPR_ARG (exp, 5));

  if (failure > success)
    {
      warning_at (loc, OPT_Winvalid_memory_model,
                  "failure memory model cannot be stronger than success "
                  "memory model for %<__atomic_compare_exchange%>");
      success = MEMMODEL_SEQ_CST;
    }

  if (is_mm_release (failure) || is_mm_acq_rel (failure))
    {
      warning_at (loc, OPT_Winvalid_memory_model,
                  "invalid failure memory model for "
                  "%<__atomic_compare_exchange%>");
      failure = MEMMODEL_SEQ_CST;
      success = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  expect = expand_normal (CALL_EXPR_ARG (exp, 1));
  expect = convert_memory_address (Pmode, expect);
  expect = gen_rtx_MEM (mode, expect);
  desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);

  weak = CALL_EXPR_ARG (exp, 3);
  is_weak = false;
  if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
    is_weak = true;

  if (target == const0_rtx)
    target = NULL;

  /* Lest the rtl backend create a race condition with an improper store
     to memory, always create a new pseudo for OLDVAL.  */
  oldval = NULL;

  if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
                                       is_weak, success, failure))
    return NULL_RTX;

  /* Conditionally store back to EXPECT, lest we create a race condition
     with an improper store to memory.  */
  /* ??? With a rearrangement of atomics at the gimple level, we can handle
     the normal case where EXPECT is totally private, i.e. a register.  At
     which point the store can be unconditional.  */
  label = gen_label_rtx ();
  emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
                           GET_MODE (target), 1, label);
  emit_move_insn (expect, oldval);
  emit_label (label);

  return target;
}

/* Helper function for expand_ifn_atomic_compare_exchange: expand an
   internal ATOMIC_COMPARE_EXCHANGE call into a __atomic_compare_exchange_N
   call.  The weak parameter must be dropped to match the expected parameter
   list and the expected argument changed from value to pointer to memory
   location.  */

static void
expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
{
  unsigned int z;
  vec<tree, va_gc> *vec;

  vec_alloc (vec, 5);
  vec->quick_push (gimple_call_arg (call, 0));
  tree expected = gimple_call_arg (call, 1);
  rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
                                      TREE_TYPE (expected));
  rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
  if (expd != x)
    emit_move_insn (x, expd);
  tree v = make_tree (TREE_TYPE (expected), x);
  vec->quick_push (build1 (ADDR_EXPR,
                           build_pointer_type (TREE_TYPE (expected)), v));
  vec->quick_push (gimple_call_arg (call, 2));
  /* Skip the boolean weak parameter.  */
  for (z = 4; z < 6; z++)
    vec->quick_push (gimple_call_arg (call, z));
  /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}.  */
  unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
  gcc_assert (bytes_log2 < 5);
  built_in_function fncode
    = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
                           + bytes_log2);
  tree fndecl = builtin_decl_explicit (fncode);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
                    fndecl);
  tree exp = build_call_vec (boolean_type_node, fn, vec);
  tree lhs = gimple_call_lhs (call);
  rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
  if (lhs)
    {
      rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
      if (GET_MODE (boolret) != mode)
        boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
      x = force_reg (mode, x);
      write_complex_part (target, boolret, true);
      write_complex_part (target, x, false);
    }
}
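
/* For illustration, the rewriting performed above (a sketch): an
   internal call such as

     .ATOMIC_COMPARE_EXCHANGE (ptr, expected, desired, 4, success, failure)

   becomes an ordinary builtin call

     __atomic_compare_exchange_4 (ptr, &tmp, desired, success, failure)

   where `tmp' is a hypothetical name for the stack temporary X created
   above to pass EXPECTED by address; the weak flag, encoded in argument
   3 of the internal call, is dropped because the library routine does
   not take it.  */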

/* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function.  */

void
expand_ifn_atomic_compare_exchange (gcall *call)
{
  int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
  gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
  machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
  rtx expect, desired, mem, oldval, boolret;
  enum memmodel success, failure;
  tree lhs;
  bool is_weak;
  location_t loc
    = expansion_point_location_if_in_system_header (gimple_location (call));

  success = get_memmodel (gimple_call_arg (call, 4));
  failure = get_memmodel (gimple_call_arg (call, 5));

  if (failure > success)
    {
      warning_at (loc, OPT_Winvalid_memory_model,
                  "failure memory model cannot be stronger than success "
                  "memory model for %<__atomic_compare_exchange%>");
      success = MEMMODEL_SEQ_CST;
    }

  if (is_mm_release (failure) || is_mm_acq_rel (failure))
    {
      warning_at (loc, OPT_Winvalid_memory_model,
                  "invalid failure memory model for "
                  "%<__atomic_compare_exchange%>");
      failure = MEMMODEL_SEQ_CST;
      success = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    {
      expand_ifn_atomic_compare_exchange_into_call (call, mode);
      return;
    }

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);

  expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
  desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);

  is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;

  boolret = NULL;
  oldval = NULL;

  if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
                                       is_weak, success, failure))
    {
      expand_ifn_atomic_compare_exchange_into_call (call, mode);
      return;
    }

  lhs = gimple_call_lhs (call);
  if (lhs)
    {
      rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
      if (GET_MODE (boolret) != mode)
        boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
      write_complex_part (target, boolret, true);
      write_complex_part (target, oldval, false);
    }
}

/* Expand the __atomic_load intrinsic:
       TYPE __atomic_load (TYPE *object, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
{
  rtx mem;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 1));
  if (is_mm_release (model) || is_mm_acq_rel (model))
    {
      location_t loc
        = expansion_point_location_if_in_system_header (input_location);
      warning_at (loc, OPT_Winvalid_memory_model,
                  "invalid memory model for %<__atomic_load%>");
      model = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operand.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  return expand_atomic_load (target, mem, model);
}

/* Expand the __atomic_store intrinsic:
       void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_store (machine_mode mode, tree exp)
{
  rtx mem, val;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));
  if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
        || is_mm_release (model)))
    {
      location_t loc
        = expansion_point_location_if_in_system_header (input_location);
      warning_at (loc, OPT_Winvalid_memory_model,
                  "invalid memory model for %<__atomic_store%>");
      model = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_store (mem, val, model, false);
}
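
/* For illustration (a sketch; `v' is hypothetical): of the C11 memory
   models, only relaxed, release and seq_cst are valid for a store:

     __atomic_store_n (&v, 1, __ATOMIC_RELEASE);   // accepted
     __atomic_store_n (&v, 1, __ATOMIC_ACQUIRE);   // warned, SEQ_CST used
*/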

/* Expand the __atomic_fetch_XXX intrinsic:
       TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.
   CODE is the operation, PLUS, MINUS, AND, XOR, or IOR.
   FETCH_AFTER is true if returning the result of the operation.
   FETCH_AFTER is false if returning the value before the operation.
   IGNORE is true if the result is not used.
   EXT_CALL is the correct builtin for an external call if this cannot be
   resolved to an instruction sequence.  */

static rtx
expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
                                enum rtx_code code, bool fetch_after,
                                bool ignore, enum built_in_function ext_call)
{
  rtx val, mem, ret;
  enum memmodel model;
  tree fndecl;
  tree addr;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  /* Only try generating instructions if inlining is turned on.  */
  if (flag_inline_atomics)
    {
      ret = expand_atomic_fetch_op (target, mem, val, code, model,
                                    fetch_after);
      if (ret)
        return ret;
    }

  /* Return if a different routine isn't needed for the library call.  */
  if (ext_call == BUILT_IN_NONE)
    return NULL_RTX;

  /* Change the call to the specified function.  */
  fndecl = get_callee_fndecl (exp);
  addr = CALL_EXPR_FN (exp);
  STRIP_NOPS (addr);

  gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
  TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);

  /* If we will emit code after the call, the call cannot be a tail call.
     If it is emitted as a tail call, a barrier is emitted after it, and
     then all trailing code is removed.  */
  if (!ignore)
    CALL_EXPR_TAILCALL (exp) = 0;

  /* Expand the call here so we can emit trailing code.  */
  ret = expand_call (exp, target, ignore);

  /* Replace the original function just in case it matters.  */
  TREE_OPERAND (addr, 0) = fndecl;

  /* Then issue the arithmetic correction to return the right result.  */
  if (!ignore)
    {
      if (code == NOT)
        {
          ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
                                     OPTAB_LIB_WIDEN);
          ret = expand_simple_unop (mode, NOT, ret, target, true);
        }
      else
        ret = expand_simple_binop (mode, code, ret, val, target, true,
                                   OPTAB_LIB_WIDEN);
    }
  return ret;
}
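
/* For illustration, the arithmetic correction above (a sketch): if
   __atomic_add_fetch must fall back to the external __atomic_fetch_add
   routine, the "old" value it returns has to be fixed up into the
   "new" value:

     ret = __atomic_fetch_add (ptr, val, model);  // returns old value
     ret = ret + val;                             // now the add_fetch result

   For NAND the correction is ret = ~(ret & val), which is why the NOT
   case above needs the extra unop.  */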

/* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function.  */

void
expand_ifn_atomic_bit_test_and (gcall *call)
{
  tree ptr = gimple_call_arg (call, 0);
  tree bit = gimple_call_arg (call, 1);
  tree flag = gimple_call_arg (call, 2);
  tree lhs = gimple_call_lhs (call);
  enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
  machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
  enum rtx_code code;
  optab optab;
  class expand_operand ops[5];

  gcc_assert (flag_inline_atomics);

  if (gimple_call_num_args (call) == 4)
    model = get_memmodel (gimple_call_arg (call, 3));

  rtx mem = get_builtin_sync_mem (ptr, mode);
  rtx val = expand_expr_force_mode (bit, mode);

  switch (gimple_call_internal_fn (call))
    {
    case IFN_ATOMIC_BIT_TEST_AND_SET:
      code = IOR;
      optab = atomic_bit_test_and_set_optab;
      break;
    case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
      code = XOR;
      optab = atomic_bit_test_and_complement_optab;
      break;
    case IFN_ATOMIC_BIT_TEST_AND_RESET:
      code = AND;
      optab = atomic_bit_test_and_reset_optab;
      break;
    default:
      gcc_unreachable ();
    }

  if (lhs == NULL_TREE)
    {
      val = expand_simple_binop (mode, ASHIFT, const1_rtx,
                                 val, NULL_RTX, true, OPTAB_DIRECT);
      if (code == AND)
        val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
      expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
      return;
    }

  rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  enum insn_code icode = direct_optab_handler (optab, mode);
  gcc_assert (icode != CODE_FOR_nothing);
  create_output_operand (&ops[0], target, mode);
  create_fixed_operand (&ops[1], mem);
  create_convert_operand_to (&ops[2], val, mode, true);
  create_integer_operand (&ops[3], model);
  create_integer_operand (&ops[4], integer_onep (flag));
  if (maybe_expand_insn (icode, 5, ops))
    return;

  rtx bitval = val;
  val = expand_simple_binop (mode, ASHIFT, const1_rtx,
                             val, NULL_RTX, true, OPTAB_DIRECT);
  rtx maskval = val;
  if (code == AND)
    val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
  rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
                                       code, model, false);
  if (integer_onep (flag))
    {
      result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
                                    NULL_RTX, true, OPTAB_DIRECT);
      result = expand_simple_binop (mode, AND, result, const1_rtx, target,
                                    true, OPTAB_DIRECT);
    }
  else
    result = expand_simple_binop (mode, AND, result, maskval, target, true,
                                  OPTAB_DIRECT);
  if (result != target)
    emit_move_insn (target, result);
}
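
/* For illustration, the GIMPLE-level pattern this internal function
   replaces (a sketch; `word' and `bit' are hypothetical):

     old = __atomic_fetch_or (&word, 1 << bit, __ATOMIC_SEQ_CST);
     set = (old >> bit) & 1;      // or: old & (1 << bit), depending on FLAG

   which maps to IFN_ATOMIC_BIT_TEST_AND_SET and, when the target
   provides atomic_bit_test_and_set_optab, to a single atomic
   bit-test-and-set instruction.  */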

/* Expand an atomic clear operation.
       void __atomic_clear (BOOL *obj, enum memmodel)
   EXP is the call expression.  */

static rtx
expand_builtin_atomic_clear (tree exp)
{
  machine_mode mode;
  rtx mem, ret;
  enum memmodel model;

  mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  model = get_memmodel (CALL_EXPR_ARG (exp, 1));

  if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
    {
      location_t loc
        = expansion_point_location_if_in_system_header (input_location);
      warning_at (loc, OPT_Winvalid_memory_model,
                  "invalid memory model for %<__atomic_store%>");
      model = MEMMODEL_SEQ_CST;
    }

  /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
     Failing that, a store is issued by __atomic_store.  The only way this can
     fail is if the bool type is larger than a word size.  Unlikely, but
     handle it anyway for completeness.  Assume a single threaded model since
     there is no atomic support in this case, and no barriers are required.  */
  ret = expand_atomic_store (mem, const0_rtx, model, true);
  if (!ret)
    emit_move_insn (mem, const0_rtx);
  return const0_rtx;
}

/* Expand an atomic test_and_set operation.
       bool __atomic_test_and_set (BOOL *obj, enum memmodel)
   EXP is the call expression.  */

static rtx
expand_builtin_atomic_test_and_set (tree exp, rtx target)
{
  rtx mem;
  enum memmodel model;
  machine_mode mode;

  mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  model = get_memmodel (CALL_EXPR_ARG (exp, 1));

  return expand_atomic_test_and_set (target, mem, model);
}

/* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
   this architecture.  If ARG1 is NULL, use typical alignment for size ARG0.  */

static tree
fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
{
  int size;
  machine_mode mode;
  unsigned int mode_align, type_align;

  if (TREE_CODE (arg0) != INTEGER_CST)
    return NULL_TREE;

  /* We need a corresponding integer mode for the access to be lock-free.  */
  size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
  if (!int_mode_for_size (size, 0).exists (&mode))
    return boolean_false_node;

  mode_align = GET_MODE_ALIGNMENT (mode);

  if (TREE_CODE (arg1) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));

      /* Either this argument is null, or it's a fake pointer encoding
         the alignment of the object.  */
      val = least_bit_hwi (val);
      val *= BITS_PER_UNIT;

      if (val == 0 || mode_align < val)
        type_align = mode_align;
      else
        type_align = val;
    }
  else
    {
      tree ttype = TREE_TYPE (arg1);

      /* This function is usually invoked and folded immediately by the front
         end before anything else has a chance to look at it.  The pointer
         parameter at this point is usually cast to a void *, so check for
         that and look past the cast.  */
      if (CONVERT_EXPR_P (arg1)
          && POINTER_TYPE_P (ttype)
          && VOID_TYPE_P (TREE_TYPE (ttype))
          && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
        arg1 = TREE_OPERAND (arg1, 0);

      ttype = TREE_TYPE (arg1);
      gcc_assert (POINTER_TYPE_P (ttype));

      /* Get the underlying type of the object.  */
      ttype = TREE_TYPE (ttype);
      type_align = TYPE_ALIGN (ttype);
    }

  /* If the object has smaller alignment, the lock free routines cannot
     be used.  */
  if (type_align < mode_align)
    return boolean_false_node;

  /* Check if a compare_and_swap pattern exists for the mode which represents
     the required size.  The pattern is not allowed to fail, so the existence
     of the pattern indicates support is present.  Also require that an
     atomic load exists for the required size.  */
  if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
    return boolean_true_node;
  else
    return boolean_false_node;
}
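
/* For illustration (a sketch; `int' chosen arbitrarily):

     _Static_assert (__atomic_always_lock_free (sizeof (int), 0),
                     "int must be lock-free");

   A null second argument means "typical alignment for the size"; a
   non-null constant is treated as a fake pointer whose lowest set bit
   encodes the object's alignment, as decoded above.  */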

/* Return true if the parameters to call EXP represent an object which will
   always generate lock free instructions.  The first argument represents the
   size of the object, and the second parameter is a pointer to the object
   itself.  If NULL is passed for the object, then the result is based on
   typical alignment for an object of the specified size.  Otherwise return
   false.  */

static rtx
expand_builtin_atomic_always_lock_free (tree exp)
{
  tree size;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);

  if (TREE_CODE (arg0) != INTEGER_CST)
    {
      error ("non-constant argument 1 to %qs", "__atomic_always_lock_free");
      return const0_rtx;
    }

  size = fold_builtin_atomic_always_lock_free (arg0, arg1);
  if (size == boolean_true_node)
    return const1_rtx;
  return const0_rtx;
}

/* Return one or zero if it can be determined that object ARG1 of size ARG0
   is lock free on this architecture.  */

static tree
fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
{
  if (!flag_inline_atomics)
    return NULL_TREE;

  /* If it isn't always lock free, don't generate a result.  */
  if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
    return boolean_true_node;

  return NULL_TREE;
}

/* Return true if the parameters to call EXP represent an object which will
   always generate lock free instructions.  The first argument represents the
   size of the object, and the second parameter is a pointer to the object
   itself.  If NULL is passed for the object, then the result is based on
   typical alignment for an object of the specified size.  Otherwise return
   NULL.  */

static rtx
expand_builtin_atomic_is_lock_free (tree exp)
{
  tree size;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);

  if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
    {
      error ("non-integer argument 1 to %qs", "__atomic_is_lock_free");
      return NULL_RTX;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* If the value is known at compile time, return the RTX for it.  */
  size = fold_builtin_atomic_is_lock_free (arg0, arg1);
  if (size == boolean_true_node)
    return const1_rtx;

  return NULL_RTX;
}

/* Expand the __atomic_thread_fence intrinsic:
       void __atomic_thread_fence (enum memmodel)
   EXP is the CALL_EXPR.  */

static void
expand_builtin_atomic_thread_fence (tree exp)
{
  enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
  expand_mem_thread_fence (model);
}

/* Expand the __atomic_signal_fence intrinsic:
       void __atomic_signal_fence (enum memmodel)
   EXP is the CALL_EXPR.  */

static void
expand_builtin_atomic_signal_fence (tree exp)
{
  enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
  expand_mem_signal_fence (model);
}

/* Expand the __sync_synchronize intrinsic.  */

static void
expand_builtin_sync_synchronize (void)
{
  expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
}

static rtx
expand_builtin_thread_pointer (tree exp, rtx target)
{
  enum insn_code icode;
  if (!validate_arglist (exp, VOID_TYPE))
    return const0_rtx;
  icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
  if (icode != CODE_FOR_nothing)
    {
      class expand_operand op;
      /* If the target is not suitable then create a new target.  */
      if (target == NULL_RTX
          || !REG_P (target)
          || GET_MODE (target) != Pmode)
        target = gen_reg_rtx (Pmode);
      create_output_operand (&op, target, Pmode);
      expand_insn (icode, 1, &op);
      return target;
    }
  error ("%<__builtin_thread_pointer%> is not supported on this target");
  return const0_rtx;
}

static void
expand_builtin_set_thread_pointer (tree exp)
{
  enum insn_code icode;
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return;
  icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
  if (icode != CODE_FOR_nothing)
    {
      class expand_operand op;
      rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
                             Pmode, EXPAND_NORMAL);
      create_input_operand (&op, val, Pmode);
      expand_insn (icode, 1, &op);
      return;
    }
  error ("%<__builtin_set_thread_pointer%> is not supported on this target");
}

/* Emit code to restore the current value of stack.  */

static void
expand_stack_restore (tree var)
{
  rtx_insn *prev;
  rtx sa = expand_normal (var);

  sa = convert_memory_address (Pmode, sa);

  prev = get_last_insn ();
  emit_stack_restore (SAVE_BLOCK, sa);

  record_new_stack_level ();

  fixup_args_size_notes (prev, get_last_insn (), 0);
}

/* Emit code to save the current value of stack.  */

static rtx
expand_stack_save (void)
{
  rtx ret = NULL_RTX;

  emit_stack_save (SAVE_BLOCK, &ret);
  return ret;
}

/* Emit code to get the OpenACC gang, worker or vector id or size.  */

static rtx
expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
{
  const char *name;
  rtx fallback_retval;
  rtx_insn *(*gen_fn) (rtx, rtx);
  switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
    {
    case BUILT_IN_GOACC_PARLEVEL_ID:
      name = "__builtin_goacc_parlevel_id";
      fallback_retval = const0_rtx;
      gen_fn = targetm.gen_oacc_dim_pos;
      break;
    case BUILT_IN_GOACC_PARLEVEL_SIZE:
      name = "__builtin_goacc_parlevel_size";
      fallback_retval = const1_rtx;
      gen_fn = targetm.gen_oacc_dim_size;
      break;
    default:
      gcc_unreachable ();
    }

  if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
    {
      error ("%qs only supported in OpenACC code", name);
      return const0_rtx;
    }

  tree arg = CALL_EXPR_ARG (exp, 0);
  if (TREE_CODE (arg) != INTEGER_CST)
    {
      error ("non-constant argument 0 to %qs", name);
      return const0_rtx;
    }

  int dim = TREE_INT_CST_LOW (arg);
  switch (dim)
    {
    case GOMP_DIM_GANG:
    case GOMP_DIM_WORKER:
    case GOMP_DIM_VECTOR:
      break;
    default:
      error ("illegal argument 0 to %qs", name);
      return const0_rtx;
    }

  if (ignore)
    return target;

  if (target == NULL_RTX)
    target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

  if (!targetm.have_oacc_dim_size ())
    {
      emit_move_insn (target, fallback_retval);
      return target;
    }

  rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
  emit_insn (gen_fn (reg, GEN_INT (dim)));
  if (reg != target)
    emit_move_insn (target, reg);

  return target;
}

/* Expand a string compare operation using a sequence of char comparisons
   to get rid of the calling overhead, with result going to TARGET if
   that's convenient.

   VAR_STR is the variable string source;
   CONST_STR is the constant string source;
   LENGTH is the number of chars to compare;
   CONST_STR_N indicates which source string is the constant string;
   IS_MEMCMP indicates whether it's a memcmp or strcmp.

   The comparison is expanded to:
   (assume const_str_n is 2, i.e., arg2 is a constant string)

   target = (int) (unsigned char) var_str[0]
            - (int) (unsigned char) const_str[0];
   if (target != 0)
     goto ne_label;
     ...
   target = (int) (unsigned char) var_str[length - 2]
            - (int) (unsigned char) const_str[length - 2];
   if (target != 0)
     goto ne_label;
   target = (int) (unsigned char) var_str[length - 1]
            - (int) (unsigned char) const_str[length - 1];
   ne_label:
  */

static rtx
inline_string_cmp (rtx target, tree var_str, const char *const_str,
                   unsigned HOST_WIDE_INT length,
                   int const_str_n, machine_mode mode)
{
  HOST_WIDE_INT offset = 0;
  rtx var_rtx_array
    = get_memory_rtx (var_str, build_int_cst (unsigned_type_node, length));
  rtx var_rtx = NULL_RTX;
  rtx const_rtx = NULL_RTX;
  rtx result = target ? target : gen_reg_rtx (mode);
  rtx_code_label *ne_label = gen_label_rtx ();
  tree unit_type_node = unsigned_char_type_node;
  scalar_int_mode unit_mode
    = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);

  start_sequence ();

  for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
    {
      var_rtx
        = adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
      const_rtx = c_readstr (const_str + offset, unit_mode);
      rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
      rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;

      op0 = convert_modes (mode, unit_mode, op0, 1);
      op1 = convert_modes (mode, unit_mode, op1, 1);
      result = expand_simple_binop (mode, MINUS, op0, op1,
                                    result, 1, OPTAB_WIDEN);
      if (i < length - 1)
        emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
                                 mode, true, ne_label);
      offset += GET_MODE_SIZE (unit_mode);
    }

  emit_label (ne_label);
  rtx_insn *insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return result;
}
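
/* For illustration, a concrete instance (a sketch): with LENGTH 2,
   CONST_STR_N == 2 and CONST_STR == "ab", the loop above emits the
   equivalent of

     result = (int) var[0] - (int) 'a';
     if (result != 0) goto ne_label;
     result = (int) var[1] - (int) 'b';
   ne_label:

   with each subtraction widened from unsigned char to MODE.  */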

/* Inline expansion of a call to str(n)cmp or memcmp, with result going
   to TARGET if that's convenient.
   If the call has not been inlined, return NULL_RTX.  */

static rtx
inline_expand_builtin_bytecmp (tree exp, rtx target)
{
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);

  /* Do NOT apply this inlining expansion when optimizing for size or
     optimization level below 2.  */
  if (optimize < 2 || optimize_insn_for_size_p ())
    return NULL_RTX;

  gcc_checking_assert (fcode == BUILT_IN_STRCMP
                       || fcode == BUILT_IN_STRNCMP
                       || fcode == BUILT_IN_MEMCMP);

  /* On a target where the type of the call (int) has same or narrower
     precision than unsigned char, give up the inlining expansion.  */
  if (TYPE_PRECISION (unsigned_char_type_node)
      >= TYPE_PRECISION (TREE_TYPE (exp)))
    return NULL_RTX;

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree arg2 = CALL_EXPR_ARG (exp, 1);
  tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;

  unsigned HOST_WIDE_INT len1 = 0;
  unsigned HOST_WIDE_INT len2 = 0;
  unsigned HOST_WIDE_INT len3 = 0;

  /* Get the object representation of the initializers of ARG1 and ARG2
     as strings, provided they refer to constant objects, with their byte
     sizes in LEN1 and LEN2, respectively.  */
  const char *bytes1 = getbyterep (arg1, &len1);
  const char *bytes2 = getbyterep (arg2, &len2);

  /* Fail if neither argument refers to an initialized constant.  */
  if (!bytes1 && !bytes2)
    return NULL_RTX;

  if (is_ncmp)
    {
      /* Fail if the memcmp/strncmp bound is not a constant.  */
      if (!tree_fits_uhwi_p (len3_tree))
        return NULL_RTX;

      len3 = tree_to_uhwi (len3_tree);

      if (fcode == BUILT_IN_MEMCMP)
        {
          /* Fail if the memcmp bound is greater than the size of either
             of the two constant objects.  */
          if ((bytes1 && len1 < len3)
              || (bytes2 && len2 < len3))
            return NULL_RTX;
        }
    }

  if (fcode != BUILT_IN_MEMCMP)
    {
      /* For string functions (i.e., strcmp and strncmp) reduce LEN1
         and LEN2 to the length of the nul-terminated string stored
         in each.  */
      if (bytes1 != NULL)
        len1 = strnlen (bytes1, len1) + 1;
      if (bytes2 != NULL)
        len2 = strnlen (bytes2, len2) + 1;
    }

  /* See inline_string_cmp.  */
  int const_str_n;
  if (!len1)
    const_str_n = 2;
  else if (!len2)
    const_str_n = 1;
  else if (len2 > len1)
    const_str_n = 1;
  else
    const_str_n = 2;

  /* For strncmp only, compute the new bound as the smallest of
     the lengths of the two strings (plus 1) and the bound provided
     to the function.  */
  unsigned HOST_WIDE_INT bound = (const_str_n == 1) ? len1 : len2;
  if (is_ncmp && len3 < bound)
    bound = len3;

  /* If the bound of the comparison is larger than the threshold,
     do nothing.  */
  if (bound > (unsigned HOST_WIDE_INT) param_builtin_string_cmp_inline_length)
    return NULL_RTX;

  machine_mode mode = TYPE_MODE (TREE_TYPE (exp));

  /* Now, start inline expansion of the call.  */
  return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
                            (const_str_n == 1) ? bytes1 : bytes2, bound,
                            const_str_n, mode);
}
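
/* For illustration, how the bound is chosen above (a sketch; `s' is a
   hypothetical non-constant string):

     strncmp (s, "abc", 5)   // len2 = 4 (nul included), len3 = 5
                             // bound = min (4, 5) = 4

   The call is inlined only if this bound does not exceed
   param_builtin_string_cmp_inline_length.  */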

/* Expand a call to __builtin_speculation_safe_value_<N>.  MODE
   represents the size of the first argument to that call, or VOIDmode
   if the argument is a pointer.  IGNORE will be true if the result
   isn't used.  */

static rtx
expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
                               bool ignore)
{
  rtx val, failsafe;
  unsigned nargs = call_expr_nargs (exp);

  tree arg0 = CALL_EXPR_ARG (exp, 0);

  if (mode == VOIDmode)
    {
      mode = TYPE_MODE (TREE_TYPE (arg0));
      gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
    }

  val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);

  /* An optional second argument can be used as a failsafe value on
     some machines.  If it isn't present, then the failsafe value is
     assumed to be 0.  */
  if (nargs > 1)
    {
      tree arg1 = CALL_EXPR_ARG (exp, 1);
      failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
    }
  else
    failsafe = const0_rtx;

  /* If the result isn't used, the behavior is undefined.  It would be
     nice to emit a warning here, but path splitting means this might
     happen with legitimate code.  So simply drop the builtin
     expansion in that case; we've handled any side-effects above.  */
  if (ignore)
    return const0_rtx;

  /* If we don't have a suitable target, create one to hold the result.  */
  if (target == NULL || GET_MODE (target) != mode)
    target = gen_reg_rtx (mode);

  if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
    val = convert_modes (mode, VOIDmode, val, false);

  return targetm.speculation_safe_value (mode, target, val, failsafe);
}
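
/* For illustration (a sketch; `array', `idx' and `bound' are
   hypothetical user code):

     if (idx < bound)
       return array[__builtin_speculation_safe_value (idx)];

   VAL is the value being laundered; the optional second argument
   (FAILSAFE, 0 when omitted) is what the target may substitute if the
   guarding branch was mis-speculated.  */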

/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

rtx
expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
                int ignore)
{
  tree fndecl = get_callee_fndecl (exp);
  machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
  int flags;

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return targetm.expand_builtin (exp, target, subtarget, mode, ignore);

  /* When ASan is enabled, we don't want to expand some memory/string
     builtins and rely on libsanitizer's hooks.  This allows us to avoid
     redundant checks and be sure that possible overflow will be detected
     by ASan.  */

  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
    return expand_call (exp, target, ignore);

  /* When not optimizing, generate calls to library functions for a certain
     set of builtins.  */
  if (!optimize
      && !called_as_built_in (fndecl)
      && fcode != BUILT_IN_FORK
      && fcode != BUILT_IN_EXECL
      && fcode != BUILT_IN_EXECV
      && fcode != BUILT_IN_EXECLP
      && fcode != BUILT_IN_EXECLE
      && fcode != BUILT_IN_EXECVP
      && fcode != BUILT_IN_EXECVE
      && !ALLOCA_FUNCTION_CODE_P (fcode)
      && fcode != BUILT_IN_FREE)
    return expand_call (exp, target, ignore);

  /* The built-in function expanders test for target == const0_rtx
     to determine whether the function's result will be ignored.  */
  if (ignore)
    target = const0_rtx;

  /* If the result of a pure or const built-in function is ignored, and
     none of its arguments are volatile, we can avoid expanding the
     built-in call and just evaluate the arguments for side-effects.  */
  if (target == const0_rtx
      && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
      && !(flags & ECF_LOOPING_CONST_OR_PURE))
    {
      bool volatilep = false;
      tree arg;
      call_expr_arg_iterator iter;

      FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
        if (TREE_THIS_VOLATILE (arg))
          {
            volatilep = true;
            break;
          }

      if (!volatilep)
        {
          FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
            expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
          return const0_rtx;
        }
    }

  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_FABS):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
    case BUILT_IN_FABSD32:
    case BUILT_IN_FABSD64:
    case BUILT_IN_FABSD128:
      target = expand_builtin_fabs (exp, target, subtarget);
      if (target)
        return target;
      break;

    CASE_FLT_FN (BUILT_IN_COPYSIGN):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
      target = expand_builtin_copysign (exp, target, subtarget);
      if (target)
        return target;
      break;

      /* Just do a normal library call if we were unable to fold
         the values.  */
    CASE_FLT_FN (BUILT_IN_CABS):
      break;

    CASE_FLT_FN (BUILT_IN_FMA):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
      target = expand_builtin_mathfn_ternary (exp, target, subtarget);
      if (target)
        return target;
      break;

    CASE_FLT_FN (BUILT_IN_ILOGB):
      if (! flag_unsafe_math_optimizations)
        break;
      gcc_fallthrough ();
    CASE_FLT_FN (BUILT_IN_ISINF):
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_ISFINITE:
    case BUILT_IN_ISNORMAL:
      target = expand_builtin_interclass_mathfn (exp, target);
      if (target)
        return target;
      break;

    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      target = expand_builtin_int_roundingfn (exp, target);
      if (target)
        return target;
      break;

    CASE_FLT_FN (BUILT_IN_IRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_IROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      target = expand_builtin_int_roundingfn_2 (exp, target);
      if (target)
        return target;
      break;

    CASE_FLT_FN (BUILT_IN_POWI):
      target = expand_builtin_powi (exp, target);
      if (target)
        return target;
      break;

    CASE_FLT_FN (BUILT_IN_CEXPI):
      target = expand_builtin_cexpi (exp, target);
      gcc_assert (target);
      return target;

    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      if (! flag_unsafe_math_optimizations)
        break;
      target = expand_builtin_mathfn_3 (exp, target, subtarget);
      if (target)
        return target;
      break;

    CASE_FLT_FN (BUILT_IN_SINCOS):
      if (! flag_unsafe_math_optimizations)
        break;
      target = expand_builtin_sincos (exp);
      if (target)
        return target;
      break;

    case BUILT_IN_APPLY_ARGS:
      return expand_builtin_apply_args ();

      /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
         FUNCTION with a copy of the parameters described by
         ARGUMENTS, and ARGSIZE.  It returns a block of memory
         allocated on the stack into which is stored all the registers
         that might possibly be used for returning the result of a
         function.  ARGUMENTS is the value returned by
         __builtin_apply_args.  ARGSIZE is the number of bytes of
         arguments that must be copied.  ??? How should this value be
         computed?  We'll also need a safe worst case value for varargs
         functions.  */
    case BUILT_IN_APPLY:
      if (!validate_arglist (exp, POINTER_TYPE,
                             POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
          && !validate_arglist (exp, REFERENCE_TYPE,
                                POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
        return const0_rtx;
      else
        {
          rtx ops[3];

          ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
          ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
          ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));

          return expand_builtin_apply (ops[0], ops[1], ops[2]);
        }

      /* __builtin_return (RESULT) causes the function to return the
         value described by RESULT.  RESULT is address of the block of
         memory returned by __builtin_apply.  */
    case BUILT_IN_RETURN:
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
        expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
      return const0_rtx;

    case BUILT_IN_SAVEREGS:
      return expand_builtin_saveregs ();

    case BUILT_IN_VA_ARG_PACK:
      /* All valid uses of __builtin_va_arg_pack () are removed during
         inlining.  */
      error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
      return const0_rtx;

    case BUILT_IN_VA_ARG_PACK_LEN:
      /* All valid uses of __builtin_va_arg_pack_len () are removed during
         inlining.  */
      error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
      return const0_rtx;

      /* Return the address of the first anonymous stack arg.  */
    case BUILT_IN_NEXT_ARG:
      if (fold_builtin_next_arg (exp, false))
        return const0_rtx;
      return expand_builtin_next_arg ();

    case BUILT_IN_CLEAR_CACHE:
      target = expand_builtin___clear_cache (exp);
      if (target)
        return target;
      break;

    case BUILT_IN_CLASSIFY_TYPE:
      return expand_builtin_classify_type (exp);

    case BUILT_IN_CONSTANT_P:
      return const0_rtx;

    case BUILT_IN_FRAME_ADDRESS:
    case BUILT_IN_RETURN_ADDRESS:
      return expand_builtin_frame_address (fndecl, exp);

      /* Returns the address of the area where the structure is returned.
         0 otherwise.  */
    case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      if (call_expr_nargs (exp) != 0
          || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
          || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
        return const0_rtx;
      else
        return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);

    CASE_BUILT_IN_ALLOCA:
      target = expand_builtin_alloca (exp);
      if (target)
        return target;
      break;

    case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
      return expand_asan_emit_allocas_unpoison (exp);

    case BUILT_IN_STACK_SAVE:
      return expand_stack_save ();

    case BUILT_IN_STACK_RESTORE:
      expand_stack_restore (CALL_EXPR_ARG (exp, 0));
      return const0_rtx;

    case BUILT_IN_BSWAP16:
    case BUILT_IN_BSWAP32:
    case BUILT_IN_BSWAP64:
    case BUILT_IN_BSWAP128:
      target = expand_builtin_bswap (target_mode, exp, target, subtarget);
      if (target)
        return target;
      break;

    CASE_INT_FN (BUILT_IN_FFS):
      target = expand_builtin_unop (target_mode, exp, target,
                                    subtarget, ffs_optab);
      if (target)
        return target;
      break;

    CASE_INT_FN (BUILT_IN_CLZ):
      target = expand_builtin_unop (target_mode, exp, target,
                                    subtarget, clz_optab);
      if (target)
        return target;
      break;

    CASE_INT_FN (BUILT_IN_CTZ):
      target = expand_builtin_unop (target_mode, exp, target,
                                    subtarget, ctz_optab);
      if (target)
        return target;
      break;

    CASE_INT_FN (BUILT_IN_CLRSB):
      target = expand_builtin_unop (target_mode, exp, target,
                                    subtarget, clrsb_optab);
      if (target)
        return target;
      break;

    CASE_INT_FN (BUILT_IN_POPCOUNT):
      target = expand_builtin_unop (target_mode, exp, target,
                                    subtarget, popcount_optab);
      if (target)
        return target;
      break;

    CASE_INT_FN (BUILT_IN_PARITY):
      target = expand_builtin_unop (target_mode, exp, target,
                                    subtarget, parity_optab);
      if (target)
        return target;
      break;

    case BUILT_IN_STRLEN:
      target = expand_builtin_strlen (exp, target, target_mode);
      if (target)
        return target;
      break;

    case BUILT_IN_STRNLEN:
      target = expand_builtin_strnlen (exp, target, target_mode);
      if (target)
        return target;
      break;

    case BUILT_IN_STRCAT:
      target = expand_builtin_strcat (exp);
      if (target)
        return target;
      break;

    case BUILT_IN_GETTEXT:
    case BUILT_IN_PUTS:
    case BUILT_IN_PUTS_UNLOCKED:
    case BUILT_IN_STRDUP:
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
        check_read_access (exp, CALL_EXPR_ARG (exp, 0));
      break;

    case BUILT_IN_INDEX:
    case BUILT_IN_RINDEX:
    case BUILT_IN_STRCHR:
    case BUILT_IN_STRRCHR:
      if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
        check_read_access (exp, CALL_EXPR_ARG (exp, 0));
      break;

    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
        check_read_access (exp, CALL_EXPR_ARG (exp, 0));
      break;

    case BUILT_IN_STRNDUP:
      if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
        check_read_access (exp, CALL_EXPR_ARG (exp, 0), CALL_EXPR_ARG (exp, 1));
      break;

    case BUILT_IN_STRCASECMP:
    case BUILT_IN_STRPBRK:
    case BUILT_IN_STRSPN:
    case BUILT_IN_STRCSPN:
    case BUILT_IN_STRSTR:
      if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
        {
          check_read_access (exp, CALL_EXPR_ARG (exp, 0));
          check_read_access (exp, CALL_EXPR_ARG (exp, 1));
        }
      break;

    case BUILT_IN_STRCPY:
      target = expand_builtin_strcpy (exp, target);
      if (target)
        return target;
      break;

    case BUILT_IN_STRNCAT:
      target = expand_builtin_strncat (exp, target);
      if (target)
        return target;
      break;

    case BUILT_IN_STRNCPY:
      target = expand_builtin_strncpy (exp, target);
      if (target)
        return target;
      break;

    case BUILT_IN_STPCPY:
      target = expand_builtin_stpcpy (exp, target, mode);
      if (target)
        return target;
      break;

    case BUILT_IN_STPNCPY:
      target = expand_builtin_stpncpy (exp, target);
      if (target)
        return target;
      break;

    case BUILT_IN_MEMCHR:
      target = expand_builtin_memchr (exp, target);
      if (target)
        return target;
      break;

    case BUILT_IN_MEMCPY:
      target = expand_builtin_memcpy (exp, target);
      if (target)
        return target;
      break;

    case BUILT_IN_MEMMOVE:
      target = expand_builtin_memmove (exp, target);
      if (target)
        return target;
      break;

    case BUILT_IN_MEMPCPY:
      target = expand_builtin_mempcpy (exp, target);
      if (target)
        return target;
      break;

    case BUILT_IN_MEMSET:
      target = expand_builtin_memset (exp, target, mode);
      if (target)
        return target;
      break;

    case BUILT_IN_BZERO:
      target = expand_builtin_bzero (exp);
      if (target)
        return target;
      break;

      /* Expand it as BUILT_IN_MEMCMP_EQ first.  If not successful, change it
         back to a BUILT_IN_STRCMP.  Remember to delete the 3rd parameter
         when changing it to a strcmp call.  */
    case BUILT_IN_STRCMP_EQ:
      target = expand_builtin_memcmp (exp, target, true);
      if (target)
        return target;

      /* Change this call back to a BUILT_IN_STRCMP.  */
      TREE_OPERAND (exp, 1)
        = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));

      /* Delete the last parameter.  */
      unsigned int i;
      vec<tree, va_gc> *arg_vec;
      vec_alloc (arg_vec, 2);
      for (i = 0; i < 2; i++)
        arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
      exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
      /* FALLTHROUGH */

    case BUILT_IN_STRCMP:
      target = expand_builtin_strcmp (exp, target);
      if (target)
        return target;
      break;

      /* Expand it as BUILT_IN_MEMCMP_EQ first.  If not successful, change it
         back to a BUILT_IN_STRNCMP.  */
    case BUILT_IN_STRNCMP_EQ:
      target = expand_builtin_memcmp (exp, target, true);
      if (target)
        return target;

      /* Change it back to a BUILT_IN_STRNCMP.  */
      TREE_OPERAND (exp, 1)
        = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
      /* FALLTHROUGH */

    case BUILT_IN_STRNCMP:
      target = expand_builtin_strncmp (exp, target, mode);
      if (target)
        return target;
      break;

    case BUILT_IN_BCMP:
    case BUILT_IN_MEMCMP:
    case BUILT_IN_MEMCMP_EQ:
      target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
      if (target)
        return target;
      if (fcode == BUILT_IN_MEMCMP_EQ)
        {
          tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
          TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
        }
      break;
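
      /* For illustration (a sketch): BUILT_IN_STRCMP_EQ is the form used
         for equality-only comparisons such as

           if (strcmp (s, "abc") == 0) ...

         It is first tried as a memcmp with a known length; if that
         fails, the code above rebuilds a plain strcmp call, dropping
         the extra length argument that the _EQ variant carries.  */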

    case BUILT_IN_SETJMP:
      /* This should have been lowered to the builtins below.  */
      gcc_unreachable ();

    case BUILT_IN_SETJMP_SETUP:
      /* __builtin_setjmp_setup is passed a pointer to an array of five words
         and the receiver label.  */
      if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
        {
          rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
                                      VOIDmode, EXPAND_NORMAL);
          tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
          rtx_insn *label_r = label_rtx (label);

          /* This is copied from the handling of non-local gotos.  */
          expand_builtin_setjmp_setup (buf_addr, label_r);
          nonlocal_goto_handler_labels
            = gen_rtx_INSN_LIST (VOIDmode, label_r,
                                 nonlocal_goto_handler_labels);
          /* ??? Do not let expand_label treat us as such since we would
             not want to be both on the list of non-local labels and on
             the list of forced labels.  */
          FORCED_LABEL (label) = 0;
          return const0_rtx;
        }
      break;

    case BUILT_IN_SETJMP_RECEIVER:
      /* __builtin_setjmp_receiver is passed the receiver label.  */
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
        {
          tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
          rtx_insn *label_r = label_rtx (label);

          expand_builtin_setjmp_receiver (label_r);
          return const0_rtx;
        }
      break;

      /* __builtin_longjmp is passed a pointer to an array of five words.
         It's similar to the C library longjmp function but works with
         __builtin_setjmp above.  */
    case BUILT_IN_LONGJMP:
      if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
        {
          rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
                                      VOIDmode, EXPAND_NORMAL);
          rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));

          if (value != const1_rtx)
            {
              error ("%<__builtin_longjmp%> second argument must be 1");
              return const0_rtx;
            }

          expand_builtin_longjmp (buf_addr, value);
          return const0_rtx;
        }
      break;

    case BUILT_IN_NONLOCAL_GOTO:
      target = expand_builtin_nonlocal_goto (exp);
      if (target)
        return target;
      break;

      /* This updates the setjmp buffer that is its argument with the value
         of the current stack pointer.  */
    case BUILT_IN_UPDATE_SETJMP_BUF:
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
        {
          rtx buf_addr
            = expand_normal (CALL_EXPR_ARG (exp, 0));

          expand_builtin_update_setjmp_buf (buf_addr);
        }
      return const0_rtx;

    case BUILT_IN_TRAP:
      expand_builtin_trap ();
      return const0_rtx;

    case BUILT_IN_UNREACHABLE:
      expand_builtin_unreachable ();
      return const0_rtx;

    CASE_FLT_FN (BUILT_IN_SIGNBIT):
    case BUILT_IN_SIGNBITD32:
    case BUILT_IN_SIGNBITD64:
    case BUILT_IN_SIGNBITD128:
      target = expand_builtin_signbit (exp, target);
      if (target)
        return target;
      break;

      /* Various hooks for the DWARF 2 __throw routine.  */
    case BUILT_IN_UNWIND_INIT:
      expand_builtin_unwind_init ();
      return const0_rtx;
    case BUILT_IN_DWARF_CFA:
      return virtual_cfa_rtx;
#ifdef DWARF2_UNWIND_INFO
    case BUILT_IN_DWARF_SP_COLUMN:
      return expand_builtin_dwarf_sp_column ();
    case BUILT_IN_INIT_DWARF_REG_SIZES:
      expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
      return const0_rtx;
#endif
    case BUILT_IN_FROB_RETURN_ADDR:
      return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
    case BUILT_IN_EXTRACT_RETURN_ADDR:
      return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
    case BUILT_IN_EH_RETURN:
      expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
                                CALL_EXPR_ARG (exp, 1));
      return const0_rtx;
    case BUILT_IN_EH_RETURN_DATA_REGNO:
      return expand_builtin_eh_return_data_regno (exp);
    case BUILT_IN_EXTEND_POINTER:
      return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
    case BUILT_IN_EH_POINTER:
      return expand_builtin_eh_pointer (exp);
    case BUILT_IN_EH_FILTER:
      return expand_builtin_eh_filter (exp);
    case BUILT_IN_EH_COPY_VALUES:
      return expand_builtin_eh_copy_values (exp);

    case BUILT_IN_VA_START:
      return expand_builtin_va_start (exp);
    case BUILT_IN_VA_END:
      return expand_builtin_va_end (exp);
    case BUILT_IN_VA_COPY:
      return expand_builtin_va_copy (exp);
    case BUILT_IN_EXPECT:
      return expand_builtin_expect (exp, target);
    case BUILT_IN_EXPECT_WITH_PROBABILITY:
      return expand_builtin_expect_with_probability (exp, target);
    case BUILT_IN_ASSUME_ALIGNED:
      return expand_builtin_assume_aligned (exp, target);
    case BUILT_IN_PREFETCH:
      expand_builtin_prefetch (exp);
      return const0_rtx;

    case BUILT_IN_INIT_TRAMPOLINE:
      return expand_builtin_init_trampoline (exp, true);
    case BUILT_IN_INIT_HEAP_TRAMPOLINE:
      return expand_builtin_init_trampoline (exp, false);
    case BUILT_IN_ADJUST_TRAMPOLINE:
      return expand_builtin_adjust_trampoline (exp);

    case BUILT_IN_INIT_DESCRIPTOR:
      return expand_builtin_init_descriptor (exp);
    case BUILT_IN_ADJUST_DESCRIPTOR:
      return expand_builtin_adjust_descriptor (exp);

    case BUILT_IN_FORK:
    case BUILT_IN_EXECL:
    case BUILT_IN_EXECV:
    case BUILT_IN_EXECLP:
    case BUILT_IN_EXECLE:
    case BUILT_IN_EXECVP:
    case BUILT_IN_EXECVE:
      target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_ADD_1:
    case BUILT_IN_SYNC_FETCH_AND_ADD_2:
    case BUILT_IN_SYNC_FETCH_AND_ADD_4:
    case BUILT_IN_SYNC_FETCH_AND_ADD_8:
    case BUILT_IN_SYNC_FETCH_AND_ADD_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
      target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_SUB_1:
    case BUILT_IN_SYNC_FETCH_AND_SUB_2:
    case BUILT_IN_SYNC_FETCH_AND_SUB_4:
    case BUILT_IN_SYNC_FETCH_AND_SUB_8:
    case BUILT_IN_SYNC_FETCH_AND_SUB_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
      target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_OR_1:
    case BUILT_IN_SYNC_FETCH_AND_OR_2:
    case BUILT_IN_SYNC_FETCH_AND_OR_4:
    case BUILT_IN_SYNC_FETCH_AND_OR_8:
    case BUILT_IN_SYNC_FETCH_AND_OR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
      target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_AND_1:
    case BUILT_IN_SYNC_FETCH_AND_AND_2:
    case BUILT_IN_SYNC_FETCH_AND_AND_4:
    case BUILT_IN_SYNC_FETCH_AND_AND_8:
    case BUILT_IN_SYNC_FETCH_AND_AND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
      target = expand_builtin_sync_operation (mode, exp, AND, false, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_XOR_1:
    case BUILT_IN_SYNC_FETCH_AND_XOR_2:
    case BUILT_IN_SYNC_FETCH_AND_XOR_4:
    case BUILT_IN_SYNC_FETCH_AND_XOR_8:
    case BUILT_IN_SYNC_FETCH_AND_XOR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
      target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_NAND_1:
    case BUILT_IN_SYNC_FETCH_AND_NAND_2:
    case BUILT_IN_SYNC_FETCH_AND_NAND_4:
    case BUILT_IN_SYNC_FETCH_AND_NAND_8:
    case BUILT_IN_SYNC_FETCH_AND_NAND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
      target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_ADD_AND_FETCH_1:
    case BUILT_IN_SYNC_ADD_AND_FETCH_2:
    case BUILT_IN_SYNC_ADD_AND_FETCH_4:
    case BUILT_IN_SYNC_ADD_AND_FETCH_8:
    case BUILT_IN_SYNC_ADD_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_SUB_AND_FETCH_1:
    case BUILT_IN_SYNC_SUB_AND_FETCH_2:
    case BUILT_IN_SYNC_SUB_AND_FETCH_4:
    case BUILT_IN_SYNC_SUB_AND_FETCH_8:
    case BUILT_IN_SYNC_SUB_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_OR_AND_FETCH_1:
    case BUILT_IN_SYNC_OR_AND_FETCH_2:
    case BUILT_IN_SYNC_OR_AND_FETCH_4:
    case BUILT_IN_SYNC_OR_AND_FETCH_8:
    case BUILT_IN_SYNC_OR_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_AND_AND_FETCH_1:
    case BUILT_IN_SYNC_AND_AND_FETCH_2:
    case BUILT_IN_SYNC_AND_AND_FETCH_4:
    case BUILT_IN_SYNC_AND_AND_FETCH_8:
    case BUILT_IN_SYNC_AND_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, AND, true, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_XOR_AND_FETCH_1:
    case BUILT_IN_SYNC_XOR_AND_FETCH_2:
    case BUILT_IN_SYNC_XOR_AND_FETCH_4:
    case BUILT_IN_SYNC_XOR_AND_FETCH_8:
    case BUILT_IN_SYNC_XOR_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_NAND_AND_FETCH_1:
    case BUILT_IN_SYNC_NAND_AND_FETCH_2:
    case BUILT_IN_SYNC_NAND_AND_FETCH_4:
    case BUILT_IN_SYNC_NAND_AND_FETCH_8:
    case BUILT_IN_SYNC_NAND_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
      if (mode == VOIDmode)
        mode = TYPE_MODE (boolean_type_node);
      if (!target || !register_operand (target, mode))
        target = gen_reg_rtx (mode);

      mode = get_builtin_sync_mode
                (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
      target = expand_builtin_compare_and_swap (mode, exp, true, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
      mode = get_builtin_sync_mode
                (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
      target = expand_builtin_compare_and_swap (mode, exp, false, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
      target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_LOCK_RELEASE_1:
    case BUILT_IN_SYNC_LOCK_RELEASE_2:
    case BUILT_IN_SYNC_LOCK_RELEASE_4:
    case BUILT_IN_SYNC_LOCK_RELEASE_8:
    case BUILT_IN_SYNC_LOCK_RELEASE_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
      expand_builtin_sync_lock_release (mode, exp);
      return const0_rtx;

    case BUILT_IN_SYNC_SYNCHRONIZE:
      expand_builtin_sync_synchronize ();
      return const0_rtx;

    case BUILT_IN_ATOMIC_EXCHANGE_1:
    case BUILT_IN_ATOMIC_EXCHANGE_2:
    case BUILT_IN_ATOMIC_EXCHANGE_4:
    case BUILT_IN_ATOMIC_EXCHANGE_8:
    case BUILT_IN_ATOMIC_EXCHANGE_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
      target = expand_builtin_atomic_exchange (mode, exp, target);
      if (target)
        return target;
      break;

    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
      {
        unsigned int nargs, z;
        vec<tree, va_gc> *vec;

        mode =
          get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
        target = expand_builtin_atomic_compare_exchange (mode, exp, target);
        if (target)
          return target;

        /* If this is turned into an external library call, the weak parameter
           must be dropped to match the expected parameter list.  */
        nargs = call_expr_nargs (exp);
        vec_alloc (vec, nargs - 1);
        for (z = 0; z < 3; z++)
          vec->quick_push (CALL_EXPR_ARG (exp, z));
        /* Skip the boolean weak parameter.  */
        for (z = 4; z < 6; z++)
          vec->quick_push (CALL_EXPR_ARG (exp, z));
        exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
        break;
      }

    case BUILT_IN_ATOMIC_LOAD_1:
    case BUILT_IN_ATOMIC_LOAD_2:
    case BUILT_IN_ATOMIC_LOAD_4:
    case BUILT_IN_ATOMIC_LOAD_8:
    case BUILT_IN_ATOMIC_LOAD_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
      target = expand_builtin_atomic_load (mode, exp, target);
      if (target)
        return target;
      break;

    case BUILT_IN_ATOMIC_STORE_1:
    case BUILT_IN_ATOMIC_STORE_2:
    case BUILT_IN_ATOMIC_STORE_4:
    case BUILT_IN_ATOMIC_STORE_8:
    case BUILT_IN_ATOMIC_STORE_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
      target = expand_builtin_atomic_store (mode, exp);
      if (target)
        return const0_rtx;
      break;
9669 case BUILT_IN_ATOMIC_ADD_FETCH_1
:
9670 case BUILT_IN_ATOMIC_ADD_FETCH_2
:
9671 case BUILT_IN_ATOMIC_ADD_FETCH_4
:
9672 case BUILT_IN_ATOMIC_ADD_FETCH_8
:
9673 case BUILT_IN_ATOMIC_ADD_FETCH_16
:
9675 enum built_in_function lib
;
9676 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_ADD_FETCH_1
);
9677 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_ADD_1
+
9678 (fcode
- BUILT_IN_ATOMIC_ADD_FETCH_1
));
9679 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, PLUS
, true,
9685 case BUILT_IN_ATOMIC_SUB_FETCH_1
:
9686 case BUILT_IN_ATOMIC_SUB_FETCH_2
:
9687 case BUILT_IN_ATOMIC_SUB_FETCH_4
:
9688 case BUILT_IN_ATOMIC_SUB_FETCH_8
:
9689 case BUILT_IN_ATOMIC_SUB_FETCH_16
:
9691 enum built_in_function lib
;
9692 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_SUB_FETCH_1
);
9693 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_SUB_1
+
9694 (fcode
- BUILT_IN_ATOMIC_SUB_FETCH_1
));
9695 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, MINUS
, true,
9701 case BUILT_IN_ATOMIC_AND_FETCH_1
:
9702 case BUILT_IN_ATOMIC_AND_FETCH_2
:
9703 case BUILT_IN_ATOMIC_AND_FETCH_4
:
9704 case BUILT_IN_ATOMIC_AND_FETCH_8
:
9705 case BUILT_IN_ATOMIC_AND_FETCH_16
:
9707 enum built_in_function lib
;
9708 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_AND_FETCH_1
);
9709 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_AND_1
+
9710 (fcode
- BUILT_IN_ATOMIC_AND_FETCH_1
));
9711 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, AND
, true,
9717 case BUILT_IN_ATOMIC_NAND_FETCH_1
:
9718 case BUILT_IN_ATOMIC_NAND_FETCH_2
:
9719 case BUILT_IN_ATOMIC_NAND_FETCH_4
:
9720 case BUILT_IN_ATOMIC_NAND_FETCH_8
:
9721 case BUILT_IN_ATOMIC_NAND_FETCH_16
:
9723 enum built_in_function lib
;
9724 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_NAND_FETCH_1
);
9725 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_NAND_1
+
9726 (fcode
- BUILT_IN_ATOMIC_NAND_FETCH_1
));
9727 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, NOT
, true,
    case BUILT_IN_ATOMIC_XOR_FETCH_1:
    case BUILT_IN_ATOMIC_XOR_FETCH_2:
    case BUILT_IN_ATOMIC_XOR_FETCH_4:
    case BUILT_IN_ATOMIC_XOR_FETCH_8:
    case BUILT_IN_ATOMIC_XOR_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
				       (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }
    case BUILT_IN_ATOMIC_OR_FETCH_1:
    case BUILT_IN_ATOMIC_OR_FETCH_2:
    case BUILT_IN_ATOMIC_OR_FETCH_4:
    case BUILT_IN_ATOMIC_OR_FETCH_8:
    case BUILT_IN_ATOMIC_OR_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
				       (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }
    case BUILT_IN_ATOMIC_FETCH_ADD_1:
    case BUILT_IN_ATOMIC_FETCH_ADD_2:
    case BUILT_IN_ATOMIC_FETCH_ADD_4:
    case BUILT_IN_ATOMIC_FETCH_ADD_8:
    case BUILT_IN_ATOMIC_FETCH_ADD_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;
    case BUILT_IN_ATOMIC_FETCH_SUB_1:
    case BUILT_IN_ATOMIC_FETCH_SUB_2:
    case BUILT_IN_ATOMIC_FETCH_SUB_4:
    case BUILT_IN_ATOMIC_FETCH_SUB_8:
    case BUILT_IN_ATOMIC_FETCH_SUB_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;
    case BUILT_IN_ATOMIC_FETCH_AND_1:
    case BUILT_IN_ATOMIC_FETCH_AND_2:
    case BUILT_IN_ATOMIC_FETCH_AND_4:
    case BUILT_IN_ATOMIC_FETCH_AND_8:
    case BUILT_IN_ATOMIC_FETCH_AND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;
    case BUILT_IN_ATOMIC_FETCH_NAND_1:
    case BUILT_IN_ATOMIC_FETCH_NAND_2:
    case BUILT_IN_ATOMIC_FETCH_NAND_4:
    case BUILT_IN_ATOMIC_FETCH_NAND_8:
    case BUILT_IN_ATOMIC_FETCH_NAND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;
    case BUILT_IN_ATOMIC_FETCH_XOR_1:
    case BUILT_IN_ATOMIC_FETCH_XOR_2:
    case BUILT_IN_ATOMIC_FETCH_XOR_4:
    case BUILT_IN_ATOMIC_FETCH_XOR_8:
    case BUILT_IN_ATOMIC_FETCH_XOR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;
    case BUILT_IN_ATOMIC_FETCH_OR_1:
    case BUILT_IN_ATOMIC_FETCH_OR_2:
    case BUILT_IN_ATOMIC_FETCH_OR_4:
    case BUILT_IN_ATOMIC_FETCH_OR_8:
    case BUILT_IN_ATOMIC_FETCH_OR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;
    case BUILT_IN_ATOMIC_TEST_AND_SET:
      return expand_builtin_atomic_test_and_set (exp, target);

    case BUILT_IN_ATOMIC_CLEAR:
      return expand_builtin_atomic_clear (exp);

    case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
      return expand_builtin_atomic_always_lock_free (exp);

    case BUILT_IN_ATOMIC_IS_LOCK_FREE:
      target = expand_builtin_atomic_is_lock_free (exp);
      if (target)
	return target;
      break;
    case BUILT_IN_ATOMIC_THREAD_FENCE:
      expand_builtin_atomic_thread_fence (exp);
      return const0_rtx;

    case BUILT_IN_ATOMIC_SIGNAL_FENCE:
      expand_builtin_atomic_signal_fence (exp);
      return const0_rtx;
    case BUILT_IN_OBJECT_SIZE:
      return expand_builtin_object_size (exp);
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      target = expand_builtin_memory_chk (exp, target, mode, fcode);
      if (target)
	return target;
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
    case BUILT_IN_STRCAT_CHK:
    case BUILT_IN_STRNCAT_CHK:
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      maybe_emit_chk_warning (exp, fcode);
      break;

    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      maybe_emit_sprintf_chk_warning (exp, fcode);
      break;
    case BUILT_IN_FREE:
      if (warn_free_nonheap_object)
	maybe_emit_free_warning (exp);
      break;
    case BUILT_IN_THREAD_POINTER:
      return expand_builtin_thread_pointer (exp, target);

    case BUILT_IN_SET_THREAD_POINTER:
      expand_builtin_set_thread_pointer (exp);
      return const0_rtx;
    case BUILT_IN_ACC_ON_DEVICE:
      /* Do library call, if we failed to expand the builtin when
	 folding.  */
      break;

    case BUILT_IN_GOACC_PARLEVEL_ID:
    case BUILT_IN_GOACC_PARLEVEL_SIZE:
      return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
    case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
      return expand_speculation_safe_value (VOIDmode, exp, target, ignore);

    case BUILT_IN_SPECULATION_SAFE_VALUE_1:
    case BUILT_IN_SPECULATION_SAFE_VALUE_2:
    case BUILT_IN_SPECULATION_SAFE_VALUE_4:
    case BUILT_IN_SPECULATION_SAFE_VALUE_8:
    case BUILT_IN_SPECULATION_SAFE_VALUE_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
      return expand_speculation_safe_value (mode, exp, target, ignore);
    default:	/* just do library call, if unknown builtin */
      break;
    }

  /* The switch statement above can drop through to cause the function
     to be called normally.  */
  return expand_call (exp, target, ignore);
}
/* Determine whether a tree node represents a call to a built-in
   function.  If the tree T is a call to a built-in function with
   the right number of arguments of the appropriate types, return
   the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
   Otherwise the return value is END_BUILTINS.  */

enum built_in_function
builtin_mathfn_code (const_tree t)
{
  const_tree fndecl, arg, parmlist;
  const_tree argtype, parmtype;
  const_call_expr_arg_iterator iter;

  if (TREE_CODE (t) != CALL_EXPR)
    return END_BUILTINS;

  fndecl = get_callee_fndecl (t);
  if (fndecl == NULL_TREE || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
    return END_BUILTINS;

  parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  init_const_call_expr_arg_iterator (t, &iter);
  for (; parmlist; parmlist = TREE_CHAIN (parmlist))
    {
      /* If a function doesn't take a variable number of arguments,
	 the last element in the list will have type `void'.  */
      parmtype = TREE_VALUE (parmlist);
      if (VOID_TYPE_P (parmtype))
	{
	  if (more_const_call_expr_args_p (&iter))
	    return END_BUILTINS;
	  return DECL_FUNCTION_CODE (fndecl);
	}

      if (! more_const_call_expr_args_p (&iter))
	return END_BUILTINS;

      arg = next_const_call_expr_arg (&iter);
      argtype = TREE_TYPE (arg);

      if (SCALAR_FLOAT_TYPE_P (parmtype))
	{
	  if (! SCALAR_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (COMPLEX_FLOAT_TYPE_P (parmtype))
	{
	  if (! COMPLEX_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (POINTER_TYPE_P (parmtype))
	{
	  if (! POINTER_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (INTEGRAL_TYPE_P (parmtype))
	{
	  if (! INTEGRAL_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else
	return END_BUILTINS;
    }

  /* Variable-length argument list.  */
  return DECL_FUNCTION_CODE (fndecl);
}
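
/* Usage sketch (illustrative, not part of the original source): a caller
   holding a CALL_EXPR node T can classify it like so:

     if (builtin_mathfn_code (t) == BUILT_IN_SQRT)
       ...  handle a well-formed sqrt call with matching argument types ...

   END_BUILTINS serves as the "not a recognized builtin call" sentinel.  */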
/* Fold a call to __builtin_constant_p, if we know its argument ARG will
   evaluate to a constant.  */

static tree
fold_builtin_constant_p (tree arg)
{
  /* We return 1 for a numeric type that's known to be a constant
     value at compile-time or for an aggregate type that's a
     literal constant.  */
  STRIP_NOPS (arg);

  /* If we know this is a constant, emit the constant of one.  */
  if (CONSTANT_CLASS_P (arg)
      || (TREE_CODE (arg) == CONSTRUCTOR
	  && TREE_CONSTANT (arg)))
    return integer_one_node;
  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (arg, 0);
      if (TREE_CODE (op) == STRING_CST
	  || (TREE_CODE (op) == ARRAY_REF
	      && integer_zerop (TREE_OPERAND (op, 1))
	      && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
	return integer_one_node;
    }

  /* If this expression has side effects, show we don't know it to be a
     constant.  Likewise if it's a pointer or aggregate type since in
     those case we only want literals, since those are only optimized
     when generating RTL, not later.
     And finally, if we are compiling an initializer, not code, we
     need to return a definite result now; there's not going to be any
     more optimization done.  */
  if (TREE_SIDE_EFFECTS (arg)
      || AGGREGATE_TYPE_P (TREE_TYPE (arg))
      || POINTER_TYPE_P (TREE_TYPE (arg))
      || cfun == 0
      || folding_initializer
      || force_folding_builtin_constant_p)
    return integer_zero_node;

  return NULL_TREE;
}
/* Create builtin_expect or builtin_expect_with_probability
   with PRED and EXPECTED as its arguments and return it as a truthvalue.
   Fortran FE can also produce builtin_expect with PREDICTOR as third argument.
   builtin_expect_with_probability instead uses third argument as PROBABILITY
   value.  */

static tree
build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
				tree predictor, tree probability)
{
  tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;

  fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
			      : BUILT_IN_EXPECT_WITH_PROBABILITY);
  arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
  ret_type = TREE_TYPE (TREE_TYPE (fn));
  pred_type = TREE_VALUE (arg_types);
  expected_type = TREE_VALUE (TREE_CHAIN (arg_types));

  pred = fold_convert_loc (loc, pred_type, pred);
  expected = fold_convert_loc (loc, expected_type, expected);

  if (probability)
    call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
  else
    call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
				     predictor);

  return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
		 build_int_cst (ret_type, 0));
}
/* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3.  Return
   NULL_TREE if no simplification is possible.  */

static tree
fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
		     tree arg3)
{
  tree inner, fndecl, inner_arg0;
  enum tree_code code;

  /* Distribute the expected value over short-circuiting operators.
     See through the cast from truthvalue_type_node to long.  */
  inner_arg0 = arg0;
  while (CONVERT_EXPR_P (inner_arg0)
	 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
    inner_arg0 = TREE_OPERAND (inner_arg0, 0);

  /* If this is a builtin_expect within a builtin_expect keep the
     inner one.  See through a comparison against a constant.  It
     might have been added to create a truthvalue.  */
  inner = inner_arg0;

  if (COMPARISON_CLASS_P (inner)
      && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
    inner = TREE_OPERAND (inner, 0);

  if (TREE_CODE (inner) == CALL_EXPR
      && (fndecl = get_callee_fndecl (inner))
      && (fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
	  || fndecl_built_in_p (fndecl, BUILT_IN_EXPECT_WITH_PROBABILITY)))
    return arg0;

  inner = inner_arg0;
  code = TREE_CODE (inner);
  if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
    {
      tree op0 = TREE_OPERAND (inner, 0);
      tree op1 = TREE_OPERAND (inner, 1);
      arg1 = save_expr (arg1);

      op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
      op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
      inner = build2 (code, TREE_TYPE (inner), op0, op1);

      return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
    }

  /* If the argument isn't invariant then there's nothing else we can do.  */
  if (!TREE_CONSTANT (inner_arg0))
    return NULL_TREE;

  /* If we expect that a comparison against the argument will fold to
     a constant return the constant.  In practice, this means a true
     constant or the address of a non-weak symbol.  */
  inner = inner_arg0;
  STRIP_NOPS (inner);
  if (TREE_CODE (inner) == ADDR_EXPR)
    {
      do
	inner = TREE_OPERAND (inner, 0);
      while (TREE_CODE (inner) == COMPONENT_REF
	     || TREE_CODE (inner) == ARRAY_REF);
      if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
	return NULL_TREE;
    }

  /* Otherwise, ARG0 already has the proper type for the return value.  */
  return arg0;
}
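
/* Example (illustrative, added commentary): for

     if (__builtin_expect (a && b, 1)) ...

   the expectation is distributed over the short-circuit operator, so the
   fold produces roughly

     (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   giving each branch of the && its own prediction.  */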
/* Fold a call to __builtin_classify_type with argument ARG.  */

static tree
fold_builtin_classify_type (tree arg)
{
  if (arg == 0)
    return build_int_cst (integer_type_node, no_type_class);

  return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
}
/* Fold a call EXPR (which may be null) to __builtin_strlen with argument
   ARG.  */

static tree
fold_builtin_strlen (location_t loc, tree expr, tree type, tree arg)
{
  if (!validate_arg (arg, POINTER_TYPE))
    return NULL_TREE;

  c_strlen_data lendata = { };
  tree len = c_strlen (arg, 0, &lendata);

  if (len)
    return fold_convert_loc (loc, type, len);

  if (!lendata.decl)
    c_strlen (arg, 1, &lendata);

  if (lendata.decl)
    {
      if (EXPR_HAS_LOCATION (arg))
	loc = EXPR_LOCATION (arg);
      else if (loc == UNKNOWN_LOCATION)
	loc = input_location;
      warn_string_no_nul (loc, expr, "strlen", arg, lendata.decl);
    }

  return NULL_TREE;
}
/* Fold a call to __builtin_inf or __builtin_huge_val.  */

static tree
fold_builtin_inf (location_t loc, tree type, int warn)
{
  REAL_VALUE_TYPE real;

  /* __builtin_inff is intended to be usable to define INFINITY on all
     targets.  If an infinity is not available, INFINITY expands "to a
     positive constant of type float that overflows at translation
     time", footnote "In this case, using INFINITY will violate the
     constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
     Thus we pedwarn to ensure this constraint violation is
     diagnosed.  */
  if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
    pedwarn (loc, 0, "target format does not support infinity");

  real_inf (&real);
  return build_real (type, real);
}
/* Fold function call to builtin sincos, sincosf, or sincosl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_sincos (location_t loc,
		     tree arg0, tree arg1, tree arg2)
{
  tree type;
  tree fndecl, call = NULL_TREE;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  type = TREE_TYPE (arg0);

  /* Calculate the result when the argument is a constant.  */
  built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
  if (fn == END_BUILTINS)
    return NULL_TREE;

  /* Canonicalize sincos to cexpi.  */
  if (TREE_CODE (arg0) == REAL_CST)
    {
      tree complex_type = build_complex_type (type);
      call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
    }
  if (!call)
    {
      if (!targetm.libc_has_function (function_c99_math_complex, type)
	  || !builtin_decl_implicit_p (fn))
	return NULL_TREE;
      fndecl = builtin_decl_explicit (fn);
      call = build_call_expr_loc (loc, fndecl, 1, arg0);
      call = builtin_save_expr (call);
    }

  tree ptype = build_pointer_type (type);
  arg1 = fold_convert (ptype, arg1);
  arg2 = fold_convert (ptype, arg2);
  return build2 (COMPOUND_EXPR, void_type_node,
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg1),
			 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg2),
			 fold_build1_loc (loc, REALPART_EXPR, type, call)));
}
/* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
{
  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
				  arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
  if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg1)));
      tree ind2
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
/* Fold a call to builtin isascii with argument ARG.  */

static tree
fold_builtin_isascii (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Transform isascii(c) -> ((c & ~0x7f) == 0).  */
  arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
		     build_int_cst (integer_type_node,
				    ~ (unsigned HOST_WIDE_INT) 0x7f));
  return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
			  arg, integer_zero_node);
}
/* Fold a call to builtin toascii with argument ARG.  */

static tree
fold_builtin_toascii (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Transform toascii(c) -> (c & 0x7f).  */
  return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
			  build_int_cst (integer_type_node, 0x7f));
}
/* Fold a call to builtin isdigit with argument ARG.  */

static tree
fold_builtin_isdigit (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9.  */
  /* According to the C standard, isdigit is unaffected by locale.
     However, it definitely is affected by the target character set.  */
  unsigned HOST_WIDE_INT target_digit0
    = lang_hooks.to_target_charset ('0');

  if (target_digit0 == 0)
    return NULL_TREE;

  arg = fold_convert_loc (loc, unsigned_type_node, arg);
  arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
		     build_int_cst (unsigned_type_node, target_digit0));
  return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
			  build_int_cst (unsigned_type_node, 9));
}
/* Fold a call to fabs, fabsf or fabsl with argument ARG.  */

static tree
fold_builtin_fabs (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  arg = fold_convert_loc (loc, type, arg);
  return fold_build1_loc (loc, ABS_EXPR, type, arg);
}
/* Fold a call to abs, labs, llabs or imaxabs with argument ARG.  */

static tree
fold_builtin_abs (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  arg = fold_convert_loc (loc, type, arg);
  return fold_build1_loc (loc, ABS_EXPR, type, arg);
}
/* Fold a call to builtin carg(a+bi) -> atan2(b,a).  */

static tree
fold_builtin_carg (location_t loc, tree arg, tree type)
{
  if (validate_arg (arg, COMPLEX_TYPE)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
    {
      tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);

      if (atan2_fn)
	{
	  tree new_arg = builtin_save_expr (arg);
	  tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
	  tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
	  return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
	}
    }

  return NULL_TREE;
}
/* Fold a call to builtin frexp, we can assume the base is 2.  */

static tree
fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      tree frac, exp;

      switch (value->cl)
	{
	case rvc_zero:
	  /* For +-0, return (*exp = 0, +-0).  */
	  exp = integer_zero_node;
	  frac = arg0;
	  break;

	case rvc_nan:
	case rvc_inf:
	  /* For +-NaN or +-Inf, *exp is unspecified, return arg0.  */
	  return omit_one_operand_loc (loc, rettype, arg0, arg1);

	case rvc_normal:
	  {
	    /* Since the frexp function always expects base 2, and in
	       GCC normalized significands are already in the range
	       [0.5, 1.0), we have exactly what frexp wants.  */
	    REAL_VALUE_TYPE frac_rvt = *value;
	    SET_REAL_EXP (&frac_rvt, 0);
	    frac = build_real (rettype, frac_rvt);
	    exp = build_int_cst (integer_type_node, REAL_EXP (value));
	  }
	  break;

	default:
	  gcc_unreachable ();
	}

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
    }

  return NULL_TREE;
}
/* Fold a call to builtin modf.  */

static tree
fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      REAL_VALUE_TYPE trunc, frac;

      switch (value->cl)
	{
	case rvc_nan:
	case rvc_zero:
	  /* For +-NaN or +-0, return (*arg1 = arg0, arg0).  */
	  trunc = frac = *value;
	  break;

	case rvc_inf:
	  /* For +-Inf, return (*arg1 = arg0, +-0).  */
	  frac = dconst0;
	  frac.sign = value->sign;
	  trunc = *value;
	  break;

	case rvc_normal:
	  /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)).  */
	  real_trunc (&trunc, VOIDmode, value);
	  real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
	  /* If the original number was negative and already
	     integral, then the fractional part is -0.0.  */
	  if (value->sign && frac.cl == rvc_zero)
	    frac.sign = value->sign;
	  break;
	}

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
			      build_real (rettype, trunc));
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
			      build_real (rettype, frac));
    }

  return NULL_TREE;
}
/* Given a location LOC, an interclass builtin function decl FNDECL
   and its single argument ARG, return a folded expression computing
   the same, or NULL_TREE if we either couldn't or didn't want to fold
   (the latter happens if there's an RTL instruction available).  */

static tree
fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
{
  machine_mode mode;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
    return NULL_TREE;

  mode = TYPE_MODE (TREE_TYPE (arg));

  bool is_ibm_extended = MODE_COMPOSITE_P (mode);

  /* If there is no optab, try generic code.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
      tree result;

    CASE_FLT_FN (BUILT_IN_ISINF):
      {
	/* isinf(x) -> isgreater(fabs(x),DBL_MAX).  */
	tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
	tree type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	if (is_ibm_extended)
	  {
	    /* NaN and Inf are encoded in the high-order double value
	       only.  The low-order value is not significant.  */
	    type = double_type_node;
	    mode = DFmode;
	    arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
	  }
	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
	real_from_string (&r, buf);
	result = build_call_expr (isgr_fn, 2,
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	return result;
      }
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_ISFINITE:
      {
	/* isfinite(x) -> islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
	tree type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	if (is_ibm_extended)
	  {
	    /* NaN and Inf are encoded in the high-order double value
	       only.  The low-order value is not significant.  */
	    type = double_type_node;
	    mode = DFmode;
	    arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
	  }
	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
	real_from_string (&r, buf);
	result = build_call_expr (isle_fn, 2,
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	/*result = fold_build2_loc (loc, UNGT_EXPR,
				  TREE_TYPE (TREE_TYPE (fndecl)),
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
				  TREE_TYPE (TREE_TYPE (fndecl)),
				  result);*/
	return result;
      }
    case BUILT_IN_ISNORMAL:
      {
	/* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
	   islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
	tree type = TREE_TYPE (arg);
	tree orig_arg, max_exp, min_exp;
	machine_mode orig_mode = mode;
	REAL_VALUE_TYPE rmax, rmin;
	char buf[128];

	orig_arg = arg = builtin_save_expr (arg);
	if (is_ibm_extended)
	  {
	    /* Use double to test the normal range of IBM extended
	       precision.  Emin for IBM extended precision is
	       different to emin for IEEE double, being 53 higher
	       since the low double exponent is at least 53 lower
	       than the high double exponent.  */
	    type = double_type_node;
	    mode = DFmode;
	    arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
	  }
	arg = fold_build1_loc (loc, ABS_EXPR, type, arg);

	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
	real_from_string (&rmax, buf);
	sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
	real_from_string (&rmin, buf);
	max_exp = build_real (type, rmax);
	min_exp = build_real (type, rmin);

	max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
	if (is_ibm_extended)
	  {
	    /* Testing the high end of the range is done just using
	       the high double, using the same test as isfinite().
	       For the subnormal end of the range we first test the
	       high double, then if its magnitude is equal to the
	       limit of 0x1p-969, we test whether the low double is
	       non-zero and opposite sign to the high double.  */
	    tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
	    tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
	    tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
	    tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
				       arg, min_exp);
	    tree as_complex = build1 (VIEW_CONVERT_EXPR,
				      complex_double_type_node, orig_arg);
	    tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
	    tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
	    tree zero = build_real (type, dconst0);
	    tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
	    tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
	    tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
	    tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
				      fold_build3 (COND_EXPR,
						   integer_type_node,
						   hilt, logt, lolt));
	    eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
				  eq_min, ok_lo);
	    min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
				   gt_min, eq_min);
	  }
	else
	  {
	    tree const isge_fn
	      = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
	    min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
	  }
	result = fold_build2 (BIT_AND_EXPR, integer_type_node,
			      max_exp, min_exp);
	return result;
      }
    default:
      break;
    }

  return NULL_TREE;
}
/* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
   ARG is the argument for the call.  */

static tree
fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  switch (builtin_index)
    {
    case BUILT_IN_ISINF:
      if (tree_expr_infinite_p (arg))
	return omit_one_operand_loc (loc, type, integer_one_node, arg);
      if (!tree_expr_maybe_infinite_p (arg))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);
      break;

    case BUILT_IN_ISINF_SIGN:
      {
	/* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
	/* In a boolean context, GCC will fold the inner COND_EXPR to
	   1.  So e.g. "if (isinf_sign(x))" would be folded to just
	   "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))".  */
	tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
	tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
	tree tmp = NULL_TREE;

	arg = builtin_save_expr (arg);

	if (signbit_fn && isinf_fn)
	  {
	    tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
	    tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);

	    signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
					    signbit_call, integer_zero_node);
	    isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
					  isinf_call, integer_zero_node);

	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
				   integer_minus_one_node, integer_one_node);
	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
				   isinf_call, tmp,
				   integer_zero_node);
	  }

	return tmp;
      }

    case BUILT_IN_ISFINITE:
      if (tree_expr_finite_p (arg))
	return omit_one_operand_loc (loc, type, integer_one_node, arg);
      if (tree_expr_nan_p (arg) || tree_expr_infinite_p (arg))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);
      break;

    case BUILT_IN_ISNAN:
      if (tree_expr_nan_p (arg))
	return omit_one_operand_loc (loc, type, integer_one_node, arg);
      if (!tree_expr_maybe_nan_p (arg))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      {
	bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
	if (is_ibm_extended)
	  {
	    /* NaN and Inf are encoded in the high-order double value
	       only.  The low-order value is not significant.  */
	    arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
	  }
      }
      arg = builtin_save_expr (arg);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);

    default:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
/* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
   This builtin will generate code to return the appropriate floating
   point classification depending on the value of the floating point
   number passed in.  The possible return values must be supplied as
   int arguments to the call in the following order: FP_NAN, FP_INFINITE,
   FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipsis is for exactly
   one floating point argument which is "type generic".  */

static tree
fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
{
  tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
    arg, type, res, tmp;
  machine_mode mode;
  REAL_VALUE_TYPE r;
  char buf[128];

  /* Verify the required arguments in the original call.  */
  if (nargs != 6
      || !validate_arg (args[0], INTEGER_TYPE)
      || !validate_arg (args[1], INTEGER_TYPE)
      || !validate_arg (args[2], INTEGER_TYPE)
      || !validate_arg (args[3], INTEGER_TYPE)
      || !validate_arg (args[4], INTEGER_TYPE)
      || !validate_arg (args[5], REAL_TYPE))
    return NULL_TREE;

  fp_nan = args[0];
  fp_infinite = args[1];
  fp_normal = args[2];
  fp_subnormal = args[3];
  fp_zero = args[4];
  arg = args[5];
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (type);
  arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));

  /* fpclassify(x) ->
       isnan(x) ? FP_NAN :
	 (fabs(x) == Inf ? FP_INFINITE :
	   (fabs(x) >= DBL_MIN ? FP_NORMAL :
	     (x == 0 ? FP_ZERO : FP_SUBNORMAL))).  */

  tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
			 build_real (type, dconst0));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
			 tmp, fp_zero, fp_subnormal);

  sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
  real_from_string (&r, buf);
  tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
			 arg, build_real (type, r));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);

  if (tree_expr_maybe_infinite_p (arg))
    {
      real_inf (&r);
      tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
			     build_real (type, r));
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
			     fp_infinite, res);
    }

  if (tree_expr_maybe_nan_p (arg))
    {
      tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
    }

  return res;
}
/* Fold a call to an unordered comparison function such as
   __builtin_isgreater().  FNDECL is the FUNCTION_DECL for the function
   being called and ARG0 and ARG1 are the arguments for the call.
   UNORDERED_CODE and ORDERED_CODE are comparison codes that give
   the opposite of the desired result.  UNORDERED_CODE is used
   for modes that can hold NaNs and ORDERED_CODE is used for
   the rest.  */

static tree
fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
			    enum tree_code unordered_code,
			    enum tree_code ordered_code)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum tree_code code;
  tree type0, type1;
  enum tree_code code0, code1;
  tree cmp_type = NULL_TREE;

  type0 = TREE_TYPE (arg0);
  type1 = TREE_TYPE (arg1);

  code0 = TREE_CODE (type0);
  code1 = TREE_CODE (type1);

  if (code0 == REAL_TYPE && code1 == REAL_TYPE)
    /* Choose the wider of two real types.  */
    cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
      ? type0 : type1;
  else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
    cmp_type = type0;
  else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
    cmp_type = type1;

  arg0 = fold_convert_loc (loc, cmp_type, arg0);
  arg1 = fold_convert_loc (loc, cmp_type, arg1);

  if (unordered_code == UNORDERED_EXPR)
    {
      if (tree_expr_nan_p (arg0) || tree_expr_nan_p (arg1))
	return omit_two_operands_loc (loc, type, integer_one_node, arg0, arg1);
      if (!tree_expr_maybe_nan_p (arg0) && !tree_expr_maybe_nan_p (arg1))
	return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
    }

  code = (tree_expr_maybe_nan_p (arg0) || tree_expr_maybe_nan_p (arg1))
    ? unordered_code : ordered_code;
  return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
			  fold_build2_loc (loc, code, type, arg0, arg1));
}
/* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
   arithmetics if it can never overflow, or into internal functions that
   return both result of arithmetics and overflowed boolean flag in
   a complex integer result, or some other check for overflow.
   Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
   checking part of that.  */

static tree
fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
			     tree arg0, tree arg1, tree arg2)
{
  enum internal_fn ifn = IFN_LAST;
  /* The code of the expression corresponding to the built-in.  */
  enum tree_code opcode = ERROR_MARK;
  bool ovf_only = false;

  switch (fcode)
    {
    case BUILT_IN_ADD_OVERFLOW_P:
      ovf_only = true;
      /* FALLTHRU */
    case BUILT_IN_ADD_OVERFLOW:
    case BUILT_IN_SADD_OVERFLOW:
    case BUILT_IN_SADDL_OVERFLOW:
    case BUILT_IN_SADDLL_OVERFLOW:
    case BUILT_IN_UADD_OVERFLOW:
    case BUILT_IN_UADDL_OVERFLOW:
    case BUILT_IN_UADDLL_OVERFLOW:
      opcode = PLUS_EXPR;
      ifn = IFN_ADD_OVERFLOW;
      break;
    case BUILT_IN_SUB_OVERFLOW_P:
      ovf_only = true;
      /* FALLTHRU */
    case BUILT_IN_SUB_OVERFLOW:
    case BUILT_IN_SSUB_OVERFLOW:
    case BUILT_IN_SSUBL_OVERFLOW:
    case BUILT_IN_SSUBLL_OVERFLOW:
    case BUILT_IN_USUB_OVERFLOW:
    case BUILT_IN_USUBL_OVERFLOW:
    case BUILT_IN_USUBLL_OVERFLOW:
      opcode = MINUS_EXPR;
      ifn = IFN_SUB_OVERFLOW;
      break;
    case BUILT_IN_MUL_OVERFLOW_P:
      ovf_only = true;
      /* FALLTHRU */
    case BUILT_IN_MUL_OVERFLOW:
    case BUILT_IN_SMUL_OVERFLOW:
    case BUILT_IN_SMULL_OVERFLOW:
    case BUILT_IN_SMULLL_OVERFLOW:
    case BUILT_IN_UMUL_OVERFLOW:
    case BUILT_IN_UMULL_OVERFLOW:
    case BUILT_IN_UMULLL_OVERFLOW:
      opcode = MULT_EXPR;
      ifn = IFN_MUL_OVERFLOW;
      break;
    default:
      gcc_unreachable ();
    }

  /* For the "generic" overloads, the first two arguments can have different
     types and the last argument determines the target type to use to check
     for overflow.  The arguments of the other overloads all have the same
     type.  */
  tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));

  /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
     arguments are constant, attempt to fold the built-in call into a constant
     expression indicating whether or not it detected an overflow.  */
  if (ovf_only
      && TREE_CODE (arg0) == INTEGER_CST
      && TREE_CODE (arg1) == INTEGER_CST)
    /* Perform the computation in the target type and check for overflow.  */
    return omit_one_operand_loc (loc, boolean_type_node,
				 arith_overflowed_p (opcode, type, arg0, arg1)
				 ? boolean_true_node : boolean_false_node,
				 arg2);

  tree intres, ovfres;
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      intres = fold_binary_loc (loc, opcode, type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type, arg1));
      if (TREE_OVERFLOW (intres))
	intres = drop_tree_overflow (intres);
      ovfres = (arith_overflowed_p (opcode, type, arg0, arg1)
		? boolean_true_node : boolean_false_node);
    }
  else
    {
      tree ctype = build_complex_type (type);
      tree call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
						arg0, arg1);
      tree tgt = save_expr (call);
      intres = build1_loc (loc, REALPART_EXPR, type, tgt);
      ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
      ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
    }

  if (ovf_only)
    return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);

  tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
  tree store
    = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
  return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
}
/* Fold a call to __builtin_FILE to a constant string.  */

static inline tree
fold_builtin_FILE (location_t loc)
{
  if (const char *fname = LOCATION_FILE (loc))
    {
      /* The documentation says this builtin is equivalent to the preprocessor
	 __FILE__ macro so it appears appropriate to use the same file prefix
	 mappings.  */
      fname = remap_macro_filename (fname);
      return build_string_literal (strlen (fname) + 1, fname);
    }

  return build_string_literal (1, "");
}
/* Fold a call to __builtin_FUNCTION to a constant string.  */

static inline tree
fold_builtin_FUNCTION ()
{
  const char *name = "";

  if (current_function_decl)
    name = lang_hooks.decl_printable_name (current_function_decl, 0);

  return build_string_literal (strlen (name) + 1, name);
}
/* Fold a call to __builtin_LINE to an integer constant.  */

static inline tree
fold_builtin_LINE (location_t loc, tree type)
{
  return build_int_cst (type, LOCATION_LINE (loc));
}
/* Fold a call to built-in function FNDECL with 0 arguments.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_0 (location_t loc, tree fndecl)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  switch (fcode)
    {
    case BUILT_IN_FILE:
      return fold_builtin_FILE (loc);

    case BUILT_IN_FUNCTION:
      return fold_builtin_FUNCTION ();

    case BUILT_IN_LINE:
      return fold_builtin_LINE (loc, type);

    CASE_FLT_FN (BUILT_IN_INF):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
    case BUILT_IN_INFD32:
    case BUILT_IN_INFD64:
    case BUILT_IN_INFD128:
      return fold_builtin_inf (loc, type, true);

    CASE_FLT_FN (BUILT_IN_HUGE_VAL):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
      return fold_builtin_inf (loc, type, false);

    case BUILT_IN_CLASSIFY_TYPE:
      return fold_builtin_classify_type (NULL_TREE);

    default:
      break;
    }
  return NULL_TREE;
}
/* Fold a call to built-in function FNDECL with 1 argument, ARG0.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_1 (location_t loc, tree expr, tree fndecl, tree arg0)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (TREE_CODE (arg0) == ERROR_MARK)
    return NULL_TREE;

  if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
    return ret;

  switch (fcode)
    {
    case BUILT_IN_CONSTANT_P:
      {
	tree val = fold_builtin_constant_p (arg0);

	/* Gimplification will pull the CALL_EXPR for the builtin out of
	   an if condition.  When not optimizing, we'll not CSE it back.
	   To avoid link error types of regressions, return false now.  */
	if (!val && !optimize)
	  val = integer_zero_node;

	return val;
      }

    case BUILT_IN_CLASSIFY_TYPE:
      return fold_builtin_classify_type (arg0);

    case BUILT_IN_STRLEN:
      return fold_builtin_strlen (loc, expr, type, arg0);

    CASE_FLT_FN (BUILT_IN_FABS):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
    case BUILT_IN_FABSD32:
    case BUILT_IN_FABSD64:
    case BUILT_IN_FABSD128:
      return fold_builtin_fabs (loc, arg0, type);

    case BUILT_IN_ABS:
    case BUILT_IN_LABS:
    case BUILT_IN_LLABS:
    case BUILT_IN_IMAXABS:
      return fold_builtin_abs (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_CONJ):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
      break;

    CASE_FLT_FN (BUILT_IN_CREAL):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
      break;

    CASE_FLT_FN (BUILT_IN_CIMAG):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
      break;

    CASE_FLT_FN (BUILT_IN_CARG):
      return fold_builtin_carg (loc, arg0, type);

    case BUILT_IN_ISASCII:
      return fold_builtin_isascii (loc, arg0);

    case BUILT_IN_TOASCII:
      return fold_builtin_toascii (loc, arg0);

    case BUILT_IN_ISDIGIT:
      return fold_builtin_isdigit (loc, arg0);

    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISFINITE:
      {
	tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
	if (ret)
	  return ret;
	return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
      }

    CASE_FLT_FN (BUILT_IN_ISINF):
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      {
	tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
	if (ret)
	  return ret;
	return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
      }

    case BUILT_IN_ISNORMAL:
      return fold_builtin_interclass_mathfn (loc, fndecl, arg0);

    case BUILT_IN_ISINF_SIGN:
      return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);

    CASE_FLT_FN (BUILT_IN_ISNAN):
    case BUILT_IN_ISNAND32:
    case BUILT_IN_ISNAND64:
    case BUILT_IN_ISNAND128:
      return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);

    case BUILT_IN_FREE:
      if (integer_zerop (arg0))
	return build_empty_stmt (loc);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Folds a call EXPR (which may be null) to built-in function FNDECL
   with 2 arguments, ARG0 and ARG1.  This function returns NULL_TREE
   if no simplification was possible.  */

static tree
fold_builtin_2 (location_t loc, tree expr, tree fndecl, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (TREE_CODE (arg0) == ERROR_MARK
      || TREE_CODE (arg1) == ERROR_MARK)
    return NULL_TREE;

  if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
    return ret;

  switch (fcode)
    {
    CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
    CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, POINTER_TYPE))
	return do_mpfr_lgamma_r (arg0, arg1, type);
      break;

    CASE_FLT_FN (BUILT_IN_FREXP):
      return fold_builtin_frexp (loc, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_MODF):
      return fold_builtin_modf (loc, arg0, arg1, type);

    case BUILT_IN_STRSPN:
      return fold_builtin_strspn (loc, expr, arg0, arg1);

    case BUILT_IN_STRCSPN:
      return fold_builtin_strcspn (loc, expr, arg0, arg1);

    case BUILT_IN_STRPBRK:
      return fold_builtin_strpbrk (loc, expr, arg0, arg1, type);

    case BUILT_IN_EXPECT:
      return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);

    case BUILT_IN_ISGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNLE_EXPR, LE_EXPR);
    case BUILT_IN_ISGREATEREQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNLT_EXPR, LT_EXPR);
    case BUILT_IN_ISLESS:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNGE_EXPR, GE_EXPR);
    case BUILT_IN_ISLESSEQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNGT_EXPR, GT_EXPR);
    case BUILT_IN_ISLESSGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
    case BUILT_IN_ISUNORDERED:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNORDERED_EXPR,
					 NOP_EXPR);

      /* We do the folding for va_start in the expander.  */
    case BUILT_IN_VA_START:
      break;

    case BUILT_IN_OBJECT_SIZE:
      return fold_builtin_object_size (arg0, arg1);

    case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
      return fold_builtin_atomic_always_lock_free (arg0, arg1);

    case BUILT_IN_ATOMIC_IS_LOCK_FREE:
      return fold_builtin_atomic_is_lock_free (arg0, arg1);

    default:
      break;
    }
  return NULL_TREE;
}
/* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
   and ARG2.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_3 (location_t loc, tree fndecl,
		tree arg0, tree arg1, tree arg2)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (TREE_CODE (arg0) == ERROR_MARK
      || TREE_CODE (arg1) == ERROR_MARK
      || TREE_CODE (arg2) == ERROR_MARK)
    return NULL_TREE;

  if (tree ret = fold_const_call (as_combined_fn (fcode), type,
				  arg0, arg1, arg2))
    return ret;

  switch (fcode)
    {

    CASE_FLT_FN (BUILT_IN_SINCOS):
      return fold_builtin_sincos (loc, arg0, arg1, arg2);

    CASE_FLT_FN (BUILT_IN_REMQUO):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, REAL_TYPE)
	  && validate_arg (arg2, POINTER_TYPE))
	return do_mpfr_remquo (arg0, arg1, arg2);
      break;

    case BUILT_IN_MEMCMP:
      return fold_builtin_memcmp (loc, arg0, arg1, arg2);

    case BUILT_IN_EXPECT:
      return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);

    case BUILT_IN_EXPECT_WITH_PROBABILITY:
      return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg2);

    case BUILT_IN_ADD_OVERFLOW:
    case BUILT_IN_SUB_OVERFLOW:
    case BUILT_IN_MUL_OVERFLOW:
    case BUILT_IN_ADD_OVERFLOW_P:
    case BUILT_IN_SUB_OVERFLOW_P:
    case BUILT_IN_MUL_OVERFLOW_P:
    case BUILT_IN_SADD_OVERFLOW:
    case BUILT_IN_SADDL_OVERFLOW:
    case BUILT_IN_SADDLL_OVERFLOW:
    case BUILT_IN_SSUB_OVERFLOW:
    case BUILT_IN_SSUBL_OVERFLOW:
    case BUILT_IN_SSUBLL_OVERFLOW:
    case BUILT_IN_SMUL_OVERFLOW:
    case BUILT_IN_SMULL_OVERFLOW:
    case BUILT_IN_SMULLL_OVERFLOW:
    case BUILT_IN_UADD_OVERFLOW:
    case BUILT_IN_UADDL_OVERFLOW:
    case BUILT_IN_UADDLL_OVERFLOW:
    case BUILT_IN_USUB_OVERFLOW:
    case BUILT_IN_USUBL_OVERFLOW:
    case BUILT_IN_USUBLL_OVERFLOW:
    case BUILT_IN_UMUL_OVERFLOW:
    case BUILT_IN_UMULL_OVERFLOW:
    case BUILT_IN_UMULLL_OVERFLOW:
      return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);

    default:
      break;
    }
  return NULL_TREE;
}
/* Folds a call EXPR (which may be null) to built-in function FNDECL.
   ARGS is an array of NARGS arguments.  IGNORE is true if the result
   of the function call is ignored.  This function returns NULL_TREE
   if no simplification was possible.  */

tree
fold_builtin_n (location_t loc, tree expr, tree fndecl, tree *args,
		int nargs, bool ignore ATTRIBUTE_UNUSED)
{
  tree ret = NULL_TREE;

  switch (nargs)
    {
    case 0:
      ret = fold_builtin_0 (loc, fndecl);
      break;
    case 1:
      ret = fold_builtin_1 (loc, expr, fndecl, args[0]);
      break;
    case 2:
      ret = fold_builtin_2 (loc, expr, fndecl, args[0], args[1]);
      break;
    case 3:
      ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
      break;
    default:
      ret = fold_builtin_varargs (loc, fndecl, args, nargs);
      break;
    }
  if (ret)
    {
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      return ret;
    }
  return NULL_TREE;
}
/* Construct a new CALL_EXPR to FNDECL using the tail of the argument
   list ARGS along with N new arguments in NEWARGS.  SKIP is the number
   of arguments in ARGS to be omitted.  OLDNARGS is the number of
   elements in ARGS.  */

static tree
rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
			  int skip, tree fndecl, int n, va_list newargs)
{
  int nargs = oldnargs - skip + n;
  tree *buffer;

  if (n > 0)
    {
      int i, j;

      buffer = XALLOCAVEC (tree, nargs);
      for (i = 0; i < n; i++)
	buffer[i] = va_arg (newargs, tree);
      for (j = skip; j < oldnargs; j++, i++)
	buffer[i] = args[j];
    }
  else
    buffer = args + skip;

  return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
}
/* Return true if FNDECL shouldn't be folded right now.
   If a built-in function has an inline attribute always_inline
   wrapper, defer folding it after always_inline functions have
   been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
   might not be performed.  */

bool
avoid_folding_inline_builtin (tree fndecl)
{
  return (DECL_DECLARED_INLINE_P (fndecl)
	  && DECL_DISREGARD_INLINE_LIMITS (fndecl)
	  && cfun
	  && !cfun->always_inline_functions_inlined
	  && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
}
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_expr (location_t loc, tree exp, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = get_callee_fndecl (exp);
  if (fndecl && fndecl_built_in_p (fndecl)
      /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
	 yet.  Defer folding until we see all the arguments
	 (after inlining).  */
      && !CALL_EXPR_VA_ARG_PACK (exp))
    {
      int nargs = call_expr_nargs (exp);

      /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
	 instead last argument is __builtin_va_arg_pack ().  Defer folding
	 even in that case, until arguments are finalized.  */
      if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
	{
	  tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
	  if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
	    return NULL_TREE;
	}

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;

      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
				     CALL_EXPR_ARGP (exp), ignore);
      else
	{
	  tree *args = CALL_EXPR_ARGP (exp);
	  ret = fold_builtin_n (loc, exp, fndecl, args, nargs, ignore);
	  if (ret)
	    return ret;
	}
    }
  return NULL_TREE;
}
/* Fold a CALL_EXPR with type TYPE with FN as the function expression.
   N arguments are passed in the array ARGARRAY.  Return a folded
   expression or NULL_TREE if no simplification was possible.  */

tree
fold_builtin_call_array (location_t loc, tree,
			 tree fn,
			 int n,
			 tree *argarray)
{
  if (TREE_CODE (fn) != ADDR_EXPR)
    return NULL_TREE;

  tree fndecl = TREE_OPERAND (fn, 0);
  if (TREE_CODE (fndecl) == FUNCTION_DECL
      && fndecl_built_in_p (fndecl))
    {
      /* If last argument is __builtin_va_arg_pack (), arguments to this
	 function are not finalized yet.  Defer folding until they are.  */
      if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
	{
	  tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
	  if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
	    return NULL_TREE;
	}
      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	return targetm.fold_builtin (fndecl, n, argarray, false);
      else
	return fold_builtin_n (loc, NULL_TREE, fndecl, argarray, n, false);
    }

  return NULL_TREE;
}
/* Construct a new CALL_EXPR using the tail of the argument list of EXP
   along with N new arguments specified as the "..." parameters.  SKIP
   is the number of arguments in EXP to be omitted.  This function is used
   to do varargs-to-varargs transformations.  */

static tree
rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
{
  va_list ap;
  tree t;

  va_start (ap, n);
  t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
				CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
  va_end (ap);

  return t;
}
/* Validate a single argument ARG against a tree code CODE representing
   a type.  Return true when argument is valid.  */

static bool
validate_arg (const_tree arg, enum tree_code code)
{
  if (!arg)
    return false;
  else if (code == POINTER_TYPE)
    return POINTER_TYPE_P (TREE_TYPE (arg));
  else if (code == INTEGER_TYPE)
    return INTEGRAL_TYPE_P (TREE_TYPE (arg));
  return code == TREE_CODE (TREE_TYPE (arg));
}
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.

   This is the GIMPLE version of validate_arglist.  Eventually we want to
   completely convert builtins.c to work from GIMPLEs and the tree based
   validate_arglist will then be removed.  */

bool
validate_gimple_arglist (const gcall *call, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_tree arg;
  size_t i;

  va_start (ap, call);
  i = 0;

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = (i == gimple_call_num_args (call));
	  goto end;
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = gimple_call_arg (call, i++);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */

 end: ;
  va_end (ap);

  return res;
}
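
/* Usage sketch (illustrative, added commentary): a checker for a call
   shaped like memcpy (dst, src, n) could be written

     validate_gimple_arglist (call, POINTER_TYPE, POINTER_TYPE,
			      INTEGER_TYPE, VOID_TYPE)

   where the trailing VOID_TYPE terminates the list and a literal 0 would
   instead mark a trailing ellipsis.  */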
/* Default target-specific builtin expander that does nothing.  */

rtx
default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
			rtx target ATTRIBUTE_UNUSED,
			rtx subtarget ATTRIBUTE_UNUSED,
			machine_mode mode ATTRIBUTE_UNUSED,
			int ignore ATTRIBUTE_UNUSED)
{
  return NULL_RTX;
}
/* Returns true if EXP represents data that would potentially reside
   in a readonly section.  */

bool
readonly_data_expr (tree exp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) != ADDR_EXPR)
    return false;

  exp = get_base_address (TREE_OPERAND (exp, 0));
  if (!exp)
    return false;

  /* Make sure we call decl_readonly_section only for trees it
     can handle (since it returns true for everything it doesn't
     understand).  */
  if (TREE_CODE (exp) == STRING_CST
      || TREE_CODE (exp) == CONSTRUCTOR
      || (VAR_P (exp) && TREE_STATIC (exp)))
    return decl_readonly_section (exp, 0);
  else
    return false;
}
/* Simplify a call to the strpbrk builtin.  S1 and S2 are the arguments
   to the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strpbrk (location_t loc, tree, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;

  tree fn;
  const char *p1, *p2;

  p2 = c_getstr (s2);
  if (p2 == NULL)
    return NULL_TREE;

  p1 = c_getstr (s1);
  if (p1 != NULL)
    {
      const char *r = strpbrk (p1, p2);
      tree tem;

      if (r == NULL)
        return build_int_cst (TREE_TYPE (s1), 0);

      /* Return an offset into the constant string argument.  */
      tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
      return fold_convert_loc (loc, type, tem);
    }

  if (p2[0] == '\0')
    /* strpbrk(x, "") == NULL.
       Evaluate and ignore s1 in case it had side-effects.  */
    return omit_one_operand_loc (loc, type, integer_zero_node, s1);

  if (p2[1] != '\0')
    return NULL_TREE;  /* Really call strpbrk.  */

  fn = builtin_decl_implicit (BUILT_IN_STRCHR);
  if (!fn)
    return NULL_TREE;

  /* New argument list transforming strpbrk(s1, s2) to
     strchr(s1, s2[0]).  */
  return build_call_expr_loc (loc, fn, 2, s1,
                              build_int_cst (integer_type_node, p2[0]));
}
/* Simplify a call to the strspn builtin.  S1 and S2 are the arguments
   to the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strspn (location_t loc, tree expr, tree s1, tree s2)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;

  if (!check_nul_terminated_array (expr, s1)
      || !check_nul_terminated_array (expr, s2))
    return NULL_TREE;

  const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);

  /* If either argument is "", return NULL_TREE.  */
  if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
    /* Evaluate and ignore both arguments in case either one has
       side-effects.  */
    return omit_two_operands_loc (loc, size_type_node, size_zero_node,
                                  s1, s2);
  return NULL_TREE;
}
/* Simplify a call to the strcspn builtin.  S1 and S2 are the arguments
   to the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strcspn (location_t loc, tree expr, tree s1, tree s2)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;

  if (!check_nul_terminated_array (expr, s1)
      || !check_nul_terminated_array (expr, s2))
    return NULL_TREE;

  /* If the first argument is "", return NULL_TREE.  */
  const char *p1 = c_getstr (s1);
  if (p1 && *p1 == '\0')
    {
      /* Evaluate and ignore argument s2 in case it has
         side-effects.  */
      return omit_one_operand_loc (loc, size_type_node,
                                   size_zero_node, s2);
    }

  /* If the second argument is "", return __builtin_strlen(s1).  */
  const char *p2 = c_getstr (s2);
  if (p2 && *p2 == '\0')
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);

      /* If the replacement _DECL isn't initialized, don't do the
         transformation.  */
      if (!fn)
        return NULL_TREE;

      return build_call_expr_loc (loc, fn, 1, s1);
    }
  return NULL_TREE;
}
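
/* The folds done by the two functions above, for illustration only:

     strspn (s, "")    ->  0           (arguments still evaluated)
     strspn ("", s)    ->  0           (arguments still evaluated)
     strcspn ("", s)   ->  0           (s still evaluated)
     strcspn (s, "")   ->  strlen (s)  */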
/* Fold the next_arg or va_start call EXP.  Returns true if there was an error
   produced.  False otherwise.  This is done so that we don't output the error
   or warning twice or three times.  */

bool
fold_builtin_next_arg (tree exp, bool va_start_p)
{
  tree fntype = TREE_TYPE (current_function_decl);
  int nargs = call_expr_nargs (exp);
  tree arg;
  /* There is good chance the current input_location points inside the
     definition of the va_start macro (perhaps on the token for
     builtin) in a system header, so warnings will not be emitted.
     Use the location in real source code.  */
  location_t current_location =
    linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
                                              NULL);

  if (!stdarg_p (fntype))
    {
      error ("%<va_start%> used in function with fixed arguments");
      return true;
    }

  if (va_start_p)
    {
      if (va_start_p && (nargs != 2))
        {
          error ("wrong number of arguments to function %<va_start%>");
          return true;
        }
      arg = CALL_EXPR_ARG (exp, 1);
    }
  /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
     when we checked the arguments and if needed issued a warning.  */
  else
    {
      if (nargs == 0)
        {
          /* Evidently an out of date version of <stdarg.h>; can't validate
             va_start's second argument, but can still work as intended.  */
          warning_at (current_location,
                      OPT_Wvarargs,
                      "%<__builtin_next_arg%> called without an argument");
          return true;
        }
      else if (nargs > 1)
        {
          error ("wrong number of arguments to function %<__builtin_next_arg%>");
          return true;
        }
      arg = CALL_EXPR_ARG (exp, 0);
    }

  if (TREE_CODE (arg) == SSA_NAME)
    arg = SSA_NAME_VAR (arg);

  /* We destructively modify the call to be __builtin_va_start (ap, 0)
     or __builtin_next_arg (0) the first time we see it, after checking
     the arguments and if needed issuing a warning.  */
  if (!integer_zerop (arg))
    {
      tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));

      /* Strip off all nops for the sake of the comparison.  This
         is not quite the same as STRIP_NOPS.  It does more.
         We must also strip off INDIRECT_EXPR for C++ reference
         parameters.  */
      while (CONVERT_EXPR_P (arg)
             || TREE_CODE (arg) == INDIRECT_REF)
        arg = TREE_OPERAND (arg, 0);
      if (arg != last_parm)
        {
          /* FIXME: Sometimes with the tree optimizers we can end up with
             something other than the last argument even though the user
             used the last argument.  We just warn and set the arg to be
             the last argument so that we will get wrong-code because of
             it.  */
          warning_at (current_location,
                      OPT_Wvarargs,
                      "second parameter of %<va_start%> not last named argument");
        }

      /* Undefined by C99 7.15.1.4p4 (va_start):
         "If the parameter parmN is declared with the register storage
         class, with a function or array type, or with a type that is
         not compatible with the type that results after application of
         the default argument promotions, the behavior is undefined."  */
      else if (DECL_REGISTER (arg))
        {
          warning_at (current_location,
                      OPT_Wvarargs,
                      "undefined behavior when second parameter of "
                      "%<va_start%> is declared with %<register%> storage");
        }

      /* We want to verify the second parameter just once before the tree
         optimizers are run and then avoid keeping it in the tree,
         as otherwise we could warn even for correct code like:
         void foo (int i, ...)
         { va_list ap; i++; va_start (ap, i); va_end (ap); }  */
      if (va_start_p)
        CALL_EXPR_ARG (exp, 1) = integer_zero_node;
      else
        CALL_EXPR_ARG (exp, 0) = integer_zero_node;
    }
  return false;
}
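
/* Illustrative example, not from the original sources: the check above
   diagnoses a va_start whose second argument is not the last named
   parameter:

     void f (int a, int b, ...)
     {
       va_list ap;
       va_start (ap, a);   // warning: second parameter of va_start
                           // not last named argument
       va_end (ap);
     }
*/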
/* Expand a call EXP to __builtin_object_size.  */

static rtx
expand_builtin_object_size (tree exp)
{
  tree ost;
  int object_size_type;
  tree fndecl = get_callee_fndecl (exp);

  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      error ("%Kfirst argument of %qD must be a pointer, second integer constant",
             exp, fndecl);
      expand_builtin_trap ();
      return const0_rtx;
    }

  ost = CALL_EXPR_ARG (exp, 1);
  STRIP_NOPS (ost);

  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    {
      error ("%Klast argument of %qD is not integer constant between 0 and 3",
             exp, fndecl);
      expand_builtin_trap ();
      return const0_rtx;
    }

  object_size_type = tree_to_shwi (ost);

  return object_size_type < 2 ? constm1_rtx : const0_rtx;
}
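
/* For illustration only: when the object size could not be determined
   by earlier folding, the expansion above yields the documented
   defaults:

     __builtin_object_size (p, 0)  ->  (size_t) -1   (maximum estimate)
     __builtin_object_size (p, 2)  ->  (size_t) 0    (minimum estimate)  */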
/* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   FCODE is the BUILT_IN_* to use.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
                           enum built_in_function fcode)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE,
                         fcode == BUILT_IN_MEMSET_CHK
                         ? INTEGER_TYPE : POINTER_TYPE,
                         INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);
  tree size = CALL_EXPR_ARG (exp, 3);

  /* FIXME: Set access mode to write only for memset et al.  */
  bool sizes_ok = check_access (exp, len, /*maxread=*/NULL_TREE,
                                /*srcstr=*/NULL_TREE, size, access_read_write);

  if (!tree_fits_uhwi_p (size))
    return NULL_RTX;

  if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
    {
      /* Avoid transforming the checking call to an ordinary one when
         an overflow has been detected or when the call couldn't be
         validated because the size is not constant.  */
      if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
        return NULL_RTX;

      tree fn = NULL_TREE;
      /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
         mem{cpy,pcpy,move,set} is available.  */
      switch (fcode)
        {
        case BUILT_IN_MEMCPY_CHK:
          fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
          break;
        case BUILT_IN_MEMPCPY_CHK:
          fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
          break;
        case BUILT_IN_MEMMOVE_CHK:
          fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
          break;
        case BUILT_IN_MEMSET_CHK:
          fn = builtin_decl_explicit (BUILT_IN_MEMSET);
          break;
        default:
          break;
        }

      if (!fn)
        return NULL_RTX;

      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_expr (fn, target, mode, EXPAND_NORMAL);
    }
  else if (fcode == BUILT_IN_MEMSET_CHK)
    return NULL_RTX;
  else
    {
      unsigned int dest_align = get_pointer_alignment (dest);

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
        return NULL_RTX;

      /* If SRC and DEST are the same (and not volatile), do nothing.  */
      if (operand_equal_p (src, dest, 0))
        {
          tree expr;

          if (fcode != BUILT_IN_MEMPCPY_CHK)
            {
              /* Evaluate and ignore LEN in case it has side-effects.  */
              expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
              return expand_expr (dest, target, mode, EXPAND_NORMAL);
            }

          expr = fold_build_pointer_plus (dest, len);
          return expand_expr (expr, target, mode, EXPAND_NORMAL);
        }

      /* __memmove_chk special case.  */
      if (fcode == BUILT_IN_MEMMOVE_CHK)
        {
          unsigned int src_align = get_pointer_alignment (src);

          if (src_align == 0)
            return NULL_RTX;

          /* If src is categorized for a readonly section we can use
             normal __memcpy_chk.  */
          if (readonly_data_expr (src))
            {
              tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
              if (!fn)
                return NULL_RTX;
              fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
                                          dest, src, len, size);
              gcc_assert (TREE_CODE (fn) == CALL_EXPR);
              CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
              return expand_expr (fn, target, mode, EXPAND_NORMAL);
            }
        }
      return NULL_RTX;
    }
}
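
/* Illustrative example, not from the original sources: with a known
   length that fits the destination, e.g.

     char buf[8];
     __builtin___memcpy_chk (buf, src, 4, __builtin_object_size (buf, 0));

   the checking call is expanded like plain memcpy (buf, src, 4); the
   _chk form is retained when the length is unknown or too large.  */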
/* Emit warning if a buffer overflow is detected at compile time.  */

static void
maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
{
  /* The source string.  */
  tree srcstr = NULL_TREE;
  /* The size of the destination object returned by __builtin_object_size.  */
  tree objsize = NULL_TREE;
  /* The string that is being concatenated with (as in __strcat_chk)
     or null if it isn't.  */
  tree catstr = NULL_TREE;
  /* The maximum length of the source sequence in a bounded operation
     (such as __strncat_chk) or null if the operation isn't bounded
     (such as __strcat_chk).  */
  tree maxread = NULL_TREE;
  /* The exact size of the access (such as in __strncpy_chk).  */
  tree size = NULL_TREE;
  /* The access by the function that's checked.  Except for snprintf
     both writing and reading are checked.  */
  access_mode mode = access_read_write;

  switch (fcode)
    {
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      srcstr = CALL_EXPR_ARG (exp, 1);
      objsize = CALL_EXPR_ARG (exp, 2);
      break;

    case BUILT_IN_STRCAT_CHK:
      /* For __strcat_chk the warning will be emitted only if overflowing
         by at least strlen (dest) + 1 bytes.  */
      catstr = CALL_EXPR_ARG (exp, 0);
      srcstr = CALL_EXPR_ARG (exp, 1);
      objsize = CALL_EXPR_ARG (exp, 2);
      break;

    case BUILT_IN_STRNCAT_CHK:
      catstr = CALL_EXPR_ARG (exp, 0);
      srcstr = CALL_EXPR_ARG (exp, 1);
      maxread = CALL_EXPR_ARG (exp, 2);
      objsize = CALL_EXPR_ARG (exp, 3);
      break;

    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      srcstr = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 2);
      objsize = CALL_EXPR_ARG (exp, 3);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      maxread = CALL_EXPR_ARG (exp, 1);
      objsize = CALL_EXPR_ARG (exp, 3);
      /* The only checked access is the write to the destination.  */
      mode = access_write_only;
      break;

    default:
      gcc_unreachable ();
    }

  if (catstr && maxread)
    {
      /* Check __strncat_chk.  There is no way to determine the length
         of the string to which the source string is being appended so
         just warn when the length of the source string is not known.  */
      check_strncat_sizes (exp, objsize);
      return;
    }

  check_access (exp, size, maxread, srcstr, objsize, mode);
}
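
/* Illustrative example, not from the original sources: an overflow that
   is provable at compile time, e.g.

     char buf[4];
     __builtin___strcpy_chk (buf, "hello", __builtin_object_size (buf, 0));

   is diagnosed here via check_access before the call is expanded.  */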
/* Emit warning if a buffer overflow is detected at compile time
   in __sprintf_chk/__vsprintf_chk calls.  */

static void
maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
{
  tree size, len, fmt;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call.  */

  if (nargs < 4)
    return;
  size = CALL_EXPR_ARG (exp, 2);
  fmt = CALL_EXPR_ARG (exp, 3);

  if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
    return;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return;

  if (!init_target_chars ())
    return;

  /* If the format doesn't contain % args or %%, we know its size.  */
  if (strchr (fmt_str, target_percent) == 0)
    len = build_int_cstu (size_type_node, strlen (fmt_str));
  /* If the format is "%s" and first ... argument is a string literal,
     we know it too.  */
  else if (fcode == BUILT_IN_SPRINTF_CHK
           && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree arg;

      if (nargs < 5)
        return;
      arg = CALL_EXPR_ARG (exp, 4);
      if (! POINTER_TYPE_P (TREE_TYPE (arg)))
        return;

      len = c_strlen (arg, 1);
      if (!len || ! tree_fits_uhwi_p (len))
        return;
    }
  else
    return;

  /* Add one for the terminating nul.  */
  len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);

  check_access (exp, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, len, size,
                access_write_only);
}
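
/* Illustrative example, not from the original sources: both shapes with
   a computable output length are diagnosed, e.g. with char buf[4] and
   bos standing for __builtin_object_size (buf, 0):

     __builtin___sprintf_chk (buf, 0, bos, "abcdef");       // 6 chars + nul
     __builtin___sprintf_chk (buf, 0, bos, "%s", "abcde");  // 5 chars + nul
*/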
/* Emit warning if a free is called with address of a variable.  */

static void
maybe_emit_free_warning (tree exp)
{
  if (call_expr_nargs (exp) != 1)
    return;

  tree arg = CALL_EXPR_ARG (exp, 0);

  STRIP_NOPS (arg);
  if (TREE_CODE (arg) != ADDR_EXPR)
    return;

  arg = get_base_address (TREE_OPERAND (arg, 0));
  if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
    return;

  if (SSA_VAR_P (arg))
    warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
                "%Kattempt to free a non-heap object %qD", exp, arg);
  else
    warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
                "%Kattempt to free a non-heap object", exp);
}
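
/* Illustrative example, not from the original sources:

     int x;
     free (&x);   // warning: attempt to free a non-heap object 'x'
*/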
/* Fold a call to __builtin_object_size with arguments PTR and OST,
   if possible.  */

static tree
fold_builtin_object_size (tree ptr, tree ost)
{
  unsigned HOST_WIDE_INT bytes;
  int object_size_type;

  if (!validate_arg (ptr, POINTER_TYPE)
      || !validate_arg (ost, INTEGER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (ost);

  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    return NULL_TREE;

  object_size_type = tree_to_shwi (ost);

  /* __builtin_object_size doesn't evaluate side-effects in its arguments;
     if there are any side-effects, it returns (size_t) -1 for types 0 and 1
     and (size_t) 0 for types 2 and 3.  */
  if (TREE_SIDE_EFFECTS (ptr))
    return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      compute_builtin_object_size (ptr, object_size_type, &bytes);
      if (wi::fits_to_tree_p (bytes, size_type_node))
        return build_int_cstu (size_type_node, bytes);
    }
  else if (TREE_CODE (ptr) == SSA_NAME)
    {
      /* If object size is not known yet, delay folding until
         later.  Maybe subsequent passes will help determining
         it.  */
      if (compute_builtin_object_size (ptr, object_size_type, &bytes)
          && wi::fits_to_tree_p (bytes, size_type_node))
        return build_int_cstu (size_type_node, bytes);
    }

  return NULL_TREE;
}
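
/* For illustration only: because the argument is not evaluated, a call
   with side-effects folds directly to the conservative constant:

     __builtin_object_size (f (), 0)  ->  (size_t) -1
     __builtin_object_size (f (), 2)  ->  (size_t) 0   */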
/* Builtins with folding operations that operate on "..." arguments
   need special handling; we need to store the arguments in a convenient
   data structure before attempting any folding.  Fortunately there are
   only a few builtins that fall into this category.  FNDECL is the
   function, EXP is the CALL_EXPR for the call.  */

static tree
fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  tree ret = NULL_TREE;

  switch (fcode)
    {
    case BUILT_IN_FPCLASSIFY:
      ret = fold_builtin_fpclassify (loc, args, nargs);
      break;

    default:
      break;
    }
  if (ret)
    {
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}
/* Initialize format string characters in the target charset.  */

bool
init_target_chars (void)
{
  static bool init;
  if (!init)
    {
      target_newline = lang_hooks.to_target_charset ('\n');
      target_percent = lang_hooks.to_target_charset ('%');
      target_c = lang_hooks.to_target_charset ('c');
      target_s = lang_hooks.to_target_charset ('s');
      if (target_newline == 0 || target_percent == 0 || target_c == 0
          || target_s == 0)
        return false;

      target_percent_c[0] = target_percent;
      target_percent_c[1] = target_c;
      target_percent_c[2] = '\0';

      target_percent_s[0] = target_percent;
      target_percent_s[1] = target_s;
      target_percent_s[2] = '\0';

      target_percent_s_newline[0] = target_percent;
      target_percent_s_newline[1] = target_s;
      target_percent_s_newline[2] = target_newline;
      target_percent_s_newline[3] = '\0';

      init = true;
    }
  return true;
}
/* Helper function for do_mpfr_arg*().  Ensure M is a normal number
   and no overflow/underflow occurred.  INEXACT is true if M was not
   exactly calculated.  TYPE is the tree type for the result.  This
   function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail.  */

static tree
do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
      && (!flag_rounding_math || !inexact))
    {
      REAL_VALUE_TYPE rr;

      real_from_mpfr (&rr, m, type, MPFR_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
         check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
         but the mpfr_t is not, then we underflowed in the
         conversion.  */
      if (real_isfinite (&rr)
          && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
        {
          REAL_VALUE_TYPE rmode;

          real_convert (&rmode, TYPE_MODE (type), &rr);
          /* Proceed iff the specified mode can hold the value.  */
          if (real_identical (&rmode, &rr))
            return build_real (type, rmode);
        }
    }
  return NULL_TREE;
}
/* Helper function for do_mpc_arg*().  Ensure M is a normal complex
   number and no overflow/underflow occurred.  INEXACT is true if M
   was not exactly calculated.  TYPE is the tree type for the result.
   This function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail; if
   FORCE_CONVERT is true, bypass the checks.  */

static tree
do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (force_convert
      || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
          && !mpfr_overflow_p () && !mpfr_underflow_p ()
          && (!flag_rounding_math || !inexact)))
    {
      REAL_VALUE_TYPE re, im;

      real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), MPFR_RNDN);
      real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), MPFR_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
         check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
         but the mpfr_t is not, then we underflowed in the
         conversion.  */
      if (force_convert
          || (real_isfinite (&re) && real_isfinite (&im)
              && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
              && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
        {
          REAL_VALUE_TYPE re_mode, im_mode;

          real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
          real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
          /* Proceed iff the specified mode can hold the value.  */
          if (force_convert
              || (real_identical (&re_mode, &re)
                  && real_identical (&im_mode, &im)))
            return build_complex (type, build_real (TREE_TYPE (type), re_mode),
                                  build_real (TREE_TYPE (type), im_mode));
        }
    }
  return NULL_TREE;
}
/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mpfr_rnd_t rnd = fmt->round_towards_zero ? MPFR_RNDZ : MPFR_RNDN;
          tree result_rem;
          long integer_quo;
          mpfr_t m0, m1;

          mpfr_inits2 (prec, m0, m1, NULL);
          mpfr_from_real (m0, ra0, MPFR_RNDN);
          mpfr_from_real (m1, ra1, MPFR_RNDN);
          mpfr_clear_flags ();
          mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
          /* Remquo is independent of the rounding mode, so pass
             inexact=0 to do_mpfr_ckconv().  */
          result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
          mpfr_clears (m0, m1, NULL);
          if (result_rem)
            {
              /* MPFR calculates quo in the host's long so it may
                 return more bits in quo than the target int can hold
                 if sizeof(host long) > sizeof(target int).  This can
                 happen even for native compilers in LP64 mode.  In
                 these cases, modulo the quo value with the largest
                 number that the target int can hold while leaving one
                 bit for the sign.  */
              if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
                integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

              /* Dereference the quo pointer argument.  */
              arg_quo = build_fold_indirect_ref (arg_quo);
              /* Proceed iff a valid pointer type was passed in.  */
              if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
                {
                  /* Set the value.  */
                  tree result_quo
                    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
                                   build_int_cst (TREE_TYPE (arg_quo),
                                                  integer_quo));
                  TREE_SIDE_EFFECTS (result_quo) = 1;
                  /* Combine the quo assignment with the rem.  */
                  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                    result_quo, result_rem));
                }
            }
        }
    }
  return result;
}
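
/* Illustrative example, not from the original sources: with constant
   arguments the whole call folds at compile time, e.g.

     int q;
     double r = remquo (5.0, 3.0, &q);   // r == -1.0, q == 2

   since 5.0 == 2 * 3.0 - 1.0 with the quotient rounded to nearest.  */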
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
         negative integer.  */
      if (real_isfinite (ra)
          && ra->cl != rvc_zero
          && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mpfr_rnd_t rnd = fmt->round_towards_zero ? MPFR_RNDZ : MPFR_RNDN;
          int inexact, sg;
          mpfr_t m;
          tree result_lg;

          mpfr_init2 (m, prec);
          mpfr_from_real (m, ra, MPFR_RNDN);
          mpfr_clear_flags ();
          inexact = mpfr_lgamma (m, &sg, m, rnd);
          result_lg = do_mpfr_ckconv (m, type, inexact);
          mpfr_clear (m);
          if (result_lg)
            {
              tree result_sg;

              /* Dereference the arg_sg pointer argument.  */
              arg_sg = build_fold_indirect_ref (arg_sg);
              /* Assign the signgam value into *arg_sg.  */
              result_sg = fold_build2 (MODIFY_EXPR,
                                       TREE_TYPE (arg_sg), arg_sg,
                                       build_int_cst (TREE_TYPE (arg_sg), sg));
              TREE_SIDE_EFFECTS (result_sg) = 1;
              /* Combine the signgam assignment with the lgamma result.  */
              result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                result_sg, result_lg));
            }
        }
    }

  return result;
}
/* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
   mpc function FUNC on it and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
             int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
          || (real_isfinite (re0) && real_isfinite (im0)
              && real_isfinite (re1) && real_isfinite (im1)))
        {
          const struct real_format *const fmt =
            REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
          const int prec = fmt->p;
          const mpfr_rnd_t rnd = fmt->round_towards_zero
                                 ? MPFR_RNDZ : MPFR_RNDN;
          const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
          int inexact;
          mpc_t m0, m1;

          mpc_init2 (m0, prec);
          mpc_init2 (m1, prec);
          mpfr_from_real (mpc_realref (m0), re0, rnd);
          mpfr_from_real (mpc_imagref (m0), im0, rnd);
          mpfr_from_real (mpc_realref (m1), re1, rnd);
          mpfr_from_real (mpc_imagref (m1), im1, rnd);
          mpfr_clear_flags ();
          inexact = func (m0, m0, m1, crnd);
          result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
          mpc_clear (m0);
          mpc_clear (m1);
        }
    }

  return result;
}
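
/* Illustrative note: this helper is the path by which e.g. a call to
   cpow with constant complex operands gets folded, the caller passing
   mpc_pow as FUNC.  */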
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl && fndecl_built_in_p (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
                    ? gimple_call_arg_ptr (stmt, 0)
                    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
        return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
        return targetm.fold_builtin (fndecl, nargs, args, ignore);
      else
        {
          ret = fold_builtin_n (loc, NULL_TREE, fndecl, args, nargs, ignore);
          if (ret)
            {
              /* Propagate location information from original call to
                 expansion of builtin.  Otherwise things like
                 maybe_emit_chk_warning, that operate on the expansion
                 of a builtin, will use the wrong location information.  */
              if (gimple_has_location (stmt))
                {
                  tree realret = ret;
                  if (TREE_CODE (ret) == NOP_EXPR)
                    realret = TREE_OPERAND (ret, 0);
                  if (CAN_HAVE_LOCATION_P (realret)
                      && !EXPR_HAS_LOCATION (realret))
                    SET_EXPR_LOCATION (realret, loc);
                  return realret;
                }
              return ret;
            }
        }
    }
  return NULL_TREE;
}
/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  gcc_assert (fndecl_built_in_p (decl, BUILT_IN_NORMAL)
              && asmspec != 0);

  tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);

  if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
      && INT_TYPE_SIZE < BITS_PER_WORD)
    {
      scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
      set_user_assembler_libfunc ("ffs", asmspec);
      set_optab_libfunc (ffs_optab, mode, "ffs");
    }
}
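
/* Illustrative example, not from the original sources: a user-level
   redirection such as

     extern int ffs (int) __asm__ ("my_ffs");

   lands here, so direct calls and, on targets where int is narrower
   than a word, the ffs_optab libcall both use the "my_ffs" name.  */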
/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
    switch (DECL_FUNCTION_CODE (decl))
      {
        /* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
        /* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
        /* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
        return true;

      default:
        return false;
      }

  return false;
}
/* Return true if DECL is a builtin that is not expensive, i.e., one that is
   most probably expanded inline into reasonably simple code.  This is a
   superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      CASE_BUILT_IN_ALLOCA:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_BSWAP128:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_ACC_ON_DEVICE:
        return true;

      default:
        return is_simple_builtin (decl);
      }

  return false;
}
/* Return true if T is a constant and the value cast to a target char
   can be represented by a host char.
   Store the cast char constant in *P if so.  */

bool
target_char_cst_p (tree t, char *p)
{
  if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
    return false;

  *p = (char)tree_to_uhwi (t);
  return true;
}
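
/* Illustrative note: folders use this to extract the character from
   calls such as strchr (s, 'l'), where the second argument arrives as
   an INTEGER_CST that must fit a host char.  */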
/* Return true if the builtin DECL is implemented in a standard library.
   Otherwise return false, which doesn't guarantee it is not (thus the list
   of handled builtins below may be incomplete).  */
bool
builtin_with_linkage_p (tree decl)
{
  if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      CASE_FLT_FN (BUILT_IN_ACOS):
      CASE_FLT_FN (BUILT_IN_ACOSH):
      CASE_FLT_FN (BUILT_IN_ASIN):
      CASE_FLT_FN (BUILT_IN_ASINH):
      CASE_FLT_FN (BUILT_IN_ATAN):
      CASE_FLT_FN (BUILT_IN_ATANH):
      CASE_FLT_FN (BUILT_IN_ATAN2):
      CASE_FLT_FN (BUILT_IN_CBRT):
      CASE_FLT_FN (BUILT_IN_CEIL):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_CEIL):
      CASE_FLT_FN (BUILT_IN_COPYSIGN):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
      CASE_FLT_FN (BUILT_IN_COS):
      CASE_FLT_FN (BUILT_IN_COSH):
      CASE_FLT_FN (BUILT_IN_ERF):
      CASE_FLT_FN (BUILT_IN_ERFC):
      CASE_FLT_FN (BUILT_IN_EXP):
      CASE_FLT_FN (BUILT_IN_EXP2):
      CASE_FLT_FN (BUILT_IN_EXPM1):
      CASE_FLT_FN (BUILT_IN_FABS):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
      CASE_FLT_FN (BUILT_IN_FDIM):
      CASE_FLT_FN (BUILT_IN_FLOOR):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FLOOR):
      CASE_FLT_FN (BUILT_IN_FMA):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
      CASE_FLT_FN (BUILT_IN_FMAX):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMAX):
      CASE_FLT_FN (BUILT_IN_FMIN):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMIN):
      CASE_FLT_FN (BUILT_IN_FMOD):
      CASE_FLT_FN (BUILT_IN_FREXP):
      CASE_FLT_FN (BUILT_IN_HYPOT):
      CASE_FLT_FN (BUILT_IN_ILOGB):
      CASE_FLT_FN (BUILT_IN_LDEXP):
      CASE_FLT_FN (BUILT_IN_LGAMMA):
      CASE_FLT_FN (BUILT_IN_LLRINT):
      CASE_FLT_FN (BUILT_IN_LLROUND):
      CASE_FLT_FN (BUILT_IN_LOG):
      CASE_FLT_FN (BUILT_IN_LOG10):
      CASE_FLT_FN (BUILT_IN_LOG1P):
      CASE_FLT_FN (BUILT_IN_LOG2):
      CASE_FLT_FN (BUILT_IN_LOGB):
      CASE_FLT_FN (BUILT_IN_LRINT):
      CASE_FLT_FN (BUILT_IN_LROUND):
      CASE_FLT_FN (BUILT_IN_MODF):
      CASE_FLT_FN (BUILT_IN_NAN):
      CASE_FLT_FN (BUILT_IN_NEARBYINT):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEARBYINT):
      CASE_FLT_FN (BUILT_IN_NEXTAFTER):
      CASE_FLT_FN (BUILT_IN_NEXTTOWARD):
      CASE_FLT_FN (BUILT_IN_POW):
      CASE_FLT_FN (BUILT_IN_REMAINDER):
      CASE_FLT_FN (BUILT_IN_REMQUO):
      CASE_FLT_FN (BUILT_IN_RINT):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_RINT):
      CASE_FLT_FN (BUILT_IN_ROUND):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_ROUND):
      CASE_FLT_FN (BUILT_IN_SCALBLN):
      CASE_FLT_FN (BUILT_IN_SCALBN):
      CASE_FLT_FN (BUILT_IN_SIN):
      CASE_FLT_FN (BUILT_IN_SINH):
      CASE_FLT_FN (BUILT_IN_SINCOS):
      CASE_FLT_FN (BUILT_IN_SQRT):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_SQRT):
      CASE_FLT_FN (BUILT_IN_TAN):
      CASE_FLT_FN (BUILT_IN_TANH):
      CASE_FLT_FN (BUILT_IN_TGAMMA):
      CASE_FLT_FN (BUILT_IN_TRUNC):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_TRUNC):
        return true;

      default:
        break;
      }
  return false;
}
/* Return true if OFFRNG is bounded to a subrange of offset values
   valid for the largest possible object.  */

bool
access_ref::offset_bounded () const
{
  tree min = TYPE_MIN_VALUE (ptrdiff_type_node);
  tree max = TYPE_MAX_VALUE (ptrdiff_type_node);
  return wi::to_offset (min) <= offrng[0] && offrng[1] <= wi::to_offset (max);
}
/* If CALLEE has known side effects, fill in INFO and return true.
   See tree-ssa-structalias.c:find_func_aliases
   for the list of builtins we might need to handle here.  */

attr_fnspec
builtin_fnspec (tree callee)
{
  built_in_function code = DECL_FUNCTION_CODE (callee);

  switch (code)
    {
      /* All the following functions read memory pointed to by
         their second argument and write memory pointed to by first
         argument.
         strcat/strncat additionally reads memory pointed to by the first
         argument.  */
    case BUILT_IN_STRCAT:
    case BUILT_IN_STRCAT_CHK:

    case BUILT_IN_STRNCAT:
    case BUILT_IN_STRNCAT_CHK:

    case BUILT_IN_STRCPY:
    case BUILT_IN_STRCPY_CHK:

    case BUILT_IN_STPCPY:
    case BUILT_IN_STPCPY_CHK:

    case BUILT_IN_STRNCPY:
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMMOVE:
    case BUILT_IN_TM_MEMCPY:
    case BUILT_IN_TM_MEMMOVE:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:

    case BUILT_IN_MEMPCPY:
    case BUILT_IN_MEMPCPY_CHK:

    case BUILT_IN_STPNCPY:
    case BUILT_IN_STPNCPY_CHK:

    case BUILT_IN_BCOPY:

    case BUILT_IN_BZERO:

    case BUILT_IN_MEMCMP:
    case BUILT_IN_MEMCMP_EQ:
    case BUILT_IN_BCMP:
    case BUILT_IN_STRNCMP:
    case BUILT_IN_STRNCMP_EQ:
    case BUILT_IN_STRNCASECMP:

      /* The following functions read memory pointed to by their
         first argument.  */
    CASE_BUILT_IN_TM_LOAD (1):
    CASE_BUILT_IN_TM_LOAD (2):
    CASE_BUILT_IN_TM_LOAD (4):
    CASE_BUILT_IN_TM_LOAD (8):
    CASE_BUILT_IN_TM_LOAD (FLOAT):
    CASE_BUILT_IN_TM_LOAD (DOUBLE):
    CASE_BUILT_IN_TM_LOAD (LDOUBLE):
    CASE_BUILT_IN_TM_LOAD (M64):
    CASE_BUILT_IN_TM_LOAD (M128):
    CASE_BUILT_IN_TM_LOAD (M256):
    case BUILT_IN_TM_LOG:
    case BUILT_IN_TM_LOG_1:
    case BUILT_IN_TM_LOG_2:
    case BUILT_IN_TM_LOG_4:
    case BUILT_IN_TM_LOG_8:
    case BUILT_IN_TM_LOG_FLOAT:
    case BUILT_IN_TM_LOG_DOUBLE:
    case BUILT_IN_TM_LOG_LDOUBLE:
    case BUILT_IN_TM_LOG_M64:
    case BUILT_IN_TM_LOG_M128:
    case BUILT_IN_TM_LOG_M256:

    case BUILT_IN_INDEX:
    case BUILT_IN_RINDEX:
    case BUILT_IN_STRCHR:
    case BUILT_IN_STRLEN:
    case BUILT_IN_STRRCHR:

    case BUILT_IN_STRNLEN:

      /* These read memory pointed to by the first argument.
         Allocating memory does not have any side-effects apart from
         being the definition point for the pointer.
         Unix98 specifies that errno is set on allocation failure.  */
    case BUILT_IN_STRDUP:

    case BUILT_IN_STRNDUP:

      /* Allocating memory does not have any side-effects apart from
         being the definition point for the pointer.  */
    case BUILT_IN_MALLOC:
    case BUILT_IN_ALIGNED_ALLOC:
    case BUILT_IN_CALLOC:
    case BUILT_IN_GOMP_ALLOC:

    CASE_BUILT_IN_ALLOCA:

      /* These read memory pointed to by the first argument with size
         in the third argument.  */
    case BUILT_IN_MEMCHR:

      /* These read memory pointed to by the first and second arguments.  */
    case BUILT_IN_STRSTR:
    case BUILT_IN_STRPBRK:
    case BUILT_IN_STRCASECMP:
    case BUILT_IN_STRCSPN:
    case BUILT_IN_STRSPN:
    case BUILT_IN_STRCMP:
    case BUILT_IN_STRCMP_EQ:

      /* Freeing memory kills the pointed-to memory.  More importantly
         the call has to serve as a barrier for moving loads and stores
         across it.  */
    case BUILT_IN_STACK_RESTORE:
    case BUILT_IN_FREE:
    case BUILT_IN_GOMP_FREE:

    case BUILT_IN_VA_END:

      /* Realloc serves both as allocation point and deallocation point.  */
    case BUILT_IN_REALLOC:

    case BUILT_IN_GAMMA_R:
    case BUILT_IN_GAMMAF_R:
    case BUILT_IN_GAMMAL_R:
    case BUILT_IN_LGAMMA_R:
    case BUILT_IN_LGAMMAF_R:
    case BUILT_IN_LGAMMAL_R:

    case BUILT_IN_FREXP:
    case BUILT_IN_FREXPF:
    case BUILT_IN_FREXPL:
    case BUILT_IN_MODF:
    case BUILT_IN_MODFF:
    case BUILT_IN_MODFL:

    case BUILT_IN_REMQUO:
    case BUILT_IN_REMQUOF:
    case BUILT_IN_REMQUOL:

    case BUILT_IN_SINCOS:
    case BUILT_IN_SINCOSF:
    case BUILT_IN_SINCOSL:

    case BUILT_IN_MEMSET:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_TM_MEMSET:

    CASE_BUILT_IN_TM_STORE (1):
    CASE_BUILT_IN_TM_STORE (2):
    CASE_BUILT_IN_TM_STORE (4):
    CASE_BUILT_IN_TM_STORE (8):
    CASE_BUILT_IN_TM_STORE (FLOAT):
    CASE_BUILT_IN_TM_STORE (DOUBLE):
    CASE_BUILT_IN_TM_STORE (LDOUBLE):
    CASE_BUILT_IN_TM_STORE (M64):
    CASE_BUILT_IN_TM_STORE (M128):
    CASE_BUILT_IN_TM_STORE (M256):

    case BUILT_IN_STACK_SAVE:

    case BUILT_IN_ASSUME_ALIGNED:

      /* But posix_memalign stores a pointer into the memory pointed to
         by its first argument.  */
    case BUILT_IN_POSIX_MEMALIGN: