/* Expand builtin functions.
   Copyright (C) 1988-2023 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.cc instead.  */
#include "coretypes.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "gimple-ssa-warn-access.h"
#include "stor-layout.h"
#include "tree-object-size.h"
#include "tree-ssa-strlen.h"
#include "typeclass.h"
#include "langhooks.h"
#include "value-prof.h"
#include "stringpool.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimple-iterator.h"
#include "gimple-fold.h"
#include "file-prefix-map.h" /* remap_macro_filename()  */
#include "gomp-constants.h"
#include "omp-general.h"
#include "gimple-ssa.h"
#include "tree-ssa-live.h"
#include "tree-outof-ssa.h"
#include "attr-fnspec.h"
#include "gimple-range.h"
#include "pointer-query.h"
struct target_builtins default_target_builtins;
struct target_builtins *this_target_builtins = &default_target_builtins;
/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};

/* Setup an array of builtin_info_type, make sure each element decl is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int) END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;
static int target_char_cast (tree, char *);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static rtx expand_builtin_classify_type (tree);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_fegetround (tree, rtx, machine_mode);
static rtx expand_builtin_feclear_feraise_except (tree, rtx, machine_mode,
						  optab);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_issignaling (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx inline_expand_builtin_bytecmp (tree, rtx);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
					    rtx target, tree exp,
					    memop_ret retmode,
					    bool might_overlap);
static rtx expand_builtin_memmove (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, memop_ret);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static rtx expand_builtin_expect_with_probability (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree,
					enum tree_code, enum tree_code);
static tree fold_builtin_iseqsig (location_t, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree *, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static tree fold_builtin_object_size (tree, tree, enum built_in_function);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);
/* Return true if NAME starts with __builtin_, __sync_ or __atomic_.  */

static bool
is_builtin_name (const char *name)
{
  return (startswith (name, "__builtin_")
	  || startswith (name, "__sync_")
	  || startswith (name, "__atomic_"));
}
/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}
/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */
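/* As an illustration of the M/N contract above: for a reference to a
   field at byte offset 2 inside an object known to be 8-byte aligned,
   this function sets *ALIGNP to 64 and *BITPOSP to 16, because
   (&EXP - 16 bits) is a multiple of 64 bits and 16 < 64.  */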
238 get_object_alignment_2 (tree exp
, unsigned int *alignp
,
239 unsigned HOST_WIDE_INT
*bitposp
, bool addr_p
)
241 poly_int64 bitsize
, bitpos
;
244 int unsignedp
, reversep
, volatilep
;
245 unsigned int align
= BITS_PER_UNIT
;
246 bool known_alignment
= false;
248 /* Get the innermost object and the constant (bitpos) and possibly
249 variable (offset) offset of the access. */
250 exp
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
, &mode
,
251 &unsignedp
, &reversep
, &volatilep
);
253 /* Extract alignment information from the innermost object and
254 possibly adjust bitpos and offset. */
255 if (TREE_CODE (exp
) == FUNCTION_DECL
)
257 /* Function addresses can encode extra information besides their
258 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
259 allows the low bit to be used as a virtual bit, we know
260 that the address itself must be at least 2-byte aligned. */
261 if (TARGET_PTRMEMFUNC_VBIT_LOCATION
== ptrmemfunc_vbit_in_pfn
)
262 align
= 2 * BITS_PER_UNIT
;
264 else if (TREE_CODE (exp
) == LABEL_DECL
)
266 else if (TREE_CODE (exp
) == CONST_DECL
)
268 /* The alignment of a CONST_DECL is determined by its initializer. */
269 exp
= DECL_INITIAL (exp
);
270 align
= TYPE_ALIGN (TREE_TYPE (exp
));
271 if (CONSTANT_CLASS_P (exp
))
272 align
= targetm
.constant_alignment (exp
, align
);
274 known_alignment
= true;
276 else if (DECL_P (exp
))
278 align
= DECL_ALIGN (exp
);
279 known_alignment
= true;
281 else if (TREE_CODE (exp
) == INDIRECT_REF
282 || TREE_CODE (exp
) == MEM_REF
283 || TREE_CODE (exp
) == TARGET_MEM_REF
)
285 tree addr
= TREE_OPERAND (exp
, 0);
287 unsigned HOST_WIDE_INT ptr_bitpos
;
288 unsigned HOST_WIDE_INT ptr_bitmask
= ~0;
      /* If the address is explicitly aligned, handle that.  */
291 if (TREE_CODE (addr
) == BIT_AND_EXPR
292 && TREE_CODE (TREE_OPERAND (addr
, 1)) == INTEGER_CST
)
294 ptr_bitmask
= TREE_INT_CST_LOW (TREE_OPERAND (addr
, 1));
295 ptr_bitmask
*= BITS_PER_UNIT
;
296 align
= least_bit_hwi (ptr_bitmask
);
297 addr
= TREE_OPERAND (addr
, 0);
301 = get_pointer_alignment_1 (addr
, &ptr_align
, &ptr_bitpos
);
302 align
= MAX (ptr_align
, align
);
304 /* Re-apply explicit alignment to the bitpos. */
305 ptr_bitpos
&= ptr_bitmask
;
307 /* The alignment of the pointer operand in a TARGET_MEM_REF
308 has to take the variable offset parts into account. */
309 if (TREE_CODE (exp
) == TARGET_MEM_REF
)
313 unsigned HOST_WIDE_INT step
= 1;
315 step
= TREE_INT_CST_LOW (TMR_STEP (exp
));
316 align
= MIN (align
, least_bit_hwi (step
) * BITS_PER_UNIT
);
318 if (TMR_INDEX2 (exp
))
319 align
= BITS_PER_UNIT
;
320 known_alignment
= false;
323 /* When EXP is an actual memory reference then we can use
324 TYPE_ALIGN of a pointer indirection to derive alignment.
325 Do so only if get_pointer_alignment_1 did not reveal absolute
326 alignment knowledge and if using that alignment would
327 improve the situation. */
329 if (!addr_p
&& !known_alignment
330 && (talign
= min_align_of_type (TREE_TYPE (exp
)) * BITS_PER_UNIT
)
335 /* Else adjust bitpos accordingly. */
336 bitpos
+= ptr_bitpos
;
337 if (TREE_CODE (exp
) == MEM_REF
338 || TREE_CODE (exp
) == TARGET_MEM_REF
)
339 bitpos
+= mem_ref_offset (exp
).force_shwi () * BITS_PER_UNIT
;
342 else if (TREE_CODE (exp
) == STRING_CST
)
344 /* STRING_CST are the only constant objects we allow to be not
345 wrapped inside a CONST_DECL. */
346 align
= TYPE_ALIGN (TREE_TYPE (exp
));
347 if (CONSTANT_CLASS_P (exp
))
348 align
= targetm
.constant_alignment (exp
, align
);
350 known_alignment
= true;
353 /* If there is a non-constant offset part extract the maximum
354 alignment that can prevail. */
357 unsigned int trailing_zeros
= tree_ctz (offset
);
358 if (trailing_zeros
< HOST_BITS_PER_INT
)
360 unsigned int inner
= (1U << trailing_zeros
) * BITS_PER_UNIT
;
362 align
= MIN (align
, inner
);
366 /* Account for the alignment of runtime coefficients, so that the constant
367 bitpos is guaranteed to be accurate. */
368 unsigned int alt_align
= ::known_alignment (bitpos
- bitpos
.coeffs
[0]);
369 if (alt_align
!= 0 && alt_align
< align
)
372 known_alignment
= false;
376 *bitposp
= bitpos
.coeffs
[0] & (align
- 1);
377 return known_alignment
;
/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  /* Strip a WITH_SIZE_EXPR, get_inner_reference doesn't know how to deal
     with it.  */
  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    exp = TREE_OPERAND (exp, 0);
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}
/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);

  return align;
}
/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Return false if the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */
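/* For example, for a pointer P known to satisfy P % 16 == 4 the caller
   receives *ALIGNP == 128 and *BITPOSP == 32 (both in bits), i.e.
   P minus 4 bytes is 16-byte aligned.  */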
422 get_pointer_alignment_1 (tree exp
, unsigned int *alignp
,
423 unsigned HOST_WIDE_INT
*bitposp
)
427 if (TREE_CODE (exp
) == ADDR_EXPR
)
428 return get_object_alignment_2 (TREE_OPERAND (exp
, 0),
429 alignp
, bitposp
, true);
430 else if (TREE_CODE (exp
) == POINTER_PLUS_EXPR
)
433 unsigned HOST_WIDE_INT bitpos
;
434 bool res
= get_pointer_alignment_1 (TREE_OPERAND (exp
, 0),
436 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
437 bitpos
+= TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)) * BITS_PER_UNIT
;
440 unsigned int trailing_zeros
= tree_ctz (TREE_OPERAND (exp
, 1));
441 if (trailing_zeros
< HOST_BITS_PER_INT
)
443 unsigned int inner
= (1U << trailing_zeros
) * BITS_PER_UNIT
;
445 align
= MIN (align
, inner
);
449 *bitposp
= bitpos
& (align
- 1);
452 else if (TREE_CODE (exp
) == SSA_NAME
453 && POINTER_TYPE_P (TREE_TYPE (exp
)))
455 unsigned int ptr_align
, ptr_misalign
;
456 struct ptr_info_def
*pi
= SSA_NAME_PTR_INFO (exp
);
458 if (pi
&& get_ptr_info_alignment (pi
, &ptr_align
, &ptr_misalign
))
460 *bitposp
= ptr_misalign
* BITS_PER_UNIT
;
461 *alignp
= ptr_align
* BITS_PER_UNIT
;
462 /* Make sure to return a sensible alignment when the multiplication
463 by BITS_PER_UNIT overflowed. */
465 *alignp
= 1u << (HOST_BITS_PER_INT
- 1);
466 /* We cannot really tell whether this result is an approximation. */
472 *alignp
= BITS_PER_UNIT
;
476 else if (TREE_CODE (exp
) == INTEGER_CST
)
478 *alignp
= BIGGEST_ALIGNMENT
;
479 *bitposp
= ((TREE_INT_CST_LOW (exp
) * BITS_PER_UNIT
)
480 & (BIGGEST_ALIGNMENT
- 1));
485 *alignp
= BITS_PER_UNIT
;
/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);

  return align;
}
/* Return the number of leading non-zero elements in the sequence
   [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
   ELTSIZE must be a power of 2 less than 8.  Used by c_strlen.  */
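/* For instance, with ELTSIZE == 1, string_length ("ab\0cd", 1, 5)
   returns 2: the scan stops at the first NUL element.  The same logic
   applies to 2- and 4-byte elements, where an element counts as zero
   only when all of its bytes are zero.  */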
static unsigned
string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
{
  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  unsigned n;

  if (eltsize == 1)
    {
      /* Optimize the common case of plain char.  */
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n;
	  if (!*elt)
	    break;
	}
    }
  else
    {
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n * eltsize;
	  if (!memcmp (elt, "\0\0\0\0", eltsize))
	    break;
	}
    }
  return n;
}
/* Compute the length of a null-terminated character string or wide
   character string handling character sizes of 1, 2, and 4 bytes.
   TREE_STRING_LENGTH is not the right way because it evaluates to
   the size of the character array in bytes (as opposed to characters)
   and because it can contain a zero byte in the middle.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (ARG, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   Additional information about the string accessed may be recorded
   in DATA.  For example, if ARG references an unterminated string,
   then the declaration will be stored in the DECL field.  If the
   length of the unterminated string can be determined, it'll be
   stored in the LEN field.  Note this length could well be different
   than what a C strlen call would return.

   ELTSIZE is 1 for normal single byte character strings, and 2 or
   4 for wide character strings.  ELTSIZE is by default 1.

   The value returned is of type `ssizetype'.  */
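/* For example, if ARG is the address of the string literal "hello" the
   result is ssize_int (5); for "foo\0bar" accessed at offset 0 it is
   ssize_int (3).  If the same "foo\0bar" is accessed at a variable
   offset, NULL_TREE is returned, because the distance to the next NUL
   cannot be determined.  */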
576 c_strlen (tree arg
, int only_value
, c_strlen_data
*data
, unsigned eltsize
)
578 /* If we were not passed a DATA pointer, then get one to a local
579 structure. That avoids having to check DATA for NULL before
580 each time we want to use it. */
581 c_strlen_data local_strlen_data
= { };
583 data
= &local_strlen_data
;
585 gcc_checking_assert (eltsize
== 1 || eltsize
== 2 || eltsize
== 4);
587 tree src
= STRIP_NOPS (arg
);
588 if (TREE_CODE (src
) == COND_EXPR
589 && (only_value
|| !TREE_SIDE_EFFECTS (TREE_OPERAND (src
, 0))))
593 len1
= c_strlen (TREE_OPERAND (src
, 1), only_value
, data
, eltsize
);
594 len2
= c_strlen (TREE_OPERAND (src
, 2), only_value
, data
, eltsize
);
595 if (tree_int_cst_equal (len1
, len2
))
599 if (TREE_CODE (src
) == COMPOUND_EXPR
600 && (only_value
|| !TREE_SIDE_EFFECTS (TREE_OPERAND (src
, 0))))
601 return c_strlen (TREE_OPERAND (src
, 1), only_value
, data
, eltsize
);
603 location_t loc
= EXPR_LOC_OR_LOC (src
, input_location
);
605 /* Offset from the beginning of the string in bytes. */
609 src
= string_constant (src
, &byteoff
, &memsize
, &decl
);
613 /* Determine the size of the string element. */
614 if (eltsize
!= tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src
)))))
617 /* Set MAXELTS to ARRAY_SIZE (SRC) - 1, the maximum possible
618 length of SRC. Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
619 in case the latter is less than the size of the array, such as when
620 SRC refers to a short string literal used to initialize a large array.
621 In that case, the elements of the array after the terminating NUL are
623 HOST_WIDE_INT strelts
= TREE_STRING_LENGTH (src
);
624 strelts
= strelts
/ eltsize
;
626 if (!tree_fits_uhwi_p (memsize
))
629 HOST_WIDE_INT maxelts
= tree_to_uhwi (memsize
) / eltsize
;
631 /* PTR can point to the byte representation of any string type, including
632 char* and wchar_t*. */
633 const char *ptr
= TREE_STRING_POINTER (src
);
635 if (byteoff
&& TREE_CODE (byteoff
) != INTEGER_CST
)
637 /* The code below works only for single byte character types. */
641 /* If the string has an internal NUL character followed by any
642 non-NUL characters (e.g., "foo\0bar"), we can't compute
643 the offset to the following NUL if we don't know where to
644 start searching for it. */
645 unsigned len
= string_length (ptr
, eltsize
, strelts
);
647 /* Return when an embedded null character is found or none at all.
648 In the latter case, set the DECL/LEN field in the DATA structure
649 so that callers may examine them. */
650 if (len
+ 1 < strelts
)
652 else if (len
>= maxelts
)
656 data
->minlen
= ssize_int (len
);
660 /* For empty strings the result should be zero. */
662 return ssize_int (0);
664 /* We don't know the starting offset, but we do know that the string
665 has no internal zero bytes. If the offset falls within the bounds
666 of the string subtract the offset from the length of the string,
667 and return that. Otherwise the length is zero. Take care to
668 use SAVE_EXPR in case the OFFSET has side-effects. */
669 tree offsave
= TREE_SIDE_EFFECTS (byteoff
) ? save_expr (byteoff
)
671 offsave
= fold_convert_loc (loc
, sizetype
, offsave
);
672 tree condexp
= fold_build2_loc (loc
, LE_EXPR
, boolean_type_node
, offsave
,
674 tree lenexp
= fold_build2_loc (loc
, MINUS_EXPR
, sizetype
, size_int (len
),
676 lenexp
= fold_convert_loc (loc
, ssizetype
, lenexp
);
677 return fold_build3_loc (loc
, COND_EXPR
, ssizetype
, condexp
, lenexp
,
678 build_zero_cst (ssizetype
));
681 /* Offset from the beginning of the string in elements. */
682 HOST_WIDE_INT eltoff
;
684 /* We have a known offset into the string. Start searching there for
685 a null character if we can represent it as a single HOST_WIDE_INT. */
688 else if (! tree_fits_uhwi_p (byteoff
) || tree_to_uhwi (byteoff
) % eltsize
)
691 eltoff
= tree_to_uhwi (byteoff
) / eltsize
;
693 /* If the offset is known to be out of bounds, warn, and call strlen at
695 if (eltoff
< 0 || eltoff
>= maxelts
)
697 /* Suppress multiple warnings for propagated constant strings. */
699 && !warning_suppressed_p (arg
, OPT_Warray_bounds_
)
700 && warning_at (loc
, OPT_Warray_bounds_
,
701 "offset %qwi outside bounds of constant string",
705 inform (DECL_SOURCE_LOCATION (decl
), "%qE declared here", decl
);
706 suppress_warning (arg
, OPT_Warray_bounds_
);
711 /* If eltoff is larger than strelts but less than maxelts the
712 string length is zero, since the excess memory will be zero. */
713 if (eltoff
> strelts
)
714 return ssize_int (0);
716 /* Use strlen to search for the first zero byte. Since any strings
717 constructed with build_string will have nulls appended, we win even
718 if we get handed something like (char[4])"abcd".
720 Since ELTOFF is our starting index into the string, no further
721 calculation is needed. */
722 unsigned len
= string_length (ptr
+ eltoff
* eltsize
, eltsize
,
725 /* Don't know what to return if there was no zero termination.
726 Ideally this would turn into a gcc_checking_assert over time.
727 Set DECL/LEN so callers can examine them. */
728 if (len
>= maxelts
- eltoff
)
732 data
->minlen
= ssize_int (len
);
736 return ssize_int (len
);
/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  If
   NULL_TERMINATED_P, reading stops after '\0' character, all further ones
   are assumed to be zero, otherwise it reads as many characters
   as needed.  */
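/* As an illustration (assuming a little-endian target): applying
   c_readstr to "ab" in SImode yields the constant 0x00006261, the same
   value a 4-byte load from the bytes "ab\0\0" would produce; bytes past
   the terminating NUL read as zero when NULL_TERMINATED_P.  */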
746 c_readstr (const char *str
, scalar_int_mode mode
,
747 bool null_terminated_p
/*=true*/)
751 HOST_WIDE_INT tmp
[MAX_BITSIZE_MODE_ANY_INT
/ HOST_BITS_PER_WIDE_INT
];
753 gcc_assert (GET_MODE_CLASS (mode
) == MODE_INT
);
754 unsigned int len
= (GET_MODE_PRECISION (mode
) + HOST_BITS_PER_WIDE_INT
- 1)
755 / HOST_BITS_PER_WIDE_INT
;
757 gcc_assert (len
<= MAX_BITSIZE_MODE_ANY_INT
/ HOST_BITS_PER_WIDE_INT
);
758 for (i
= 0; i
< len
; i
++)
762 for (i
= 0; i
< GET_MODE_SIZE (mode
); i
++)
765 if (WORDS_BIG_ENDIAN
)
766 j
= GET_MODE_SIZE (mode
) - i
- 1;
767 if (BYTES_BIG_ENDIAN
!= WORDS_BIG_ENDIAN
768 && GET_MODE_SIZE (mode
) >= UNITS_PER_WORD
)
769 j
= j
+ UNITS_PER_WORD
- 2 * (j
% UNITS_PER_WORD
) - 1;
772 if (ch
|| !null_terminated_p
)
773 ch
= (unsigned char) str
[i
];
774 tmp
[j
/ HOST_BITS_PER_WIDE_INT
] |= ch
<< (j
% HOST_BITS_PER_WIDE_INT
);
777 wide_int c
= wide_int::from_array (tmp
, len
, GET_MODE_PRECISION (mode
));
778 return immed_wide_int_const (c
, mode
);
/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to
   by P.  */
786 target_char_cast (tree cst
, char *p
)
788 unsigned HOST_WIDE_INT val
, hostval
;
790 if (TREE_CODE (cst
) != INTEGER_CST
791 || CHAR_TYPE_SIZE
> HOST_BITS_PER_WIDE_INT
)
794 /* Do not care if it fits or not right here. */
795 val
= TREE_INT_CST_LOW (cst
);
797 if (CHAR_TYPE_SIZE
< HOST_BITS_PER_WIDE_INT
)
798 val
&= (HOST_WIDE_INT_1U
<< CHAR_TYPE_SIZE
) - 1;
801 if (HOST_BITS_PER_CHAR
< HOST_BITS_PER_WIDE_INT
)
802 hostval
&= (HOST_WIDE_INT_1U
<< HOST_BITS_PER_CHAR
) - 1;
/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (VAR_P (exp) && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}
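/* E.g. an SSA_NAME or a non-addressable PARM_DECL is returned unchanged,
   because re-reading it between the two evaluations is safe, whereas an
   indirection such as *p gets wrapped in a SAVE_EXPR.  */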
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */
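/* E.g. for the C source

     void *ra = __builtin_return_address (0);
     void *fp = __builtin_frame_address (1);

   the first call is expanded with COUNT == 0 and FNDECL_CODE ==
   BUILT_IN_RETURN_ADDRESS, the second with COUNT == 1 and FNDECL_CODE ==
   BUILT_IN_FRAME_ADDRESS, walking one link of the dynamic chain.  */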
832 expand_builtin_return_addr (enum built_in_function fndecl_code
, int count
)
835 rtx tem
= INITIAL_FRAME_ADDRESS_RTX
;
838 /* For a zero count with __builtin_return_address, we don't care what
839 frame address we return, because target-specific definitions will
840 override us. Therefore frame pointer elimination is OK, and using
841 the soft frame pointer is OK.
843 For a nonzero count, or a zero count with __builtin_frame_address,
844 we require a stable offset from the current frame pointer to the
845 previous one, so we must use the hard frame pointer, and
846 we must disable frame pointer elimination. */
847 if (count
== 0 && fndecl_code
== BUILT_IN_RETURN_ADDRESS
)
848 tem
= frame_pointer_rtx
;
851 tem
= hard_frame_pointer_rtx
;
853 /* Tell reload not to eliminate the frame pointer. */
854 crtl
->accesses_prior_frames
= 1;
859 SETUP_FRAME_ADDRESSES ();
861 /* On the SPARC, the return address is not in the frame, it is in a
862 register. There is no way to access it off of the current frame
863 pointer, but it can be accessed off the previous frame pointer by
864 reading the value from the register window save area. */
865 if (RETURN_ADDR_IN_PREVIOUS_FRAME
&& fndecl_code
== BUILT_IN_RETURN_ADDRESS
)
868 /* Scan back COUNT frames to the specified frame. */
869 for (i
= 0; i
< count
; i
++)
871 /* Assume the dynamic chain pointer is in the word that the
872 frame address points to, unless otherwise specified. */
873 tem
= DYNAMIC_CHAIN_ADDRESS (tem
);
874 tem
= memory_address (Pmode
, tem
);
875 tem
= gen_frame_mem (Pmode
, tem
);
876 tem
= copy_to_reg (tem
);
879 /* For __builtin_frame_address, return what we've got. But, on
880 the SPARC for example, we may have to add a bias. */
881 if (fndecl_code
== BUILT_IN_FRAME_ADDRESS
)
882 return FRAME_ADDR_RTX (tem
);
884 /* For __builtin_return_address, get the return address from that frame. */
885 #ifdef RETURN_ADDR_RTX
886 tem
= RETURN_ADDR_RTX (count
, tem
);
888 tem
= memory_address (Pmode
,
889 plus_constant (Pmode
, tem
, GET_MODE_SIZE (Pmode
)));
890 tem
= gen_frame_mem (Pmode
, tem
);
895 /* Alias set used for setjmp buffer. */
896 static alias_set_type setjmp_alias_set
= -1;
/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */
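/* Layout written below: word 0 of the buffer receives the frame pointer,
   word 1 the address of RECEIVER_LABEL, and the remainder (starting at
   offset 2 * GET_MODE_SIZE (Pmode)) the machine-dependent stack save
   area.  */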
903 expand_builtin_setjmp_setup (rtx buf_addr
, rtx receiver_label
)
905 machine_mode sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
909 if (setjmp_alias_set
== -1)
910 setjmp_alias_set
= new_alias_set ();
912 buf_addr
= convert_memory_address (Pmode
, buf_addr
);
914 buf_addr
= force_reg (Pmode
, force_operand (buf_addr
, NULL_RTX
));
916 /* We store the frame pointer and the address of receiver_label in
917 the buffer and use the rest of it for the stack save area, which
918 is machine-dependent. */
920 mem
= gen_rtx_MEM (Pmode
, buf_addr
);
921 set_mem_alias_set (mem
, setjmp_alias_set
);
922 emit_move_insn (mem
, hard_frame_pointer_rtx
);
924 mem
= gen_rtx_MEM (Pmode
, plus_constant (Pmode
, buf_addr
,
925 GET_MODE_SIZE (Pmode
))),
926 set_mem_alias_set (mem
, setjmp_alias_set
);
928 emit_move_insn (validize_mem (mem
),
929 force_reg (Pmode
, gen_rtx_LABEL_REF (Pmode
, receiver_label
)));
931 stack_save
= gen_rtx_MEM (sa_mode
,
932 plus_constant (Pmode
, buf_addr
,
933 2 * GET_MODE_SIZE (Pmode
)));
934 set_mem_alias_set (stack_save
, setjmp_alias_set
);
935 emit_stack_save (SAVE_NONLOCAL
, &stack_save
);
937 /* If there is further processing to do, do it. */
938 if (targetm
.have_builtin_setjmp_setup ())
939 emit_insn (targetm
.gen_builtin_setjmp_setup (buf_addr
));
941 /* We have a nonlocal label. */
942 cfun
->has_nonlocal_label
= 1;
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */
950 expand_builtin_setjmp_receiver (rtx receiver_label
)
954 /* Mark the FP as used when we get here, so we have to make sure it's
955 marked as used by this function. */
956 emit_use (hard_frame_pointer_rtx
);
958 /* Mark the static chain as clobbered here so life information
959 doesn't get messed up for it. */
960 chain
= rtx_for_static_chain (current_function_decl
, true);
961 if (chain
&& REG_P (chain
))
962 emit_clobber (chain
);
964 if (!HARD_FRAME_POINTER_IS_ARG_POINTER
&& fixed_regs
[ARG_POINTER_REGNUM
])
966 /* If the argument pointer can be eliminated in favor of the
967 frame pointer, we don't need to restore it. We assume here
968 that if such an elimination is present, it can always be used.
969 This is the case on all known machines; if we don't make this
970 assumption, we do unnecessary saving on many machines. */
972 static const struct elims
{const int from
, to
;} elim_regs
[] = ELIMINABLE_REGS
;
974 for (i
= 0; i
< ARRAY_SIZE (elim_regs
); i
++)
975 if (elim_regs
[i
].from
== ARG_POINTER_REGNUM
976 && elim_regs
[i
].to
== HARD_FRAME_POINTER_REGNUM
)
979 if (i
== ARRAY_SIZE (elim_regs
))
981 /* Now restore our arg pointer from the address at which it
982 was saved in our stack frame. */
983 emit_move_insn (crtl
->args
.internal_arg_pointer
,
984 copy_to_reg (get_arg_pointer_save_area ()));
988 if (receiver_label
!= NULL
&& targetm
.have_builtin_setjmp_receiver ())
989 emit_insn (targetm
.gen_builtin_setjmp_receiver (receiver_label
));
990 else if (targetm
.have_nonlocal_goto_receiver ())
991 emit_insn (targetm
.gen_nonlocal_goto_receiver ());
995 /* We must not allow the code we just generated to be reordered by
996 scheduling. Specifically, the update of the frame pointer must
997 happen immediately, not later. */
998 emit_insn (gen_blockage ());
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */
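/* Typical usage pattern, mirroring a setjmp/longjmp pair:

     if (__builtin_setjmp (buf) == 0)
       ... normal path ...
     else
       ... resumed here after __builtin_longjmp (buf, 1) ...

   The second argument to __builtin_longjmp must be the constant 1,
   which is asserted below.  */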
1007 expand_builtin_longjmp (rtx buf_addr
, rtx value
)
1010 rtx_insn
*insn
, *last
;
1011 machine_mode sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
1013 /* DRAP is needed for stack realign if longjmp is expanded to current
1015 if (SUPPORTS_STACK_ALIGNMENT
)
1016 crtl
->need_drap
= true;
1018 if (setjmp_alias_set
== -1)
1019 setjmp_alias_set
= new_alias_set ();
1021 buf_addr
= convert_memory_address (Pmode
, buf_addr
);
1023 buf_addr
= force_reg (Pmode
, buf_addr
);
1025 /* We require that the user must pass a second argument of 1, because
1026 that is what builtin_setjmp will return. */
1027 gcc_assert (value
== const1_rtx
);
1029 last
= get_last_insn ();
1030 if (targetm
.have_builtin_longjmp ())
1031 emit_insn (targetm
.gen_builtin_longjmp (buf_addr
));
1034 fp
= gen_rtx_MEM (Pmode
, buf_addr
);
1035 lab
= gen_rtx_MEM (Pmode
, plus_constant (Pmode
, buf_addr
,
1036 GET_MODE_SIZE (Pmode
)));
1038 stack
= gen_rtx_MEM (sa_mode
, plus_constant (Pmode
, buf_addr
,
1039 2 * GET_MODE_SIZE (Pmode
)));
1040 set_mem_alias_set (fp
, setjmp_alias_set
);
1041 set_mem_alias_set (lab
, setjmp_alias_set
);
1042 set_mem_alias_set (stack
, setjmp_alias_set
);
1044 /* Pick up FP, label, and SP from the block and jump. This code is
1045 from expand_goto in stmt.cc; see there for detailed comments. */
1046 if (targetm
.have_nonlocal_goto ())
1047 /* We have to pass a value to the nonlocal_goto pattern that will
1048 get copied into the static_chain pointer, but it does not matter
1049 what that value is, because builtin_setjmp does not use it. */
1050 emit_insn (targetm
.gen_nonlocal_goto (value
, lab
, stack
, fp
));
1053 emit_clobber (gen_rtx_MEM (BLKmode
, gen_rtx_SCRATCH (VOIDmode
)));
1054 emit_clobber (gen_rtx_MEM (BLKmode
, hard_frame_pointer_rtx
));
1056 lab
= copy_to_reg (lab
);
1058 /* Restore the frame pointer and stack pointer. We must use a
1059 temporary since the setjmp buffer may be a local. */
1060 fp
= copy_to_reg (fp
);
1061 emit_stack_restore (SAVE_NONLOCAL
, stack
);
1063 /* Ensure the frame pointer move is not optimized. */
1064 emit_insn (gen_blockage ());
1065 emit_clobber (hard_frame_pointer_rtx
);
1066 emit_clobber (frame_pointer_rtx
);
1067 emit_move_insn (hard_frame_pointer_rtx
, fp
);
1069 emit_use (hard_frame_pointer_rtx
);
1070 emit_use (stack_pointer_rtx
);
1071 emit_indirect_jump (lab
);
1075 /* Search backwards and mark the jump insn as a non-local goto.
1076 Note that this precludes the use of __builtin_longjmp to a
1077 __builtin_setjmp target in the same function. However, we've
1078 already cautioned the user that these functions are for
1079 internal exception handling use only. */
1080 for (insn
= get_last_insn (); insn
; insn
= PREV_INSN (insn
))
1082 gcc_assert (insn
!= last
);
1086 add_reg_note (insn
, REG_NON_LOCAL_GOTO
, const0_rtx
);
1089 else if (CALL_P (insn
))
1095 more_const_call_expr_args_p (const const_call_expr_arg_iterator
*iter
)
1097 return (iter
->i
< iter
->n
);
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */
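/* For example,

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)

   checks that EXP has exactly two pointer arguments, while

     validate_arglist (exp, POINTER_TYPE, 0)

   accepts one pointer argument followed by anything.  */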
1106 validate_arglist (const_tree callexpr
, ...)
1108 enum tree_code code
;
1111 const_call_expr_arg_iterator iter
;
1114 va_start (ap
, callexpr
);
1115 init_const_call_expr_arg_iterator (callexpr
, &iter
);
1117 /* Get a bitmap of pointer argument numbers declared attribute nonnull. */
1118 tree fn
= CALL_EXPR_FN (callexpr
);
1119 bitmap argmap
= get_nonnull_args (TREE_TYPE (TREE_TYPE (fn
)));
1121 for (unsigned argno
= 1; ; ++argno
)
1123 code
= (enum tree_code
) va_arg (ap
, int);
	  /* This signifies an ellipsis, any further arguments are all ok.  */
1132 /* This signifies an endlink, if no arguments remain, return
1133 true, otherwise return false. */
1134 res
= !more_const_call_expr_args_p (&iter
);
1137 /* The actual argument must be nonnull when either the whole
1138 called function has been declared nonnull, or when the formal
1139 argument corresponding to the actual argument has been. */
1141 && (bitmap_empty_p (argmap
) || bitmap_bit_p (argmap
, argno
)))
1143 arg
= next_const_call_expr_arg (&iter
);
1144 if (!validate_arg (arg
, code
) || integer_zerop (arg
))
1150 /* If no parameters remain or the parameter's code does not
1151 match the specified code, return false. Otherwise continue
1152 checking any remaining arguments. */
1153 arg
= next_const_call_expr_arg (&iter
);
1154 if (!validate_arg (arg
, code
))
1160 /* We need gotos here since we can only have one VA_CLOSE in a
1165 BITMAP_FREE (argmap
);
1170 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1171 and the address of the save area. */
1174 expand_builtin_nonlocal_goto (tree exp
)
1176 tree t_label
, t_save_area
;
1177 rtx r_label
, r_save_area
, r_fp
, r_sp
;
1180 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
1183 t_label
= CALL_EXPR_ARG (exp
, 0);
1184 t_save_area
= CALL_EXPR_ARG (exp
, 1);
1186 r_label
= expand_normal (t_label
);
1187 r_label
= convert_memory_address (Pmode
, r_label
);
1188 r_save_area
= expand_normal (t_save_area
);
1189 r_save_area
= convert_memory_address (Pmode
, r_save_area
);
1190 /* Copy the address of the save location to a register just in case it was
1191 based on the frame pointer. */
1192 r_save_area
= copy_to_reg (r_save_area
);
1193 r_fp
= gen_rtx_MEM (Pmode
, r_save_area
);
1194 r_sp
= gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL
),
1195 plus_constant (Pmode
, r_save_area
,
1196 GET_MODE_SIZE (Pmode
)));
1198 crtl
->has_nonlocal_goto
= 1;
1200 /* ??? We no longer need to pass the static chain value, afaik. */
1201 if (targetm
.have_nonlocal_goto ())
1202 emit_insn (targetm
.gen_nonlocal_goto (const0_rtx
, r_label
, r_sp
, r_fp
));
1205 emit_clobber (gen_rtx_MEM (BLKmode
, gen_rtx_SCRATCH (VOIDmode
)));
1206 emit_clobber (gen_rtx_MEM (BLKmode
, hard_frame_pointer_rtx
));
1208 r_label
= copy_to_reg (r_label
);
1210 /* Restore the frame pointer and stack pointer. We must use a
1211 temporary since the setjmp buffer may be a local. */
1212 r_fp
= copy_to_reg (r_fp
);
1213 emit_stack_restore (SAVE_NONLOCAL
, r_sp
);
1215 /* Ensure the frame pointer move is not optimized. */
1216 emit_insn (gen_blockage ());
1217 emit_clobber (hard_frame_pointer_rtx
);
1218 emit_clobber (frame_pointer_rtx
);
1219 emit_move_insn (hard_frame_pointer_rtx
, r_fp
);
1221 /* USE of hard_frame_pointer_rtx added for consistency;
1222 not clear if really needed. */
1223 emit_use (hard_frame_pointer_rtx
);
1224 emit_use (stack_pointer_rtx
);
1226 /* If the architecture is using a GP register, we must
1227 conservatively assume that the target function makes use of it.
1228 The prologue of functions with nonlocal gotos must therefore
1229 initialize the GP register to the appropriate value, and we
1230 must then make sure that this value is live at the point
1231 of the jump. (Note that this doesn't necessarily apply
1232 to targets with a nonlocal_goto pattern; they are free
1233 to implement it in their own way. Note also that this is
1234 a no-op if the GP register is a global invariant.) */
1235 unsigned regnum
= PIC_OFFSET_TABLE_REGNUM
;
1236 if (regnum
!= INVALID_REGNUM
&& fixed_regs
[regnum
])
1237 emit_use (pic_offset_table_rtx
);
1239 emit_indirect_jump (r_label
);
1242 /* Search backwards to the jump insn and mark it as a
1244 for (insn
= get_last_insn (); insn
; insn
= PREV_INSN (insn
))
1248 add_reg_note (insn
, REG_NON_LOCAL_GOTO
, const0_rtx
);
1251 else if (CALL_P (insn
))
1258 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1259 (not all will be used on all machines) that was passed to __builtin_setjmp.
1260 It updates the stack pointer in that block to the current value. This is
1261 also called directly by the SJLJ exception handling code. */
1264 expand_builtin_update_setjmp_buf (rtx buf_addr
)
1266 machine_mode sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
1267 buf_addr
= convert_memory_address (Pmode
, buf_addr
);
1269 = gen_rtx_MEM (sa_mode
,
1272 plus_constant (Pmode
, buf_addr
,
1273 2 * GET_MODE_SIZE (Pmode
))));
1275 emit_stack_save (SAVE_NONLOCAL
, &stack_save
);
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */
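/* For example,

     __builtin_prefetch (p, 1, 3);

   requests a prefetch of *P for writing with maximum temporal locality;
   both optional arguments must be compile-time constants, as enforced
   below.  */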
static void
expand_builtin_prefetch (tree exp)
{
1285 tree arg0
, arg1
, arg2
;
1289 if (!validate_arglist (exp
, POINTER_TYPE
, 0))
1292 arg0
= CALL_EXPR_ARG (exp
, 0);
1294 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1295 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1297 nargs
= call_expr_nargs (exp
);
1299 arg1
= CALL_EXPR_ARG (exp
, 1);
1301 arg1
= integer_zero_node
;
1303 arg2
= CALL_EXPR_ARG (exp
, 2);
1305 arg2
= integer_three_node
;
1307 /* Argument 0 is an address. */
1308 op0
= expand_expr (arg0
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
1310 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1311 if (TREE_CODE (arg1
) != INTEGER_CST
)
1313 error ("second argument to %<__builtin_prefetch%> must be a constant");
1314 arg1
= integer_zero_node
;
1316 op1
= expand_normal (arg1
);
1317 /* Argument 1 must be either zero or one. */
1318 if (INTVAL (op1
) != 0 && INTVAL (op1
) != 1)
1320 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1325 /* Argument 2 (locality) must be a compile-time constant int. */
1326 if (TREE_CODE (arg2
) != INTEGER_CST
)
1328 error ("third argument to %<__builtin_prefetch%> must be a constant");
1329 arg2
= integer_zero_node
;
1331 op2
= expand_normal (arg2
);
1332 /* Argument 2 must be 0, 1, 2, or 3. */
1333 if (INTVAL (op2
) < 0 || INTVAL (op2
) > 3)
1335 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1339 if (targetm
.have_prefetch ())
1341 class expand_operand ops
[3];
1343 create_address_operand (&ops
[0], op0
);
1344 create_integer_operand (&ops
[1], INTVAL (op1
));
1345 create_integer_operand (&ops
[2], INTVAL (op2
));
1346 if (maybe_expand_insn (targetm
.code_for_prefetch
, 3, ops
))
1350 /* Don't do anything with direct references to volatile memory, but
1351 generate code to handle other side effects. */
1352 if (!MEM_P (op0
) && side_effects_p (op0
))
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, cpymemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */
1362 get_memory_rtx (tree exp
, tree len
)
1364 tree orig_exp
= exp
, base
;
1367 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1368 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1369 if (TREE_CODE (exp
) == SAVE_EXPR
&& !SAVE_EXPR_RESOLVED_P (exp
))
1370 exp
= TREE_OPERAND (exp
, 0);
1372 addr
= expand_expr (orig_exp
, NULL_RTX
, ptr_mode
, EXPAND_NORMAL
);
1373 mem
= gen_rtx_MEM (BLKmode
, memory_address (BLKmode
, addr
));
1375 /* Get an expression we can use to find the attributes to assign to MEM.
1376 First remove any nops. */
1377 while (CONVERT_EXPR_P (exp
)
1378 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp
, 0))))
1379 exp
= TREE_OPERAND (exp
, 0);
1381 /* Build a MEM_REF representing the whole accessed area as a byte blob,
1382 (as builtin stringops may alias with anything). */
1383 exp
= fold_build2 (MEM_REF
,
1384 build_array_type (char_type_node
,
1385 build_range_type (sizetype
,
1386 size_one_node
, len
)),
1387 exp
, build_int_cst (ptr_type_node
, 0));
1389 /* If the MEM_REF has no acceptable address, try to get the base object
1390 from the original address we got, and build an all-aliasing
1391 unknown-sized access to that one. */
1392 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp
, 0)))
1393 set_mem_attributes (mem
, exp
, 0);
1394 else if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
1395 && (base
= get_base_address (TREE_OPERAND (TREE_OPERAND (exp
, 0),
1398 unsigned int align
= get_pointer_alignment (TREE_OPERAND (exp
, 0));
1399 exp
= build_fold_addr_expr (base
);
1400 exp
= fold_build2 (MEM_REF
,
1401 build_array_type (char_type_node
,
1402 build_range_type (sizetype
,
1405 exp
, build_int_cst (ptr_type_node
, 0));
1406 set_mem_attributes (mem
, exp
, 0);
1407 /* Since we stripped parts make sure the offset is unknown and the
1408 alignment is computed from the original address. */
1409 clear_mem_offset (mem
);
1410 set_mem_align (mem
, align
);
1412 set_mem_alias_set (mem
, 0);
/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)
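/* A forwarding function built on these facilities looks roughly like

     void *args = __builtin_apply_args ();
     void *res  = __builtin_apply (fn, args, size);
     __builtin_return (res);

   where SIZE is the number of bytes of stack argument data to copy.
   The block returned by __builtin_apply_args holds the arg pointer,
   the structure value address and all argument registers described by
   apply_args_mode.  */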
1423 /* Return the size required for the block returned by __builtin_apply_args,
1424 and initialize apply_args_mode. */
1427 apply_args_size (void)
1429 static int size
= -1;
1433 /* The values computed by this function never change. */
1436 /* The first value is the incoming arg-pointer. */
1437 size
= GET_MODE_SIZE (Pmode
);
1439 /* The second value is the structure value address unless this is
1440 passed as an "invisible" first argument. */
1441 if (targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0))
1442 size
+= GET_MODE_SIZE (Pmode
);
1444 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1445 if (FUNCTION_ARG_REGNO_P (regno
))
1447 fixed_size_mode mode
= targetm
.calls
.get_raw_arg_mode (regno
);
1449 if (mode
!= VOIDmode
)
1451 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1452 if (size
% align
!= 0)
1453 size
= CEIL (size
, align
) * align
;
1454 size
+= GET_MODE_SIZE (mode
);
1455 apply_args_mode
[regno
] = mode
;
1458 apply_args_mode
[regno
] = as_a
<fixed_size_mode
> (VOIDmode
);
1461 apply_args_mode
[regno
] = as_a
<fixed_size_mode
> (VOIDmode
);
1466 /* Return the size required for the block returned by __builtin_apply,
1467 and initialize apply_result_mode. */
1470 apply_result_size (void)
1472 static int size
= -1;
1475 /* The values computed by this function never change. */
1480 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1481 if (targetm
.calls
.function_value_regno_p (regno
))
1483 fixed_size_mode mode
= targetm
.calls
.get_raw_result_mode (regno
);
1485 if (mode
!= VOIDmode
)
1487 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1488 if (size
% align
!= 0)
1489 size
= CEIL (size
, align
) * align
;
1490 size
+= GET_MODE_SIZE (mode
);
1491 apply_result_mode
[regno
] = mode
;
1494 apply_result_mode
[regno
] = as_a
<fixed_size_mode
> (VOIDmode
);
1497 apply_result_mode
[regno
] = as_a
<fixed_size_mode
> (VOIDmode
);
1499 /* Allow targets that use untyped_call and untyped_return to override
1500 the size so that machine-specific information can be stored here. */
1501 #ifdef APPLY_RESULT_SIZE
1502 size
= APPLY_RESULT_SIZE
;
1508 /* Create a vector describing the result block RESULT. If SAVEP is true,
1509 the result block is used to save the values; otherwise it is used to
1510 restore the values. */
1513 result_vector (int savep
, rtx result
)
1515 int regno
, size
, align
, nelts
;
1516 fixed_size_mode mode
;
1518 rtx
*savevec
= XALLOCAVEC (rtx
, FIRST_PSEUDO_REGISTER
);
1521 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1522 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
1524 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1525 if (size
% align
!= 0)
1526 size
= CEIL (size
, align
) * align
;
1527 reg
= gen_rtx_REG (mode
, savep
? regno
: INCOMING_REGNO (regno
));
1528 mem
= adjust_address (result
, mode
, size
);
1529 savevec
[nelts
++] = (savep
1530 ? gen_rtx_SET (mem
, reg
)
1531 : gen_rtx_SET (reg
, mem
));
1532 size
+= GET_MODE_SIZE (mode
);
1534 return gen_rtx_PARALLEL (VOIDmode
, gen_rtvec_v (nelts
, savevec
));
1537 /* Save the state required to perform an untyped call with the same
1538 arguments as were passed to the current function. */
1541 expand_builtin_apply_args_1 (void)
1544 int size
, align
, regno
;
1545 fixed_size_mode mode
;
1546 rtx struct_incoming_value
= targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 1);
1548 /* Create a block where the arg-pointer, structure value address,
1549 and argument registers can be saved. */
1550 registers
= assign_stack_local (BLKmode
, apply_args_size (), -1);
1552 /* Walk past the arg-pointer and structure value address. */
1553 size
= GET_MODE_SIZE (Pmode
);
1554 if (targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0))
1555 size
+= GET_MODE_SIZE (Pmode
);
1557 /* Save each register used in calling a function to the block. */
1558 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1559 if ((mode
= apply_args_mode
[regno
]) != VOIDmode
)
1561 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1562 if (size
% align
!= 0)
1563 size
= CEIL (size
, align
) * align
;
1565 tem
= gen_rtx_REG (mode
, INCOMING_REGNO (regno
));
1567 emit_move_insn (adjust_address (registers
, mode
, size
), tem
);
1568 size
+= GET_MODE_SIZE (mode
);
1571 /* Save the arg pointer to the block. */
1572 tem
= copy_to_reg (crtl
->args
.internal_arg_pointer
);
1573 /* We need the pointer as the caller actually passed them to us, not
1574 as we might have pretended they were passed. Make sure it's a valid
1575 operand, as emit_move_insn isn't expected to handle a PLUS. */
1576 if (STACK_GROWS_DOWNWARD
)
1578 = force_operand (plus_constant (Pmode
, tem
,
1579 crtl
->args
.pretend_args_size
),
1581 emit_move_insn (adjust_address (registers
, Pmode
, 0), tem
);
1583 size
= GET_MODE_SIZE (Pmode
);
1585 /* Save the structure value address unless this is passed as an
1586 "invisible" first argument. */
1587 if (struct_incoming_value
)
1588 emit_move_insn (adjust_address (registers
, Pmode
, size
),
1589 copy_to_reg (struct_incoming_value
));
1591 /* Return the address of the block. */
1592 return copy_addr_to_reg (XEXP (registers
, 0));
1595 /* __builtin_apply_args returns block of memory allocated on
1596 the stack into which is stored the arg pointer, structure
1597 value address, static chain, and all the registers that might
1598 possibly be used in performing a function call. The code is
1599 moved to the start of the function so the incoming values are
1603 expand_builtin_apply_args (void)
1605 /* Don't do __builtin_apply_args more than once in a function.
1606 Save the result of the first call and reuse it. */
1607 if (apply_args_value
!= 0)
1608 return apply_args_value
;
1610 /* When this function is called, it means that registers must be
1611 saved on entry to this function. So we migrate the
1612 call to the first insn of this function. */
1616 temp
= expand_builtin_apply_args_1 ();
1617 rtx_insn
*seq
= get_insns ();
1620 apply_args_value
= temp
;
1622 /* Put the insns after the NOTE that starts the function.
1623 If this is inside a start_sequence, make the outer-level insn
1624 chain current, so the code is placed at the start of the
1625 function. If internal_arg_pointer is a non-virtual pseudo,
1626 it needs to be placed after the function that initializes
1628 push_topmost_sequence ();
1629 if (REG_P (crtl
->args
.internal_arg_pointer
)
1630 && REGNO (crtl
->args
.internal_arg_pointer
) > LAST_VIRTUAL_REGISTER
)
1631 emit_insn_before (seq
, parm_birth_insn
);
1633 emit_insn_before (seq
, NEXT_INSN (entry_of_function ()));
1634 pop_topmost_sequence ();
1639 /* Perform an untyped call and save the state required to perform an
1640 untyped return of whatever value was returned by the given function. */
1643 expand_builtin_apply (rtx function
, rtx arguments
, rtx argsize
)
1645 int size
, align
, regno
;
1646 fixed_size_mode mode
;
1647 rtx incoming_args
, result
, reg
, dest
, src
;
1648 rtx_call_insn
*call_insn
;
1649 rtx old_stack_level
= 0;
1650 rtx call_fusage
= 0;
1651 rtx struct_value
= targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0);
1653 arguments
= convert_memory_address (Pmode
, arguments
);
1655 /* Create a block where the return registers can be saved. */
1656 result
= assign_stack_local (BLKmode
, apply_result_size (), -1);
1658 /* Fetch the arg pointer from the ARGUMENTS block. */
1659 incoming_args
= gen_reg_rtx (Pmode
);
1660 emit_move_insn (incoming_args
, gen_rtx_MEM (Pmode
, arguments
));
1661 if (!STACK_GROWS_DOWNWARD
)
1662 incoming_args
= expand_simple_binop (Pmode
, MINUS
, incoming_args
, argsize
,
1663 incoming_args
, 0, OPTAB_LIB_WIDEN
);
1665 /* Push a new argument block and copy the arguments. Do not allow
1666 the (potential) memcpy call below to interfere with our stack
1668 do_pending_stack_adjust ();
1671 /* Save the stack with nonlocal if available. */
1672 if (targetm
.have_save_stack_nonlocal ())
1673 emit_stack_save (SAVE_NONLOCAL
, &old_stack_level
);
1675 emit_stack_save (SAVE_BLOCK
, &old_stack_level
);
1677 /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
  if (!STACK_GROWS_DOWNWARD)
    if (CONST_INT_P (argsize))
      dest = plus_constant (Pmode, dest, -INTVAL (argsize));
      dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (size % align != 0)
        size = CEIL (size, align) * align;
      reg = gen_rtx_REG (mode, regno);
      emit_move_insn (reg, adjust_address (arguments, mode, size));
      use_reg (&call_fusage, reg);
      size += GET_MODE_SIZE (mode);

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
        use_reg (&call_fusage, struct_value);

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
  if (targetm.have_untyped_call ())
      rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
      rtx_insn *seq = targetm.gen_untyped_call (mem, result,
                                                result_vector (1, result));
      for (rtx_insn *insn = seq; insn; insn = NEXT_INSN (insn))
          add_reg_note (insn, REG_UNTYPED_CALL, NULL_RTX);
  else if (targetm.have_call_value ())
      /* Locate the unique return register.  It is not possible to
         express a call that sets more than one return register using
         call_value; use untyped_call for that.  In fact, untyped_call
         only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if ((mode = apply_result_mode[regno]) != VOIDmode)
            gcc_assert (!valreg); /* have_untyped_call required.  */
            valreg = gen_rtx_REG (mode, regno);
      emit_insn (targetm.gen_call_value (valreg,
                                         gen_rtx_MEM (FUNCTION_MODE, function),
                                         const0_rtx, NULL_RTX, const0_rtx));
      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
    emit_stack_restore (SAVE_BLOCK, old_stack_level);
  fixup_args_size_notes (call_insn, get_last_insn (), 0);

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
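/* Illustrative note (not part of the original source): the __builtin_apply
   machinery above is meant to be used from user code roughly as in this
   hypothetical forwarder, which passes at most 64 bytes of stack arguments
   through to FN and then returns whatever FN returned:

     void *forward (void (*fn) ())
     {
       void *args = __builtin_apply_args ();
       void *result = __builtin_apply (fn, args, 64);
       __builtin_return (result);
     }

   This is only a usage sketch; the argument-block size is target dependent.  */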
/* Perform an untyped return.  */

expand_builtin_return (rtx result)
  int size, align, regno;
  fixed_size_mode mode;
  rtx_insn *call_fusage = 0;

  result = convert_memory_address (Pmode, result);

  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

  if (targetm.have_untyped_return ())
      rtx vector = result_vector (0, result);
      emit_jump_insn (targetm.gen_untyped_return (result, vector));

  /* Restore the return value and note that each value is used.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
        emit_move_insn (reg, adjust_address (result, mode, size));
        push_to_sequence (call_fusage);
        call_fusage = get_insns ();
        size += GET_MODE_SIZE (mode);

  /* Put the USE insns before the return.  */
  emit_insn (call_fusage);

  /* Return whatever values were restored by jumping directly to the end
     of the function.  */
  expand_naked_return ();
/* Used by expand_builtin_classify_type and fold_builtin_classify_type.  */

type_to_class (tree type)
  switch (TREE_CODE (type))
    case VOID_TYPE:        return void_type_class;
    case INTEGER_TYPE:     return integer_type_class;
    case ENUMERAL_TYPE:    return enumeral_type_class;
    case BOOLEAN_TYPE:     return boolean_type_class;
    case POINTER_TYPE:     return pointer_type_class;
    case REFERENCE_TYPE:   return reference_type_class;
    case OFFSET_TYPE:      return offset_type_class;
    case REAL_TYPE:        return real_type_class;
    case COMPLEX_TYPE:     return complex_type_class;
    case FUNCTION_TYPE:    return function_type_class;
    case METHOD_TYPE:      return method_type_class;
    case RECORD_TYPE:      return record_type_class;
    case QUAL_UNION_TYPE:  return union_type_class;
    case ARRAY_TYPE:       return (TYPE_STRING_FLAG (type)
                                   ? string_type_class : array_type_class);
    case LANG_TYPE:        return lang_type_class;
    case OPAQUE_TYPE:      return opaque_type_class;
    case BITINT_TYPE:      return bitint_type_class;
    default:               return no_type_class;
/* Expand a call EXP to __builtin_classify_type.  */

expand_builtin_classify_type (tree exp)
  if (call_expr_nargs (exp))
    return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
  return GEN_INT (no_type_class);
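/* Illustrative note (not part of the original source): type_to_class maps a
   front-end type to the type_class value that __builtin_classify_type
   returns, e.g. (values per typeclass.h):

     __builtin_classify_type (42)         -> integer_type_class
     __builtin_classify_type (3.14)       -> real_type_class
     __builtin_classify_type ((char *) 0) -> pointer_type_class

   A call with no argument folds to no_type_class above.  */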
/* This helper macro, meant to be used in mathfn_built_in below, determines
   which among a set of builtin math functions is appropriate for a given type
   mode.  The `F' (float) and `L' (long double) are automatically generated
   from the 'double' case.  If a function supports the _Float<N> and _Float<N>X
   types, there are additional types that are considered with 'F32', 'F64',
   'F128', etc. suffixes.  */
#define CASE_MATHFN(MATHFN) \
  CASE_CFN_##MATHFN: \
  fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
  fcodel = BUILT_IN_##MATHFN##L ; break;
/* Similar to the above, but also add support for the _Float<N> and _Float<N>X
   variants.  */
#define CASE_MATHFN_FLOATN(MATHFN) \
  CASE_CFN_##MATHFN: \
  fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
  fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
  fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
  fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
  fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
  break;

/* Similar to above, but appends _R after any F/L suffix.  */
#define CASE_MATHFN_REENT(MATHFN) \
  case CFN_BUILT_IN_##MATHFN##_R: \
  case CFN_BUILT_IN_##MATHFN##F_R: \
  case CFN_BUILT_IN_##MATHFN##L_R: \
  fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
  fcodel = BUILT_IN_##MATHFN##L_R ; break;
/* Return a function equivalent to FN but operating on floating-point
   values of type TYPE, or END_BUILTINS if no such function exists.
   This is purely an operation on function codes; it does not guarantee
   that the target actually has an implementation of the function.  */

static built_in_function
mathfn_built_in_2 (tree type, combined_fn fn)
  built_in_function fcode, fcodef, fcodel;
  built_in_function fcodef16 = END_BUILTINS;
  built_in_function fcodef32 = END_BUILTINS;
  built_in_function fcodef64 = END_BUILTINS;
  built_in_function fcodef128 = END_BUILTINS;
  built_in_function fcodef32x = END_BUILTINS;
  built_in_function fcodef64x = END_BUILTINS;
  built_in_function fcodef128x = END_BUILTINS;

  /* If <math.h> has been included somehow, HUGE_VAL and NAN definitions
     break the uses below.  */

#define SEQ_OF_CASE_MATHFN \
  CASE_MATHFN_FLOATN (ACOS) \
  CASE_MATHFN_FLOATN (ACOSH) \
  CASE_MATHFN_FLOATN (ASIN) \
  CASE_MATHFN_FLOATN (ASINH) \
  CASE_MATHFN_FLOATN (ATAN) \
  CASE_MATHFN_FLOATN (ATAN2) \
  CASE_MATHFN_FLOATN (ATANH) \
  CASE_MATHFN_FLOATN (CBRT) \
  CASE_MATHFN_FLOATN (CEIL) \
  CASE_MATHFN (CEXPI) \
  CASE_MATHFN_FLOATN (COPYSIGN) \
  CASE_MATHFN_FLOATN (COS) \
  CASE_MATHFN_FLOATN (COSH) \
  CASE_MATHFN (DREM) \
  CASE_MATHFN_FLOATN (ERF) \
  CASE_MATHFN_FLOATN (ERFC) \
  CASE_MATHFN_FLOATN (EXP) \
  CASE_MATHFN (EXP10) \
  CASE_MATHFN_FLOATN (EXP2) \
  CASE_MATHFN_FLOATN (EXPM1) \
  CASE_MATHFN_FLOATN (FABS) \
  CASE_MATHFN_FLOATN (FDIM) \
  CASE_MATHFN_FLOATN (FLOOR) \
  CASE_MATHFN_FLOATN (FMA) \
  CASE_MATHFN_FLOATN (FMAX) \
  CASE_MATHFN_FLOATN (FMIN) \
  CASE_MATHFN_FLOATN (FMOD) \
  CASE_MATHFN_FLOATN (FREXP) \
  CASE_MATHFN (GAMMA) \
  CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */ \
  CASE_MATHFN_FLOATN (HUGE_VAL) \
  CASE_MATHFN_FLOATN (HYPOT) \
  CASE_MATHFN_FLOATN (ILOGB) \
  CASE_MATHFN (ICEIL) \
  CASE_MATHFN (IFLOOR) \
  CASE_MATHFN_FLOATN (INF) \
  CASE_MATHFN (IRINT) \
  CASE_MATHFN (IROUND) \
  CASE_MATHFN (ISINF) \
  CASE_MATHFN (LCEIL) \
  CASE_MATHFN_FLOATN (LDEXP) \
  CASE_MATHFN (LFLOOR) \
  CASE_MATHFN_FLOATN (LGAMMA) \
  CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */ \
  CASE_MATHFN (LLCEIL) \
  CASE_MATHFN (LLFLOOR) \
  CASE_MATHFN_FLOATN (LLRINT) \
  CASE_MATHFN_FLOATN (LLROUND) \
  CASE_MATHFN_FLOATN (LOG) \
  CASE_MATHFN_FLOATN (LOG10) \
  CASE_MATHFN_FLOATN (LOG1P) \
  CASE_MATHFN_FLOATN (LOG2) \
  CASE_MATHFN_FLOATN (LOGB) \
  CASE_MATHFN_FLOATN (LRINT) \
  CASE_MATHFN_FLOATN (LROUND) \
  CASE_MATHFN_FLOATN (MODF) \
  CASE_MATHFN_FLOATN (NAN) \
  CASE_MATHFN_FLOATN (NANS) \
  CASE_MATHFN_FLOATN (NEARBYINT) \
  CASE_MATHFN_FLOATN (NEXTAFTER) \
  CASE_MATHFN (NEXTTOWARD) \
  CASE_MATHFN_FLOATN (POW) \
  CASE_MATHFN (POWI) \
  CASE_MATHFN (POW10) \
  CASE_MATHFN_FLOATN (REMAINDER) \
  CASE_MATHFN_FLOATN (REMQUO) \
  CASE_MATHFN_FLOATN (RINT) \
  CASE_MATHFN_FLOATN (ROUND) \
  CASE_MATHFN_FLOATN (ROUNDEVEN) \
  CASE_MATHFN (SCALB) \
  CASE_MATHFN_FLOATN (SCALBLN) \
  CASE_MATHFN_FLOATN (SCALBN) \
  CASE_MATHFN (SIGNBIT) \
  CASE_MATHFN (SIGNIFICAND) \
  CASE_MATHFN_FLOATN (SIN) \
  CASE_MATHFN (SINCOS) \
  CASE_MATHFN_FLOATN (SINH) \
  CASE_MATHFN_FLOATN (SQRT) \
  CASE_MATHFN_FLOATN (TAN) \
  CASE_MATHFN_FLOATN (TANH) \
  CASE_MATHFN_FLOATN (TGAMMA) \
  CASE_MATHFN_FLOATN (TRUNC) \

      return END_BUILTINS;

  mtype = TYPE_MAIN_VARIANT (type);
  if (mtype == double_type_node)
  else if (mtype == float_type_node)
  else if (mtype == long_double_type_node)
  else if (mtype == float16_type_node)
  else if (mtype == float32_type_node)
  else if (mtype == float64_type_node)
  else if (mtype == float128_type_node)
  else if (mtype == float32x_type_node)
  else if (mtype == float64x_type_node)
  else if (mtype == float128x_type_node)
    return END_BUILTINS;

#undef CASE_MATHFN_FLOATN
#undef CASE_MATHFN_REENT
/* Return a mathematical function equivalent to FN but operating directly on
   TYPE, if available.  If IMPLICIT_P is true use the implicit builtin
   declaration, otherwise use the explicit declaration.  If we can't do the
   conversion, return NULL_TREE.  */

mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
  built_in_function fcode2 = mathfn_built_in_2 (type, fn);
  if (fcode2 == END_BUILTINS)
  if (implicit_p && !builtin_decl_implicit_p (fcode2))
  return builtin_decl_explicit (fcode2);

/* Like mathfn_built_in_1, but always use the implicit array.  */

mathfn_built_in (tree type, combined_fn fn)
  return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);

/* Like mathfn_built_in_1, but always use the explicit array.  */

mathfn_built_in_explicit (tree type, combined_fn fn)
  return mathfn_built_in_1 (type, fn, /*implicit=*/ 0);

/* Like mathfn_built_in_1, but take a built_in_function and
   always use the implicit array.  */

mathfn_built_in (tree type, enum built_in_function fn)
  return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
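/* Illustrative note (not part of the original source): a minimal sketch of how
   the lookup above is typically used, assuming float_type_node and the sin
   family of builtins are available:

     tree sinf_decl = mathfn_built_in (float_type_node, BUILT_IN_SIN);

   which, via mathfn_built_in_2, selects BUILT_IN_SINF and returns its
   implicit declaration (or NULL_TREE if it is unavailable).  */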
/* Return the type associated with a built in function, i.e., the one
   to be passed to mathfn_built_in to get the type-specific
   function.  */

mathfn_built_in_type (combined_fn fn)
#define CASE_MATHFN(MATHFN) \
  case CFN_BUILT_IN_##MATHFN: \
    return double_type_node; \
  case CFN_BUILT_IN_##MATHFN##F: \
    return float_type_node; \
  case CFN_BUILT_IN_##MATHFN##L: \
    return long_double_type_node;

#define CASE_MATHFN_FLOATN(MATHFN) \
  CASE_MATHFN(MATHFN) \
  case CFN_BUILT_IN_##MATHFN##F16: \
    return float16_type_node; \
  case CFN_BUILT_IN_##MATHFN##F32: \
    return float32_type_node; \
  case CFN_BUILT_IN_##MATHFN##F64: \
    return float64_type_node; \
  case CFN_BUILT_IN_##MATHFN##F128: \
    return float128_type_node; \
  case CFN_BUILT_IN_##MATHFN##F32X: \
    return float32x_type_node; \
  case CFN_BUILT_IN_##MATHFN##F64X: \
    return float64x_type_node; \
  case CFN_BUILT_IN_##MATHFN##F128X: \
    return float128x_type_node;

/* Similar to above, but appends _R after any F/L suffix.  */
#define CASE_MATHFN_REENT(MATHFN) \
  case CFN_BUILT_IN_##MATHFN##_R: \
    return double_type_node; \
  case CFN_BUILT_IN_##MATHFN##F_R: \
    return float_type_node; \
  case CFN_BUILT_IN_##MATHFN##L_R: \
    return long_double_type_node;

#undef CASE_MATHFN_FLOATN
#undef CASE_MATHFN_REENT
#undef SEQ_OF_CASE_MATHFN
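/* Illustrative note (not part of the original source): mathfn_built_in_type
   is the inverse direction of the mapping above, e.g.
   mathfn_built_in_type (CFN_BUILT_IN_SINF) yields float_type_node and
   mathfn_built_in_type (CFN_BUILT_IN_SINL) yields long_double_type_node.  */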
/* Check whether there is an internal function associated with function FN
   and return type RETURN_TYPE.  Return the function if so, otherwise return
   IFN_LAST.

   Note that this function only tests whether the function is defined in
   internals.def, not whether it is actually available on the target.  */

associated_internal_fn (built_in_function fn, tree return_type)
#define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
    CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
#define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
    CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
#define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
    CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
#include "internal-fn.def"

    CASE_FLT_FN (BUILT_IN_POW10):
    CASE_FLT_FN (BUILT_IN_DREM):
      return IFN_REMAINDER;

    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)

/* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
   return its code, otherwise return IFN_LAST.  Note that this function
   only tests whether the function is defined in internals.def, not whether
   it is actually available on the target.  */

associated_internal_fn (tree fndecl)
  gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
  return associated_internal_fn (DECL_FUNCTION_CODE (fndecl),
                                 TREE_TYPE (TREE_TYPE (fndecl)));

/* Check whether there is an internal function associated with function CFN
   and return type RETURN_TYPE.  Return the function if so, otherwise return
   IFN_LAST.

   Note that this function only tests whether the function is defined in
   internals.def, not whether it is actually available on the target.  */

associated_internal_fn (combined_fn cfn, tree return_type)
  if (internal_fn_p (cfn))
    return as_internal_fn (cfn);
  return associated_internal_fn (as_builtin_fn (cfn), return_type);

/* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
   on the current target by a call to an internal function, return the
   code of that internal function, otherwise return IFN_LAST.  The caller
   is responsible for ensuring that any side-effects of the built-in
   call are dealt with correctly.  E.g. if CALL sets errno, the caller
   must decide that the errno result isn't needed or make it available
   in some other way.  */

replacement_internal_fn (gcall *call)
  if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
      internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
      if (ifn != IFN_LAST)
          tree_pair types = direct_internal_fn_types (ifn, call);
          optimization_type opt_type = bb_optimization_type (gimple_bb (call));
          if (direct_internal_fn_supported_p (ifn, types, opt_type))
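/* Illustrative note (not part of the original source): for example, a call to
   __builtin_sqrtf can be reported as IFN_SQRT by replacement_internal_fn when
   the sqrt optab supports the call's mode for the basic block's optimization
   type; otherwise IFN_LAST is returned and the library call is kept.  */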
/* Expand a call to the builtin ternary math functions (fma).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
  optab builtin_optab;
  rtx op0, op1, op2, result;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1, arg2;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  arg2 = CALL_EXPR_ARG (exp, 2);

  switch (DECL_FUNCTION_CODE (fndecl))
    CASE_FLT_FN (BUILT_IN_FMA):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
      builtin_optab = fma_optab; break;

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)

  result = gen_reg_rtx (mode);

  /* Always stabilize the argument list.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
  CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);
  op2 = expand_normal (arg2);

  /* Compute into RESULT.
     Set RESULT to wherever the result comes back.  */
  result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
      return expand_call (exp, target, target == const0_rtx);

  /* Output the entire sequence.  */
  insns = get_insns ();
/* Expand a call to the builtin sin and cos math functions.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
  optab builtin_optab;
  tree fndecl = get_callee_fndecl (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      builtin_optab = sincos_optab; break;

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Check if sincos insn is available, otherwise fall back
     to sin or cos insn.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    switch (DECL_FUNCTION_CODE (fndecl))
      CASE_FLT_FN (BUILT_IN_SIN):
        builtin_optab = sin_optab; break;
      CASE_FLT_FN (BUILT_IN_COS):
        builtin_optab = cos_optab; break;

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
         need to expand the argument again.  This way, we will not perform
         side effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      /* Compute into RESULT.
         Set RESULT to wherever the result comes back.  */
      if (builtin_optab == sincos_optab)
          switch (DECL_FUNCTION_CODE (fndecl))
            CASE_FLT_FN (BUILT_IN_SIN):
              ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
            CASE_FLT_FN (BUILT_IN_COS):
              ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
        result = expand_unop (mode, builtin_optab, op0, result, 0);

          /* Output the entire sequence.  */
          insns = get_insns ();

      /* If we were unable to expand via the builtin, stop the sequence
         (without outputting the insns) and call to the library function
         with the stabilized argument list.  */

  return expand_call (exp, target, target == const0_rtx);
/* Given an interclass math builtin decl FNDECL and its argument ARG
   return an RTL instruction code that implements the functionality.
   If that isn't possible or available return CODE_FOR_nothing.  */

static enum insn_code
interclass_mathfn_icode (tree arg, tree fndecl)
  bool errno_set = false;
  optab builtin_optab = unknown_optab;

  switch (DECL_FUNCTION_CODE (fndecl))
    CASE_FLT_FN (BUILT_IN_ILOGB):
      errno_set = true; builtin_optab = ilogb_optab; break;
    CASE_FLT_FN (BUILT_IN_ISINF):
      builtin_optab = isinf_optab; break;
    case BUILT_IN_ISNORMAL:
    case BUILT_IN_ISFINITE:
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      /* These builtins have no optabs (yet).  */

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && errno_set)
    return CODE_FOR_nothing;

  /* Optab mode depends on the mode of the input argument.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

    return optab_handler (builtin_optab, mode);
  return CODE_FOR_nothing;
/* Expand a call to one of the builtin math functions that operate on
   floating point argument and output an integer result (ilogb, isinf,
   isnan, etc).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

expand_builtin_interclass_mathfn (tree exp, rtx target)
  enum insn_code icode = CODE_FOR_nothing;
  tree fndecl = get_callee_fndecl (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))

  arg = CALL_EXPR_ARG (exp, 0);
  icode = interclass_mathfn_icode (arg, fndecl);
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (icode != CODE_FOR_nothing)
      class expand_operand ops[1];
      rtx_insn *last = get_last_insn ();
      tree orig_arg = arg;

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
         need to expand the argument again.  This way, we will not perform
         side effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      if (mode != GET_MODE (op0))
        op0 = convert_to_mode (mode, op0, 0);

      create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_legitimize_operands (icode, 0, 1, ops)
          && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
        return ops[0].value;

      delete_insns_since (last);
      CALL_EXPR_ARG (exp, 0) = orig_arg;
/* Expand a call to the builtin sincos math function.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function.  */

expand_builtin_sincos (tree exp)
  rtx op0, op1, op2, target1, target2;
  tree arg, sinp, cosp;
  location_t loc = EXPR_LOCATION (exp);
  tree alias_type, alias_off;

  if (!validate_arglist (exp, REAL_TYPE,
                         POINTER_TYPE, POINTER_TYPE, VOID_TYPE))

  arg = CALL_EXPR_ARG (exp, 0);
  sinp = CALL_EXPR_ARG (exp, 1);
  cosp = CALL_EXPR_ARG (exp, 2);

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Check if sincos insn is available, otherwise emit the call.  */
  if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)

  target1 = gen_reg_rtx (mode);
  target2 = gen_reg_rtx (mode);

  op0 = expand_normal (arg);
  alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
  alias_off = build_int_cst (alias_type, 0);
  op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
  op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),

  /* Compute into target1 and target2.
     Set TARGET to wherever the result comes back.  */
  result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
  gcc_assert (result);

  /* Move target1 and target2 to the memory locations indicated
     by the pointers.  */
  emit_move_insn (op1, target1);
  emit_move_insn (op2, target2);
/* Expand call EXP to the fegetround builtin (from C99 fenv.h), returning the
   result and setting it in TARGET.  Otherwise return NULL_RTX on failure.  */

expand_builtin_fegetround (tree exp, rtx target, machine_mode target_mode)
  if (!validate_arglist (exp, VOID_TYPE))

  insn_code icode = direct_optab_handler (fegetround_optab, SImode);
  if (icode == CODE_FOR_nothing)

      || GET_MODE (target) != target_mode
      || !(*insn_data[icode].operand[0].predicate) (target, target_mode))
    target = gen_reg_rtx (target_mode);

  rtx pat = GEN_FCN (icode) (target);
/* Expand call EXP to either feclearexcept or feraiseexcept builtins (from C99
   fenv.h), returning the result and setting it in TARGET.  Otherwise return
   NULL_RTX on failure.  */

expand_builtin_feclear_feraise_except (tree exp, rtx target,
                                       machine_mode target_mode, optab op_optab)
  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))

  rtx op0 = expand_normal (CALL_EXPR_ARG (exp, 0));

  insn_code icode = direct_optab_handler (op_optab, SImode);
  if (icode == CODE_FOR_nothing)

  if (!(*insn_data[icode].operand[1].predicate) (op0, GET_MODE (op0)))

      || GET_MODE (target) != target_mode
      || !(*insn_data[icode].operand[0].predicate) (target, target_mode))
    target = gen_reg_rtx (target_mode);

  rtx pat = GEN_FCN (icode) (target, op0);
/* Expand a call to the internal cexpi builtin to the sincos math function.
   EXP is the expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  */

expand_builtin_cexpi (tree exp, rtx target)
  tree fndecl = get_callee_fndecl (exp);
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))

  arg = CALL_EXPR_ARG (exp, 0);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Try expanding via a sincos optab, fall back to emitting a libcall
     to sincos or cexp.  We are sure we have sincos or cexp because cexpi
     is only generated from sincos, cexp or if we have either of them.  */
  if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
      op1 = gen_reg_rtx (mode);
      op2 = gen_reg_rtx (mode);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      /* Compute into op1 and op2.  */
      expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
  else if (targetm.libc_has_function (function_sincos, type))
      tree call, fn = NULL_TREE;

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
        fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
        fn = builtin_decl_explicit (BUILT_IN_SINCOS);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
        fn = builtin_decl_explicit (BUILT_IN_SINCOSL);

      op1 = assign_temp (TREE_TYPE (arg), 1, 1);
      op2 = assign_temp (TREE_TYPE (arg), 1, 1);
      op1a = copy_addr_to_reg (XEXP (op1, 0));
      op2a = copy_addr_to_reg (XEXP (op2, 0));
      top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
      top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);

      /* Make sure not to fold the sincos call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
                                      call, 3, arg, top1, top2));

      tree call, fn = NULL_TREE, narg;
      tree ctype = build_complex_type (type);

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
        fn = builtin_decl_explicit (BUILT_IN_CEXPF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
        fn = builtin_decl_explicit (BUILT_IN_CEXP);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
        fn = builtin_decl_explicit (BUILT_IN_CEXPL);

      /* If we don't have a decl for cexp create one.  This is the
         friendliest fallback if the user calls __builtin_cexpi
         without full target C99 function support.  */
      if (fn == NULL_TREE)
          const char *name = NULL;

          if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
          else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
          else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)

          fntype = build_function_type_list (ctype, ctype, NULL_TREE);
          fn = build_fn_decl (name, fntype);

      narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
                              build_real (type, dconst0), arg);

      /* Make sure not to fold the cexp call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      return expand_expr (build_call_nary (ctype, call, 1, narg),
                          target, VOIDmode, EXPAND_NORMAL);

  /* Now build the proper return type.  */
  return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
                              make_tree (TREE_TYPE (arg), op2),
                              make_tree (TREE_TYPE (arg), op1)),
                      target, VOIDmode, EXPAND_NORMAL);
/* Conveniently construct a function call expression.  FNDECL names the
   function to be called, N is the number of arguments, and the "..."
   parameters are the argument expressions.  Unlike build_call_expr
   this doesn't fold the call, hence it will always return a CALL_EXPR.  */

build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
  tree fntype = TREE_TYPE (fndecl);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);

  fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
  SET_EXPR_LOCATION (fn, loc);
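/* Illustrative note (not part of the original source): a typical use of
   build_call_nofold_loc is rebuilding a call to a fallback declaration, e.g.

     exp = build_call_nofold_loc (loc, fallback_fndecl, 1, arg);

   as done by the rounding-function expanders later in this file.  */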
/* Expand the __builtin_issignaling builtin.  This needs to handle
   all floating point formats that do support NaNs (for those that
   don't it just sets target to 0).  */

expand_builtin_issignaling (tree exp, rtx target)
  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))

  tree arg = CALL_EXPR_ARG (exp, 0);
  scalar_float_mode fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
  const struct real_format *fmt = REAL_MODE_FORMAT (fmode);

  /* Expand the argument yielding a RTX expression.  */
  rtx temp = expand_normal (arg);

  /* If mode doesn't support NaN, always return 0.
     Don't use !HONOR_SNANS (fmode) here, so there is some possibility of
     __builtin_issignaling working without -fsignaling-nans.  Especially
     when -fno-signaling-nans is the default.
     On the other side, MODE_HAS_NANS (fmode) is unnecessary, with
     -ffinite-math-only even __builtin_isnan or __builtin_fpclassify
     fold to 0 or non-NaN/Inf classification.  */
  if (!HONOR_NANS (fmode))
      emit_move_insn (target, const0_rtx);

  /* Check if the back end provides an insn that handles issignaling for the
     argument's mode.  */
  enum insn_code icode = optab_handler (issignaling_optab, fmode);
  if (icode != CODE_FOR_nothing)
      rtx_insn *last = get_last_insn ();
      rtx this_target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_emit_unop_insn (icode, this_target, temp, UNKNOWN))
      delete_insns_since (last);
  if (DECIMAL_FLOAT_MODE_P (fmode))
      scalar_int_mode imode;
      switch (fmt->ieee_bits)
          imode = int_mode_for_mode (fmode).require ();
          temp = gen_lowpart (imode, temp);
          imode = int_mode_for_size (64, 1).require ();
          /* For decimal128, TImode support isn't always there and even when
             it is, working on the DImode high part is usually better.  */
2829 if (rtx t
= simplify_gen_subreg (imode
, temp
, fmode
,
2830 subreg_highpart_offset (imode
,
2835 scalar_int_mode imode2
;
2836 if (int_mode_for_mode (fmode
).exists (&imode2
))
2838 rtx temp2
= gen_lowpart (imode2
, temp
);
2839 poly_uint64 off
= subreg_highpart_offset (imode
, imode2
);
2840 if (rtx t
= simplify_gen_subreg (imode
, temp2
,
2847 rtx mem
= assign_stack_temp (fmode
, GET_MODE_SIZE (fmode
));
2848 emit_move_insn (mem
, temp
);
2855 = subreg_highpart_offset (imode
, GET_MODE (temp
));
2856 hi
= adjust_address (temp
, imode
, offset
);
      /* In all of decimal{32,64,128}, the MSB is the sign bit and sNaNs
         have the 6 bits below it all set.  */
        = GEN_INT (HOST_WIDE_INT_C (0x3f) << (GET_MODE_BITSIZE (imode) - 7));
      temp = expand_binop (imode, and_optab, temp, val,
                           NULL_RTX, 1, OPTAB_LIB_WIDEN);
      temp = emit_store_flag_force (target, EQ, temp, val, imode, 1, 1);
  /* Only PDP11 has these defined differently but doesn't support NaNs.  */
  gcc_assert (FLOAT_WORDS_BIG_ENDIAN == WORDS_BIG_ENDIAN);
  gcc_assert (fmt->signbit_ro > 0 && fmt->b == 2);
  gcc_assert (MODE_COMPOSITE_P (fmode)
              || (fmt->pnan == fmt->p
                  && fmt->signbit_ro == fmt->signbit_rw));

    case 106: /* IBM double double */
      /* For IBM double double, recurse on the most significant double.  */
      gcc_assert (MODE_COMPOSITE_P (fmode));
      temp = convert_modes (DFmode, fmode, temp, 0);
      fmt = REAL_MODE_FORMAT (DFmode);

    case 8: /* bfloat */
    case 11: /* IEEE half */
    case 24: /* IEEE single */
    case 53: /* IEEE double or Intel extended with rounding to double */
      if (fmt->p == 53 && fmt->signbit_ro == 79)
          scalar_int_mode imode = int_mode_for_mode (fmode).require ();
          temp = gen_lowpart (imode, temp);
          rtx val = GEN_INT ((HOST_WIDE_INT_M1U << (fmt->p - 2))
                             & ~(HOST_WIDE_INT_M1U << fmt->signbit_ro));
          if (fmt->qnan_msb_set)
              rtx mask = GEN_INT (~(HOST_WIDE_INT_M1U << fmt->signbit_ro));
              rtx bit = GEN_INT (HOST_WIDE_INT_1U << (fmt->p - 2));
              /* For non-MIPS/PA IEEE single/double/half or bfloat, expand to:
                 ((temp ^ bit) & mask) > val.  */
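              /* Illustrative example (not part of the original source), for
                 IEEE single precision where p == 24 and signbit_ro == 31:
                 bit = 0x00400000, mask = 0x7fffffff, val = 0x7fc00000.
                 The sNaN 0x7fa00000 gives ((0x7fa00000 ^ bit) & mask)
                 = 0x7fe00000 > val (signaling), while the qNaN 0x7fc00000
                 gives 0x7f800000 and +Inf 0x7f800000 gives exactly val,
                 both of which fail the unsigned comparison.  */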
              temp = expand_binop (imode, xor_optab, temp, bit,
                                   NULL_RTX, 1, OPTAB_LIB_WIDEN);
              temp = expand_binop (imode, and_optab, temp, mask,
                                   NULL_RTX, 1, OPTAB_LIB_WIDEN);
              temp = emit_store_flag_force (target, GTU, temp, val, imode,

              /* For MIPS/PA IEEE single/double, expand to:
                 (temp & val) == val.  */
              temp = expand_binop (imode, and_optab, temp, val,
                                   NULL_RTX, 1, OPTAB_LIB_WIDEN);
              temp = emit_store_flag_force (target, EQ, temp, val, imode,
    case 113: /* IEEE quad */
        rtx hi = NULL_RTX, lo = NULL_RTX;
        scalar_int_mode imode = int_mode_for_size (64, 1).require ();
        /* For IEEE quad, TImode support isn't always there and even when
           it is, working on DImode parts is usually better.  */
            hi = simplify_gen_subreg (imode, temp, fmode,
                                      subreg_highpart_offset (imode, fmode));
            lo = simplify_gen_subreg (imode, temp, fmode,
                                      subreg_lowpart_offset (imode, fmode));
2938 scalar_int_mode imode2
;
2939 if (int_mode_for_mode (fmode
).exists (&imode2
))
2941 rtx temp2
= gen_lowpart (imode2
, temp
);
2942 hi
= simplify_gen_subreg (imode
, temp2
, imode2
,
2943 subreg_highpart_offset (imode
,
2945 lo
= simplify_gen_subreg (imode
, temp2
, imode2
,
2946 subreg_lowpart_offset (imode
,
2952 rtx mem
= assign_stack_temp (fmode
, GET_MODE_SIZE (fmode
));
2953 emit_move_insn (mem
, temp
);
2960 = subreg_highpart_offset (imode
, GET_MODE (temp
));
2961 hi
= adjust_address (temp
, imode
, offset
);
2962 offset
= subreg_lowpart_offset (imode
, GET_MODE (temp
));
2963 lo
= adjust_address (temp
, imode
, offset
);
2965 rtx val
= GEN_INT ((HOST_WIDE_INT_M1U
<< (fmt
->p
- 2 - 64))
2966 & ~(HOST_WIDE_INT_M1U
<< (fmt
->signbit_ro
- 64)));
2967 if (fmt
->qnan_msb_set
)
2969 rtx mask
= GEN_INT (~(HOST_WIDE_INT_M1U
<< (fmt
->signbit_ro
2971 rtx bit
= GEN_INT (HOST_WIDE_INT_1U
<< (fmt
->p
- 2 - 64));
            /* For non-MIPS/PA IEEE quad, expand to:
               (((hi ^ bit) | ((lo | -lo) >> 63)) & mask) > val.  */
2974 rtx nlo
= expand_unop (imode
, neg_optab
, lo
, NULL_RTX
, 0);
2975 lo
= expand_binop (imode
, ior_optab
, lo
, nlo
,
2976 NULL_RTX
, 1, OPTAB_LIB_WIDEN
);
2977 lo
= expand_shift (RSHIFT_EXPR
, imode
, lo
, 63, NULL_RTX
, 1);
2978 temp
= expand_binop (imode
, xor_optab
, hi
, bit
,
2979 NULL_RTX
, 1, OPTAB_LIB_WIDEN
);
2980 temp
= expand_binop (imode
, ior_optab
, temp
, lo
,
2981 NULL_RTX
, 1, OPTAB_LIB_WIDEN
);
2982 temp
= expand_binop (imode
, and_optab
, temp
, mask
,
2983 NULL_RTX
, 1, OPTAB_LIB_WIDEN
);
2984 temp
= emit_store_flag_force (target
, GTU
, temp
, val
, imode
,
            /* For MIPS/PA IEEE quad, expand to:
               (hi & val) == val.  */
2991 temp
= expand_binop (imode
, and_optab
, hi
, val
,
2992 NULL_RTX
, 1, OPTAB_LIB_WIDEN
);
2993 temp
= emit_store_flag_force (target
, EQ
, temp
, val
, imode
,
    case 64: /* Intel or Motorola extended */
        scalar_int_mode imode = int_mode_for_size (32, 1).require ();
        scalar_int_mode iemode = int_mode_for_size (16, 1).require ();
        rtx mem = assign_stack_temp (fmode, GET_MODE_SIZE (fmode));
        emit_move_insn (mem, temp);
        if (fmt->signbit_ro == 95)
            /* Motorola, always big endian, with 16-bit gap in between
               16-bit sign+exponent and 64-bit mantissa.  */
3014 ex
= adjust_address (temp
, iemode
, 0);
3015 hi
= adjust_address (temp
, imode
, 4);
3016 lo
= adjust_address (temp
, imode
, 8);
        else if (!WORDS_BIG_ENDIAN)
            /* Intel little endian, 64-bit mantissa followed by 16-bit
               sign+exponent and then either 16 or 48 bits of gap.  */
3022 ex
= adjust_address (temp
, iemode
, 8);
3023 hi
= adjust_address (temp
, imode
, 4);
3024 lo
= adjust_address (temp
, imode
, 0);
            /* Big endian Itanium.  */
3029 ex
= adjust_address (temp
, iemode
, 0);
3030 hi
= adjust_address (temp
, imode
, 2);
3031 lo
= adjust_address (temp
, imode
, 6);
3033 rtx val
= GEN_INT (HOST_WIDE_INT_M1U
<< 30);
3034 gcc_assert (fmt
->qnan_msb_set
);
3035 rtx mask
= GEN_INT (0x7fff);
3036 rtx bit
= GEN_INT (HOST_WIDE_INT_1U
<< 30);
        /* For Intel/Motorola extended format, expand to:
           (ex & mask) == mask && ((hi ^ bit) | ((lo | -lo) >> 31)) > val.  */
3039 rtx nlo
= expand_unop (imode
, neg_optab
, lo
, NULL_RTX
, 0);
3040 lo
= expand_binop (imode
, ior_optab
, lo
, nlo
,
3041 NULL_RTX
, 1, OPTAB_LIB_WIDEN
);
3042 lo
= expand_shift (RSHIFT_EXPR
, imode
, lo
, 31, NULL_RTX
, 1);
3043 temp
= expand_binop (imode
, xor_optab
, hi
, bit
,
3044 NULL_RTX
, 1, OPTAB_LIB_WIDEN
);
3045 temp
= expand_binop (imode
, ior_optab
, temp
, lo
,
3046 NULL_RTX
, 1, OPTAB_LIB_WIDEN
);
3047 temp
= emit_store_flag_force (target
, GTU
, temp
, val
, imode
, 1, 1);
3048 ex
= expand_binop (iemode
, and_optab
, ex
, mask
,
3049 NULL_RTX
, 1, OPTAB_LIB_WIDEN
);
3050 ex
= emit_store_flag_force (gen_reg_rtx (GET_MODE (temp
)), EQ
,
3051 ex
, mask
, iemode
, 1, 1);
3052 temp
= expand_binop (GET_MODE (temp
), and_optab
, temp
, ex
,
3053 NULL_RTX
, 1, OPTAB_LIB_WIDEN
);
/* Expand a call to one of the builtin rounding functions gcc defines
   as an extension (lfloor and lceil).  As these are gcc extensions we
   do not need to worry about setting errno to EDOM.
   If expanding via optab fails, lower expression to (int)(floor(x)).
   EXP is the expression that is a call to the builtin function;
   if convenient, the result should be placed in TARGET.  */

expand_builtin_int_roundingfn (tree exp, rtx target)
3073 convert_optab builtin_optab
;
3076 tree fndecl
= get_callee_fndecl (exp
);
3077 enum built_in_function fallback_fn
;
3078 tree fallback_fndecl
;
3082 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
3085 arg
= CALL_EXPR_ARG (exp
, 0);
3087 switch (DECL_FUNCTION_CODE (fndecl
))
3089 CASE_FLT_FN (BUILT_IN_ICEIL
):
3090 CASE_FLT_FN (BUILT_IN_LCEIL
):
3091 CASE_FLT_FN (BUILT_IN_LLCEIL
):
3092 builtin_optab
= lceil_optab
;
3093 fallback_fn
= BUILT_IN_CEIL
;
3096 CASE_FLT_FN (BUILT_IN_IFLOOR
):
3097 CASE_FLT_FN (BUILT_IN_LFLOOR
):
3098 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
3099 builtin_optab
= lfloor_optab
;
3100 fallback_fn
= BUILT_IN_FLOOR
;
3107 /* Make a suitable register to place result in. */
3108 mode
= TYPE_MODE (TREE_TYPE (exp
));
3110 target
= gen_reg_rtx (mode
);
  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side effects more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
3121 /* Compute into TARGET. */
3122 if (expand_sfix_optab (target
, op0
, builtin_optab
))
3124 /* Output the entire sequence. */
3125 insns
= get_insns ();
3131 /* If we were unable to expand via the builtin, stop the sequence
3132 (without outputting the insns). */
3135 /* Fall back to floating point rounding optab. */
3136 fallback_fndecl
= mathfn_built_in (TREE_TYPE (arg
), fallback_fn
);
  /* For non-C99 targets we may end up without a fallback fndecl here
     if the user called __builtin_lfloor directly.  In this case emit
     a call to the floor/ceil variants nevertheless.  This should result
     in the best user experience for targets without full C99 support.  */
  if (fallback_fndecl == NULL_TREE)
      const char *name = NULL;
3147 switch (DECL_FUNCTION_CODE (fndecl
))
3149 case BUILT_IN_ICEIL
:
3150 case BUILT_IN_LCEIL
:
3151 case BUILT_IN_LLCEIL
:
3154 case BUILT_IN_ICEILF
:
3155 case BUILT_IN_LCEILF
:
3156 case BUILT_IN_LLCEILF
:
3159 case BUILT_IN_ICEILL
:
3160 case BUILT_IN_LCEILL
:
3161 case BUILT_IN_LLCEILL
:
3164 case BUILT_IN_IFLOOR
:
3165 case BUILT_IN_LFLOOR
:
3166 case BUILT_IN_LLFLOOR
:
3169 case BUILT_IN_IFLOORF
:
3170 case BUILT_IN_LFLOORF
:
3171 case BUILT_IN_LLFLOORF
:
3174 case BUILT_IN_IFLOORL
:
3175 case BUILT_IN_LFLOORL
:
3176 case BUILT_IN_LLFLOORL
:
3183 fntype
= build_function_type_list (TREE_TYPE (arg
),
3184 TREE_TYPE (arg
), NULL_TREE
);
3185 fallback_fndecl
= build_fn_decl (name
, fntype
);
3188 exp
= build_call_nofold_loc (EXPR_LOCATION (exp
), fallback_fndecl
, 1, arg
);
3190 tmp
= expand_normal (exp
);
3191 tmp
= maybe_emit_group_store (tmp
, TREE_TYPE (exp
));
  /* Truncate the result of floating point optab to integer
     via expand_fix ().  */
  target = gen_reg_rtx (mode);
  expand_fix (target, tmp, 0);
/* Expand a call to one of the builtin math functions doing integer
   conversion.
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

expand_builtin_int_roundingfn_2 (tree exp, rtx target)
3210 convert_optab builtin_optab
;
3213 tree fndecl
= get_callee_fndecl (exp
);
3216 enum built_in_function fallback_fn
= BUILT_IN_NONE
;
3218 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
3221 arg
= CALL_EXPR_ARG (exp
, 0);
3223 switch (DECL_FUNCTION_CODE (fndecl
))
3225 CASE_FLT_FN (BUILT_IN_IRINT
):
3226 fallback_fn
= BUILT_IN_LRINT
;
3228 CASE_FLT_FN (BUILT_IN_LRINT
):
3229 CASE_FLT_FN (BUILT_IN_LLRINT
):
3230 builtin_optab
= lrint_optab
;
3233 CASE_FLT_FN (BUILT_IN_IROUND
):
3234 fallback_fn
= BUILT_IN_LROUND
;
3236 CASE_FLT_FN (BUILT_IN_LROUND
):
3237 CASE_FLT_FN (BUILT_IN_LLROUND
):
3238 builtin_optab
= lround_optab
;
3245 /* There's no easy way to detect the case we need to set EDOM. */
3246 if (flag_errno_math
&& fallback_fn
== BUILT_IN_NONE
)
3249 /* Make a suitable register to place result in. */
3250 mode
= TYPE_MODE (TREE_TYPE (exp
));
3252 /* There's no easy way to detect the case we need to set EDOM. */
3253 if (!flag_errno_math
)
3255 rtx result
= gen_reg_rtx (mode
);
      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
         need to expand the argument again.  This way, we will not perform
         side effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
3266 if (expand_sfix_optab (result
, op0
, builtin_optab
))
3268 /* Output the entire sequence. */
3269 insns
= get_insns ();
3275 /* If we were unable to expand via the builtin, stop the sequence
3276 (without outputting the insns) and call to the library function
3277 with the stabilized argument list. */
3281 if (fallback_fn
!= BUILT_IN_NONE
)
      /* Fall back to rounding to long int.  Use implicit_p 0 - for non-C99
         targets, (int) round (x) should never be transformed into
         BUILT_IN_IROUND and if __builtin_iround is called directly, emit
         a call to lround in the hope that the target provides at least some
         C99 functions.  This should result in the best user experience for
         targets without full C99 support.
         As scalar float conversions with the same mode are useless in GIMPLE,
         we can end up e.g. with a _Float32 argument passed to the float
         builtin, so try to get the type from the builtin prototype first.  */
3292 tree fallback_fndecl
= NULL_TREE
;
3293 if (tree argtypes
= TYPE_ARG_TYPES (TREE_TYPE (fndecl
)))
3295 = mathfn_built_in_1 (TREE_VALUE (argtypes
),
3296 as_combined_fn (fallback_fn
), 0);
3297 if (fallback_fndecl
== NULL_TREE
)
3299 = mathfn_built_in_1 (TREE_TYPE (arg
),
3300 as_combined_fn (fallback_fn
), 0);
3301 if (fallback_fndecl
)
3303 exp
= build_call_nofold_loc (EXPR_LOCATION (exp
),
3304 fallback_fndecl
, 1, arg
);
3306 target
= expand_call (exp
, NULL_RTX
, target
== const0_rtx
);
3307 target
= maybe_emit_group_store (target
, TREE_TYPE (exp
));
3308 return convert_to_mode (mode
, target
, 0);
3312 return expand_call (exp
, target
, target
== const0_rtx
);
/* Expand a call to the powi built-in mathematical function.  Return NULL_RTX if
   a normal call should be emitted rather than expanding the function
   in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

expand_builtin_powi (tree exp, rtx target)
  if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Emit a libcall to libgcc.  */

  /* Mode of the 2nd argument must match that of an int.  */
  mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();

  if (target == NULL_RTX)
    target = gen_reg_rtx (mode);

  op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != mode)
    op0 = convert_to_mode (mode, op0, 0);
  op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
  if (GET_MODE (op1) != mode2)
    op1 = convert_to_mode (mode2, op1, 0);

  target = emit_library_call_value (optab_libfunc (powi_optab, mode),
                                    target, LCT_CONST, mode,
                                    op0, mode, op1, mode2);
/* Expand expression EXP which is a call to the strlen builtin.  Return
   NULL_RTX if we failed and the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

expand_builtin_strlen (tree exp, rtx target,
                       machine_mode target_mode)
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))

  tree src = CALL_EXPR_ARG (exp, 0);

  /* If the length can be computed at compile-time, return it.  */
  if (tree len = c_strlen (src, 0))
    return expand_expr (len, target, target_mode, EXPAND_NORMAL);

  /* If the length can be computed at compile-time and is constant
     integer, but there are side-effects in src, evaluate
     src for side-effects, then return len.
     E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
     can be optimized into: i++; x = 3;  */
  tree len = c_strlen (src, 1);
  if (len && TREE_CODE (len) == INTEGER_CST)
      expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (len, target, target_mode, EXPAND_NORMAL);

  unsigned int align = get_pointer_alignment (src) / BITS_PER_UNIT;

  /* If SRC is not a pointer type, don't do this operation inline.  */

  /* Bail out if we can't compute strlen in the right mode.  */
  machine_mode insn_mode;
  enum insn_code icode = CODE_FOR_nothing;
  FOR_EACH_MODE_FROM (insn_mode, target_mode)
      icode = optab_handler (strlen_optab, insn_mode);
      if (icode != CODE_FOR_nothing)
  if (insn_mode == VOIDmode)

  /* Make a place to hold the source address.  We will not expand
     the actual source until we are sure that the expansion will
     not fail -- there are trees that cannot be expanded twice.  */
  rtx src_reg = gen_reg_rtx (Pmode);

  /* Mark the beginning of the strlen sequence so we can emit the
     source operand later.  */
  rtx_insn *before_strlen = get_last_insn ();

  class expand_operand ops[4];
  create_output_operand (&ops[0], target, insn_mode);
  create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
  create_integer_operand (&ops[2], 0);
  create_integer_operand (&ops[3], align);
  if (!maybe_expand_insn (icode, 4, ops))

  /* Check to see if the argument was declared attribute nonstring
     and if so, issue a warning since at this point it's not known
     to be nul-terminated.  */
  maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);

  /* Now that we are assured of success, expand the source.  */
  rtx pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
#ifdef POINTERS_EXTEND_UNSIGNED
  if (GET_MODE (pat) != Pmode)
    pat = convert_to_mode (Pmode, pat,
                           POINTERS_EXTEND_UNSIGNED);
    emit_move_insn (src_reg, pat);

    emit_insn_after (pat, before_strlen);
    emit_insn_before (pat, get_insns ());

  /* Return the value in the proper mode for this function.  */
  if (GET_MODE (ops[0].value) == target_mode)
    target = ops[0].value;
  else if (target != 0)
    convert_move (target, ops[0].value, 0);
    target = convert_to_mode (target_mode, ops[0].value, 0);
/* Expand call EXP to the strnlen built-in, returning the result
   and setting it in TARGET.  Otherwise return NULL_RTX on failure.  */

expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))

  tree src = CALL_EXPR_ARG (exp, 0);
  tree bound = CALL_EXPR_ARG (exp, 1);

  location_t loc = UNKNOWN_LOCATION;
  if (EXPR_HAS_LOCATION (exp))
    loc = EXPR_LOCATION (exp);

  /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
     so these conversions aren't necessary.  */
  c_strlen_data lendata = { };
  tree len = c_strlen (src, 0, &lendata, 1);
    len = fold_convert_loc (loc, TREE_TYPE (bound), len);

  if (TREE_CODE (bound) == INTEGER_CST)
      len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
      return expand_expr (len, target, target_mode, EXPAND_NORMAL);

  if (TREE_CODE (bound) != SSA_NAME)

  get_global_range_query ()->range_of_expr (r, bound);
  if (r.varying_p () || r.undefined_p ())
  min = r.lower_bound ();
  max = r.upper_bound ();

  if (!len || TREE_CODE (len) != INTEGER_CST)
      lendata.decl = unterminated_array (src, &len, &exact);

  if (wi::gtu_p (min, wi::to_wide (len)))
    return expand_expr (len, target, target_mode, EXPAND_NORMAL);

  len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
  return expand_expr (len, target, target_mode, EXPAND_NORMAL);
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from bytes at DATA + OFFSET and return it reinterpreted as
   a target constant.  */

builtin_memcpy_read_str (void *data, void *, HOST_WIDE_INT offset,
                         fixed_size_mode mode)
  /* The REPresentation pointed to by DATA need not be a nul-terminated
     string but the caller guarantees it's large enough for MODE.  */
  const char *rep = (const char *) data;

  /* The by-pieces infrastructure does not try to pick a vector mode
     for memcpy expansion.  */
  return c_readstr (rep + offset, as_a <scalar_int_mode> (mode),
                    /*nul_terminated=*/false);
/* LEN specifies the length of the block of the memcpy/memset operation.
   Figure out its range and put it into MIN_SIZE/MAX_SIZE.
   In some cases we can make a very likely guess on max size, then we
   set it into PROBABLE_MAX_SIZE.  */

determine_block_size (tree len, rtx len_rtx,
                      unsigned HOST_WIDE_INT *min_size,
                      unsigned HOST_WIDE_INT *max_size,
                      unsigned HOST_WIDE_INT *probable_max_size)
  if (CONST_INT_P (len_rtx))
      *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);

      enum value_range_kind range_type = VR_UNDEFINED;

      /* Determine bounds from the type.  */
      if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
        *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));

      if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
        *probable_max_size = *max_size
          = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
        *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));

      if (TREE_CODE (len) == SSA_NAME)
          get_global_range_query ()->range_of_expr (r, len);
          range_type = get_legacy_range (r, tmin, tmax);
          if (range_type != VR_UNDEFINED)
              min = wi::to_wide (tmin);
              max = wi::to_wide (tmax);
      if (range_type == VR_RANGE)
          if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
            *min_size = min.to_uhwi ();
          if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
            *probable_max_size = *max_size = max.to_uhwi ();
      else if (range_type == VR_ANTI_RANGE)
          /* Produce anti range allowing negative values of N.  We still
             can use the information and make a guess that N is not negative.  */
          if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
            *probable_max_size = min.to_uhwi () - 1;

  gcc_checking_assert (*max_size <=
                       (unsigned HOST_WIDE_INT)
                       GET_MODE_MASK (GET_MODE (len_rtx)));
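/* Illustrative note (not part of the original source): for a constant length,
   e.g. memcpy (d, s, 32), all three outputs above are 32; for an SSA length
   known only to lie in [0, 100], *min_size is 0 and both *max_size and
   *probable_max_size are 100.  */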
/* Expand a call EXP to the memcpy builtin.
   Return NULL_RTX if we failed, the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

expand_builtin_memcpy (tree exp, rtx target)
  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);

  return expand_builtin_memory_copy_args (dest, src, len, target, exp,
                                          /*retmode=*/ RETURN_BEGIN, false);
/* Check a call EXP to the memmove built-in for validity.
   Return NULL_RTX on both success and failure.  */

expand_builtin_memmove (tree exp, rtx target)
  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);

  return expand_builtin_memory_copy_args (dest, src, len, target, exp,
                                          /*retmode=*/ RETURN_BEGIN, true);
/* Expand a call EXP to the mempcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

expand_builtin_mempcpy (tree exp, rtx target)
  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);

  /* Policy does not generally allow using compute_objsize (which
     is used internally by check_memop_size) to change code generation
     or drive optimization decisions.

     In this instance it is safe because the code we generate has
     the same semantics regardless of the return value of
     check_memop_sizes.  Exactly the same amount of data is copied
     and the return value is exactly the same in both cases.

     Furthermore, check_memop_size always uses mode 0 for the call to
     compute_objsize, so the imprecise nature of compute_objsize is
     not a problem.  */

  /* Avoid expanding mempcpy into memcpy when the call is determined
     to overflow the buffer.  This also prevents the same overflow
     from being diagnosed again when expanding memcpy.  */

  return expand_builtin_mempcpy_args (dest, src, len,
                                      target, exp, /*retmode=*/ RETURN_END);
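/* Illustrative note (not part of the original source): the memop_ret value
   passed to expand_builtin_memory_copy_args selects what the expansion
   returns: RETURN_BEGIN yields the destination pointer (memcpy/memmove),
   RETURN_END yields dest + len (mempcpy), and RETURN_END_MINUS_ONE yields
   dest + len - 1 (stpcpy).  */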
/* Helper function to do the actual work for expand of memory copy family
   functions (memcpy, mempcpy, stpcpy).  Expansion should assign LEN bytes
   of memory from SRC to DEST and assign to TARGET if convenient.  Return
   value is based on RETMODE argument.  */

expand_builtin_memory_copy_args (tree dest, tree src, tree len,
                                 rtx target, tree exp, memop_ret retmode,
3695 unsigned int src_align
= get_pointer_alignment (src
);
3696 unsigned int dest_align
= get_pointer_alignment (dest
);
3697 rtx dest_mem
, src_mem
, dest_addr
, len_rtx
;
3698 HOST_WIDE_INT expected_size
= -1;
3699 unsigned int expected_align
= 0;
3700 unsigned HOST_WIDE_INT min_size
;
3701 unsigned HOST_WIDE_INT max_size
;
3702 unsigned HOST_WIDE_INT probable_max_size
;
3706 /* If DEST is not a pointer type, call the normal function. */
3707 if (dest_align
== 0)
3710 /* If either SRC is not a pointer type, don't do this
3711 operation in-line. */
3715 if (currently_expanding_gimple_stmt
)
3716 stringop_block_profile (currently_expanding_gimple_stmt
,
3717 &expected_align
, &expected_size
);
3719 if (expected_align
< dest_align
)
3720 expected_align
= dest_align
;
3721 dest_mem
= get_memory_rtx (dest
, len
);
3722 set_mem_align (dest_mem
, dest_align
);
3723 len_rtx
= expand_normal (len
);
3724 determine_block_size (len
, len_rtx
, &min_size
, &max_size
,
3725 &probable_max_size
);
3727 /* Try to get the byte representation of the constant SRC points to,
3728 with its byte size in NBYTES. */
3729 unsigned HOST_WIDE_INT nbytes
;
3730 const char *rep
= getbyterep (src
, &nbytes
);
3732 /* If the function's constant bound LEN_RTX is less than or equal
3733 to the byte size of the representation of the constant argument,
3734 and if block move would be done by pieces, we can avoid loading
3735 the bytes from memory and only store the computed constant.
3736 This works in the overlap (memmove) case as well because
3737 store_by_pieces just generates a series of stores of constants
3738 from the representation returned by getbyterep(). */
3740 && CONST_INT_P (len_rtx
)
3741 && (unsigned HOST_WIDE_INT
) INTVAL (len_rtx
) <= nbytes
3742 && can_store_by_pieces (INTVAL (len_rtx
), builtin_memcpy_read_str
,
3743 CONST_CAST (char *, rep
),
3746 dest_mem
= store_by_pieces (dest_mem
, INTVAL (len_rtx
),
3747 builtin_memcpy_read_str
,
3748 CONST_CAST (char *, rep
),
3749 dest_align
, false, retmode
);
3750 dest_mem
= force_operand (XEXP (dest_mem
, 0), target
);
3751 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3755 src_mem
= get_memory_rtx (src
, len
);
3756 set_mem_align (src_mem
, src_align
);
3758 /* Copy word part most expediently. */
3759 enum block_op_methods method
= BLOCK_OP_NORMAL
;
3760 if (CALL_EXPR_TAILCALL (exp
)
3761 && (retmode
== RETURN_BEGIN
|| target
== const0_rtx
))
3762 method
= BLOCK_OP_TAILCALL
;
3763 bool use_mempcpy_call
= (targetm
.libc_has_fast_function (BUILT_IN_MEMPCPY
)
3764 && retmode
== RETURN_END
3766 && target
!= const0_rtx
);
3767 if (use_mempcpy_call
)
3768 method
= BLOCK_OP_NO_LIBCALL_RET
;
3769 dest_addr
= emit_block_move_hints (dest_mem
, src_mem
, len_rtx
, method
,
3770 expected_align
, expected_size
,
3771 min_size
, max_size
, probable_max_size
,
3772 use_mempcpy_call
, &is_move_done
,
3775 /* Bail out when a mempcpy call would be expanded as libcall and when
3776 we have a target that provides a fast implementation
3777 of mempcpy routine. */
3781 if (dest_addr
== pc_rtx
)
3786 dest_addr
= force_operand (XEXP (dest_mem
, 0), target
);
3787 dest_addr
= convert_memory_address (ptr_mode
, dest_addr
);
3790 if (retmode
!= RETURN_BEGIN
&& target
!= const0_rtx
)
3792 dest_addr
= gen_rtx_PLUS (ptr_mode
, dest_addr
, len_rtx
);
3793 /* stpcpy pointer to last byte. */
3794 if (retmode
== RETURN_END_MINUS_ONE
)
3795 dest_addr
= gen_rtx_MINUS (ptr_mode
, dest_addr
, const1_rtx
);
static rtx
expand_builtin_mempcpy_args (tree dest, tree src, tree len,
			     rtx target, tree orig_exp, memop_ret retmode)
{
  return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
					  retmode, false);
}

/* Expand into a movstr instruction, if one is available.  Return NULL_RTX
   if we failed; the caller should emit a normal call, otherwise try to
   get the result in TARGET, if convenient.
   Return value is based on RETMODE argument.  */
static rtx
expand_movstr (tree dest, tree src, rtx target, memop_ret retmode)
{
  class expand_operand ops[3];
  rtx dest_mem;
  rtx src_mem;

  if (!targetm.have_movstr ())
    return NULL_RTX;

  dest_mem = get_memory_rtx (dest, NULL);
  src_mem = get_memory_rtx (src, NULL);
  if (retmode == RETURN_BEGIN)
    {
      target = force_reg (Pmode, XEXP (dest_mem, 0));
      dest_mem = replace_equiv_address (dest_mem, target);
    }

  create_output_operand (&ops[0],
			 retmode != RETURN_BEGIN ? target : NULL_RTX, Pmode);
  create_fixed_operand (&ops[1], dest_mem);
  create_fixed_operand (&ops[2], src_mem);
  if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
    return NULL_RTX;

  if (retmode != RETURN_BEGIN && target != const0_rtx)
    {
      target = ops[0].value;
      /* movstr is supposed to set end to the address of the NUL
	 terminator.  If the caller requested a mempcpy-like return value,
	 adjust it.  */
      if (retmode == RETURN_END)
	{
	  rtx tem = plus_constant (GET_MODE (target),
				   gen_lowpart (GET_MODE (target), target), 1);
	  emit_move_insn (target, force_operand (tem, NULL_RTX));
	}
    }
  return target;
}
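
/* The movstr pattern leaves the address of the copied NUL terminator in
   its output operand; the RETURN_END adjustment above bumps that address
   by one so the result matches mempcpy, which points just past the
   terminator.  */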

/* Expand expression EXP, which is a call to the strcpy builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient (and in mode MODE if
   that's convenient).  */

static rtx
expand_builtin_strcpy (tree exp, rtx target)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);

  return expand_builtin_strcpy_args (exp, dest, src, target);
}

/* Helper function to do the actual work for expand_builtin_strcpy.  The
   arguments to the builtin_strcpy call DEST and SRC are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_strcpy.  */

static rtx
expand_builtin_strcpy_args (tree, tree dest, tree src, rtx target)
{
  return expand_movstr (dest, src, target, /*retmode=*/ RETURN_BEGIN);
}
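
/* strcpy never needs the address of the end of the copy, so it is
   expanded via the movstr pattern with RETURN_BEGIN: the value produced
   is simply the destination pointer itself.  */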

/* Expand a call EXP to the stpcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */
3890 expand_builtin_stpcpy_1 (tree exp
, rtx target
, machine_mode mode
)
3893 location_t loc
= EXPR_LOCATION (exp
);
3895 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3898 dst
= CALL_EXPR_ARG (exp
, 0);
3899 src
= CALL_EXPR_ARG (exp
, 1);
3901 /* If return value is ignored, transform stpcpy into strcpy. */
3902 if (target
== const0_rtx
&& builtin_decl_implicit (BUILT_IN_STRCPY
))
3904 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
3905 tree result
= build_call_nofold_loc (loc
, fn
, 2, dst
, src
);
3906 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3913 /* Ensure we get an actual string whose length can be evaluated at
3914 compile-time, not an expression containing a string. This is
3915 because the latter will potentially produce pessimized code
3916 when used to produce the return value. */
3917 c_strlen_data lendata
= { };
3919 || !(len
= c_strlen (src
, 0, &lendata
, 1)))
3920 return expand_movstr (dst
, src
, target
,
3921 /*retmode=*/ RETURN_END_MINUS_ONE
);
3923 lenp1
= size_binop_loc (loc
, PLUS_EXPR
, len
, ssize_int (1));
3924 ret
= expand_builtin_mempcpy_args (dst
, src
, lenp1
,
3926 /*retmode=*/ RETURN_END_MINUS_ONE
);
3931 if (TREE_CODE (len
) == INTEGER_CST
)
3933 rtx len_rtx
= expand_normal (len
);
3935 if (CONST_INT_P (len_rtx
))
3937 ret
= expand_builtin_strcpy_args (exp
, dst
, src
, target
);
3943 if (mode
!= VOIDmode
)
3944 target
= gen_reg_rtx (mode
);
3946 target
= gen_reg_rtx (GET_MODE (ret
));
3948 if (GET_MODE (target
) != GET_MODE (ret
))
3949 ret
= gen_lowpart (GET_MODE (target
), ret
);
3951 ret
= plus_constant (GET_MODE (ret
), ret
, INTVAL (len_rtx
));
3952 ret
= emit_move_insn (target
, force_operand (ret
, NULL_RTX
));
3960 return expand_movstr (dst
, src
, target
,
3961 /*retmode=*/ RETURN_END_MINUS_ONE
);

/* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
   arguments while being careful to avoid duplicate warnings (which could
   be issued if the expander were to expand the call, resulting in it
   being emitted in expand_call ()).  */

static rtx
expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
{
  if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
    {
      /* The call has been successfully expanded.  Check for nonstring
	 arguments and issue warnings as appropriate.  */
      maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
      return ret;
    }

  return NULL_RTX;
}
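
/* Example of the transformation done by expand_builtin_stpcpy_1 above:
   when the source length is a compile-time constant N,
   stpcpy (d, s) is expanded like mempcpy (d, s, N + 1) with
   RETURN_END_MINUS_ONE, so the value produced is d + N, i.e. the address
   of the copied NUL terminator.  */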

/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

static rtx
builtin_strncpy_read_str (void *data, void *, HOST_WIDE_INT offset,
			  fixed_size_mode mode)
{
  const char *str = (const char *) data;

  if ((unsigned HOST_WIDE_INT) offset > strlen (str))
    return const0_rtx;

  /* The by-pieces infrastructure does not try to pick a vector mode
     for strncpy expansion.  */
  return c_readstr (str + offset, as_a <scalar_int_mode> (mode));
}
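
/* Once OFFSET moves past the source string's NUL, the callback above
   returns zero, which is what supplies strncpy's required zero padding
   when the requested length exceeds strlen (src) + 1.  */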

/* Helper to check the sizes of sequences and the destination of calls
   to __builtin_strncat and __builtin___strncat_chk.  Returns true on
   success (no overflow or invalid sizes), false otherwise.  */
4007 check_strncat_sizes (tree exp
, tree objsize
)
4009 tree dest
= CALL_EXPR_ARG (exp
, 0);
4010 tree src
= CALL_EXPR_ARG (exp
, 1);
4011 tree maxread
= CALL_EXPR_ARG (exp
, 2);
4013 /* Try to determine the range of lengths that the source expression
4015 c_strlen_data lendata
= { };
4016 get_range_strlen (src
, &lendata
, /* eltsize = */ 1);
4018 /* Try to verify that the destination is big enough for the shortest
4021 access_data
data (nullptr, exp
, access_read_write
, maxread
, true);
4022 if (!objsize
&& warn_stringop_overflow
)
4024 /* If it hasn't been provided by __strncat_chk, try to determine
4025 the size of the destination object into which the source is
4027 objsize
= compute_objsize (dest
, warn_stringop_overflow
- 1, &data
.dst
);
4030 /* Add one for the terminating nul. */
4031 tree srclen
= (lendata
.minlen
4032 ? fold_build2 (PLUS_EXPR
, size_type_node
, lendata
.minlen
,
4036 /* The strncat function copies at most MAXREAD bytes and always appends
4037 the terminating nul so the specified upper bound should never be equal
4038 to (or greater than) the size of the destination. */
4039 if (tree_fits_uhwi_p (maxread
) && tree_fits_uhwi_p (objsize
)
4040 && tree_int_cst_equal (objsize
, maxread
))
4042 location_t loc
= EXPR_LOCATION (exp
);
4043 warning_at (loc
, OPT_Wstringop_overflow_
,
4044 "%qD specified bound %E equals destination size",
4045 get_callee_fndecl (exp
), maxread
);
4051 || (maxread
&& tree_fits_uhwi_p (maxread
)
4052 && tree_fits_uhwi_p (srclen
)
4053 && tree_int_cst_lt (maxread
, srclen
)))
  /* The number of bytes to write is LEN but check_access will also
     check SRCLEN if LEN's value isn't known.  */
4058 return check_access (exp
, /*dstwrite=*/NULL_TREE
, maxread
, srclen
,
4059 objsize
, data
.mode
, &data
);

/* Expand expression EXP, which is a call to the strncpy builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  */
4066 expand_builtin_strncpy (tree exp
, rtx target
)
4068 location_t loc
= EXPR_LOCATION (exp
);
4070 if (!validate_arglist (exp
,
4071 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
4073 tree dest
= CALL_EXPR_ARG (exp
, 0);
4074 tree src
= CALL_EXPR_ARG (exp
, 1);
4075 /* The number of bytes to write (not the maximum). */
4076 tree len
= CALL_EXPR_ARG (exp
, 2);
4078 /* The length of the source sequence. */
4079 tree slen
= c_strlen (src
, 1);
4081 /* We must be passed a constant len and src parameter. */
4082 if (!tree_fits_uhwi_p (len
) || !slen
|| !tree_fits_uhwi_p (slen
))
4085 slen
= size_binop_loc (loc
, PLUS_EXPR
, slen
, ssize_int (1));
4087 /* We're required to pad with trailing zeros if the requested
4088 len is greater than strlen(s2)+1. In that case try to
4089 use store_by_pieces, if it fails, punt. */
4090 if (tree_int_cst_lt (slen
, len
))
4092 unsigned int dest_align
= get_pointer_alignment (dest
);
4093 const char *p
= c_getstr (src
);
4096 if (!p
|| dest_align
== 0 || !tree_fits_uhwi_p (len
)
4097 || !can_store_by_pieces (tree_to_uhwi (len
),
4098 builtin_strncpy_read_str
,
4099 CONST_CAST (char *, p
),
4103 dest_mem
= get_memory_rtx (dest
, len
);
4104 store_by_pieces (dest_mem
, tree_to_uhwi (len
),
4105 builtin_strncpy_read_str
,
4106 CONST_CAST (char *, p
), dest_align
, false,
4108 dest_mem
= force_operand (XEXP (dest_mem
, 0), target
);
4109 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);

/* Return the RTL of a register in MODE generated from PREV in the
   previous iteration.  */
4120 gen_memset_value_from_prev (by_pieces_prev
*prev
, fixed_size_mode mode
)
4122 rtx target
= nullptr;
4123 if (prev
!= nullptr && prev
->data
!= nullptr)
4125 /* Use the previous data in the same mode. */
4126 if (prev
->mode
== mode
)
4129 fixed_size_mode prev_mode
= prev
->mode
;
4131 /* Don't use the previous data to write QImode if it is in a
4133 if (VECTOR_MODE_P (prev_mode
) && mode
== QImode
)
4136 rtx prev_rtx
= prev
->data
;
4138 if (REG_P (prev_rtx
)
4139 && HARD_REGISTER_P (prev_rtx
)
4140 && lowpart_subreg_regno (REGNO (prev_rtx
), prev_mode
, mode
) < 0)
4142 /* This case occurs when PREV_MODE is a vector and when
4143 MODE is too small to store using vector operations.
4144 After register allocation, the code will need to move the
4145 lowpart of the vector register into a non-vector register.
4147 Also, the target has chosen to use a hard register
4148 instead of going with the default choice of using a
4149 pseudo register. We should respect that choice and try to
4150 avoid creating a pseudo register with the same mode as the
4151 current hard register.
4153 In principle, we could just use a lowpart MODE subreg of
4154 the vector register. However, the vector register mode might
4155 be too wide for non-vector registers, and we already know
4156 that the non-vector mode is too small for vector registers.
4157 It's therefore likely that we'd need to spill to memory in
4158 the vector mode and reload the non-vector value from there.
4160 Try to avoid that by reducing the vector register to the
4161 smallest size that it can hold. This should increase the
4162 chances that non-vector registers can hold both the inner
4163 and outer modes of the subreg that we generate later. */
4165 fixed_size_mode candidate
;
4166 FOR_EACH_MODE_IN_CLASS (m
, GET_MODE_CLASS (mode
))
4167 if (is_a
<fixed_size_mode
> (m
, &candidate
))
4169 if (GET_MODE_SIZE (candidate
)
4170 >= GET_MODE_SIZE (prev_mode
))
4172 if (GET_MODE_SIZE (candidate
) >= GET_MODE_SIZE (mode
)
4173 && lowpart_subreg_regno (REGNO (prev_rtx
),
4174 prev_mode
, candidate
) >= 0)
4176 target
= lowpart_subreg (candidate
, prev_rtx
,
4179 prev_mode
= candidate
;
4183 if (target
== nullptr)
4184 prev_rtx
= copy_to_reg (prev_rtx
);
4187 target
= lowpart_subreg (mode
, prev_rtx
, prev_mode
);

/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  If PREV isn't nullptr, it has the RTL info from the
   previous iteration.  */
4198 builtin_memset_read_str (void *data
, void *prev
,
4199 HOST_WIDE_INT offset ATTRIBUTE_UNUSED
,
4200 fixed_size_mode mode
)
4202 const char *c
= (const char *) data
;
4203 unsigned int size
= GET_MODE_SIZE (mode
);
4205 rtx target
= gen_memset_value_from_prev ((by_pieces_prev
*) prev
,
4207 if (target
!= nullptr)
4209 rtx src
= gen_int_mode (*c
, QImode
);
4211 if (VECTOR_MODE_P (mode
))
4213 gcc_assert (GET_MODE_INNER (mode
) == QImode
);
4215 rtx const_vec
= gen_const_vec_duplicate (mode
, src
);
4217 /* Return CONST_VECTOR when called by a query function. */
4220 /* Use the move expander with CONST_VECTOR. */
4221 target
= gen_reg_rtx (mode
);
4222 emit_move_insn (target
, const_vec
);
4226 char *p
= XALLOCAVEC (char, size
);
4228 memset (p
, *c
, size
);
4230 /* Vector modes should be handled above. */
4231 return c_readstr (p
, as_a
<scalar_int_mode
> (mode
));

/* Callback routine for store_by_pieces.  Return the RTL of a register
   containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
   char value given in the RTL register data.  For example, if mode is
   4 bytes wide, return the RTL for 0x01010101*data.  If PREV isn't
   nullptr, it has the RTL info from the previous iteration.  */
4241 builtin_memset_gen_str (void *data
, void *prev
,
4242 HOST_WIDE_INT offset ATTRIBUTE_UNUSED
,
4243 fixed_size_mode mode
)
4249 size
= GET_MODE_SIZE (mode
);
4253 target
= gen_memset_value_from_prev ((by_pieces_prev
*) prev
, mode
);
4254 if (target
!= nullptr)
4257 if (VECTOR_MODE_P (mode
))
4259 gcc_assert (GET_MODE_INNER (mode
) == QImode
);
4261 /* vec_duplicate_optab is a precondition to pick a vector mode for
4262 the memset expander. */
4263 insn_code icode
= optab_handler (vec_duplicate_optab
, mode
);
4265 target
= gen_reg_rtx (mode
);
4266 class expand_operand ops
[2];
4267 create_output_operand (&ops
[0], target
, mode
);
4268 create_input_operand (&ops
[1], (rtx
) data
, QImode
);
4269 expand_insn (icode
, 2, ops
);
4270 if (!rtx_equal_p (target
, ops
[0].value
))
4271 emit_move_insn (target
, ops
[0].value
);
4276 p
= XALLOCAVEC (char, size
);
4277 memset (p
, 1, size
);
4278 /* Vector modes should be handled above. */
4279 coeff
= c_readstr (p
, as_a
<scalar_int_mode
> (mode
));
4281 target
= convert_to_mode (mode
, (rtx
) data
, 1);
4282 target
= expand_mult (mode
, target
, coeff
, NULL_RTX
, 1);
4283 return force_reg (mode
, target
);

/* Expand expression EXP, which is a call to the memset builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient (and in mode MODE if
   that's convenient).  */

static rtx
expand_builtin_memset (tree exp, rtx target, machine_mode mode)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree val = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);

  return expand_builtin_memset_args (dest, val, len, target, mode, exp);
}

/* Try to store VAL (or, if NULL_RTX, VALC) in LEN bytes starting at TO.
   Return TRUE if successful, FALSE otherwise.  TO is assumed to be
   aligned at an ALIGN-bits boundary.  LEN must be a multiple of
   1<<CTZ_LEN between MIN_LEN and MAX_LEN.

   The strategy is to issue one store_by_pieces for each power of two,
   from most to least significant, guarded by a test on whether there
   are at least that many bytes left to copy in LEN.

   ??? Should we skip some powers of two in favor of loops?  Maybe start
   at the max of TO/LEN/word alignment, at least when optimizing for
   size, instead of ensuring O(log len) dynamic compares?  */
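
/* Illustrative sketch (not exact): for LEN known to lie in [8, 100] with
   CTZ_LEN == 2, the loop below issues guarded store_by_pieces calls for
   64, 32, 16, 8 and 4 bytes, each preceded by a test of the remaining
   length; an initial fixed-size block store may be emitted first to
   shrink the range that the tests have to cover.  */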
4319 try_store_by_multiple_pieces (rtx to
, rtx len
, unsigned int ctz_len
,
4320 unsigned HOST_WIDE_INT min_len
,
4321 unsigned HOST_WIDE_INT max_len
,
4322 rtx val
, char valc
, unsigned int align
)
4324 int max_bits
= floor_log2 (max_len
);
4325 int min_bits
= floor_log2 (min_len
);
4326 int sctz_len
= ctz_len
;
4328 gcc_checking_assert (sctz_len
>= 0);
4333 /* Bits more significant than TST_BITS are part of the shared prefix
4334 in the binary representation of both min_len and max_len. Since
4335 they're identical, we don't need to test them in the loop. */
4336 int tst_bits
= (max_bits
!= min_bits
? max_bits
4337 : floor_log2 (max_len
^ min_len
));
4339 /* Check whether it's profitable to start by storing a fixed BLKSIZE
4340 bytes, to lower max_bits. In the unlikely case of a constant LEN
4341 (implied by identical MAX_LEN and MIN_LEN), we want to issue a
4342 single store_by_pieces, but otherwise, select the minimum multiple
4343 of the ALIGN (in bytes) and of the MCD of the possible LENs, that
4344 brings MAX_LEN below TST_BITS, if that's lower than min_len. */
4345 unsigned HOST_WIDE_INT blksize
;
4346 if (max_len
> min_len
)
4348 unsigned HOST_WIDE_INT alrng
= MAX (HOST_WIDE_INT_1U
<< ctz_len
,
4349 align
/ BITS_PER_UNIT
);
4350 blksize
= max_len
- (HOST_WIDE_INT_1U
<< tst_bits
) + alrng
;
4351 blksize
&= ~(alrng
- 1);
4353 else if (max_len
== min_len
)
4356 /* Huh, max_len < min_len? Punt. See pr100843.c. */
4358 if (min_len
>= blksize
)
4361 min_bits
= floor_log2 (min_len
);
4363 max_bits
= floor_log2 (max_len
);
4365 tst_bits
= (max_bits
!= min_bits
? max_bits
4366 : floor_log2 (max_len
^ min_len
));
4371 /* Check that we can use store by pieces for the maximum store count
4372 we may issue (initial fixed-size block, plus conditional
4373 power-of-two-sized from max_bits to ctz_len. */
4374 unsigned HOST_WIDE_INT xlenest
= blksize
;
4376 xlenest
+= ((HOST_WIDE_INT_1U
<< max_bits
) * 2
4377 - (HOST_WIDE_INT_1U
<< ctz_len
));
4378 if (!can_store_by_pieces (xlenest
, builtin_memset_read_str
,
4379 &valc
, align
, true))
4382 by_pieces_constfn constfun
;
4386 constfun
= builtin_memset_gen_str
;
4387 constfundata
= val
= force_reg (TYPE_MODE (unsigned_char_type_node
),
4392 constfun
= builtin_memset_read_str
;
4393 constfundata
= &valc
;
4396 rtx ptr
= copy_addr_to_reg (XEXP (to
, 0));
4397 rtx rem
= copy_to_mode_reg (ptr_mode
, convert_to_mode (ptr_mode
, len
, 0));
4398 to
= replace_equiv_address (to
, ptr
);
4399 set_mem_align (to
, align
);
4403 to
= store_by_pieces (to
, blksize
,
4404 constfun
, constfundata
,
4406 max_len
!= 0 ? RETURN_END
: RETURN_BEGIN
);
4410 /* Adjust PTR, TO and REM. Since TO's address is likely
4411 PTR+offset, we have to replace it. */
4412 emit_move_insn (ptr
, force_operand (XEXP (to
, 0), NULL_RTX
));
4413 to
= replace_equiv_address (to
, ptr
);
4414 rtx rem_minus_blksize
= plus_constant (ptr_mode
, rem
, -blksize
);
4415 emit_move_insn (rem
, force_operand (rem_minus_blksize
, NULL_RTX
));
4418 /* Iterate over power-of-two block sizes from the maximum length to
4419 the least significant bit possibly set in the length. */
4420 for (int i
= max_bits
; i
>= sctz_len
; i
--)
4422 rtx_code_label
*label
= NULL
;
4423 blksize
= HOST_WIDE_INT_1U
<< i
;
4425 /* If we're past the bits shared between min_ and max_len, expand
4426 a test on the dynamic length, comparing it with the
4430 label
= gen_label_rtx ();
4431 emit_cmp_and_jump_insns (rem
, GEN_INT (blksize
), LT
, NULL
,
4433 profile_probability::even ());
4435 /* If we are at a bit that is in the prefix shared by min_ and
4436 max_len, skip this BLKSIZE if the bit is clear. */
4437 else if ((max_len
& blksize
) == 0)
4440 /* Issue a store of BLKSIZE bytes. */
4441 to
= store_by_pieces (to
, blksize
,
4442 constfun
, constfundata
,
4444 i
!= sctz_len
? RETURN_END
: RETURN_BEGIN
);
4446 /* Adjust REM and PTR, unless this is the last iteration. */
4449 emit_move_insn (ptr
, force_operand (XEXP (to
, 0), NULL_RTX
));
4450 to
= replace_equiv_address (to
, ptr
);
4451 rtx rem_minus_blksize
= plus_constant (ptr_mode
, rem
, -blksize
);
4452 emit_move_insn (rem
, force_operand (rem_minus_blksize
, NULL_RTX
));
4459 /* Given conditional stores, the offset can no longer be
4460 known, so clear it. */
4461 clear_mem_offset (to
);

/* Helper function to do the actual work for expand_builtin_memset.  The
   arguments to the builtin_memset call DEST, VAL, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_memset.  */
4475 expand_builtin_memset_args (tree dest
, tree val
, tree len
,
4476 rtx target
, machine_mode mode
, tree orig_exp
)
4479 enum built_in_function fcode
;
4480 machine_mode val_mode
;
4482 unsigned int dest_align
;
4483 rtx dest_mem
, dest_addr
, len_rtx
;
4484 HOST_WIDE_INT expected_size
= -1;
4485 unsigned int expected_align
= 0;
4486 unsigned HOST_WIDE_INT min_size
;
4487 unsigned HOST_WIDE_INT max_size
;
4488 unsigned HOST_WIDE_INT probable_max_size
;
4490 dest_align
= get_pointer_alignment (dest
);
4492 /* If DEST is not a pointer type, don't do this operation in-line. */
4493 if (dest_align
== 0)
4496 if (currently_expanding_gimple_stmt
)
4497 stringop_block_profile (currently_expanding_gimple_stmt
,
4498 &expected_align
, &expected_size
);
4500 if (expected_align
< dest_align
)
4501 expected_align
= dest_align
;
4503 /* If the LEN parameter is zero, return DEST. */
4504 if (integer_zerop (len
))
4506 /* Evaluate and ignore VAL in case it has side-effects. */
4507 expand_expr (val
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4508 return expand_expr (dest
, target
, mode
, EXPAND_NORMAL
);
4511 /* Stabilize the arguments in case we fail. */
4512 dest
= builtin_save_expr (dest
);
4513 val
= builtin_save_expr (val
);
4514 len
= builtin_save_expr (len
);
4516 len_rtx
= expand_normal (len
);
4517 determine_block_size (len
, len_rtx
, &min_size
, &max_size
,
4518 &probable_max_size
);
4519 dest_mem
= get_memory_rtx (dest
, len
);
4520 val_mode
= TYPE_MODE (unsigned_char_type_node
);
4522 if (TREE_CODE (val
) != INTEGER_CST
4523 || target_char_cast (val
, &c
))
4527 val_rtx
= expand_normal (val
);
4528 val_rtx
= convert_to_mode (val_mode
, val_rtx
, 0);
4530 /* Assume that we can memset by pieces if we can store
4531 * the coefficients by pieces (in the required modes).
4532 * We can't pass builtin_memset_gen_str as that emits RTL. */
4534 if (tree_fits_uhwi_p (len
)
4535 && can_store_by_pieces (tree_to_uhwi (len
),
4536 builtin_memset_read_str
, &c
, dest_align
,
4539 val_rtx
= force_reg (val_mode
, val_rtx
);
4540 store_by_pieces (dest_mem
, tree_to_uhwi (len
),
4541 builtin_memset_gen_str
, val_rtx
, dest_align
,
4542 true, RETURN_BEGIN
);
4544 else if (!set_storage_via_setmem (dest_mem
, len_rtx
, val_rtx
,
4545 dest_align
, expected_align
,
4546 expected_size
, min_size
, max_size
,
4548 && !try_store_by_multiple_pieces (dest_mem
, len_rtx
,
4555 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
4556 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
4562 if (tree_fits_uhwi_p (len
)
4563 && can_store_by_pieces (tree_to_uhwi (len
),
4564 builtin_memset_read_str
, &c
, dest_align
,
4566 store_by_pieces (dest_mem
, tree_to_uhwi (len
),
4567 builtin_memset_read_str
, &c
, dest_align
, true,
4569 else if (!set_storage_via_setmem (dest_mem
, len_rtx
,
4570 gen_int_mode (c
, val_mode
),
4571 dest_align
, expected_align
,
4572 expected_size
, min_size
, max_size
,
4574 && !try_store_by_multiple_pieces (dest_mem
, len_rtx
,
4581 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
4582 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
4586 set_mem_align (dest_mem
, dest_align
);
4587 dest_addr
= clear_storage_hints (dest_mem
, len_rtx
,
4588 CALL_EXPR_TAILCALL (orig_exp
)
4589 ? BLOCK_OP_TAILCALL
: BLOCK_OP_NORMAL
,
4590 expected_align
, expected_size
,
4592 probable_max_size
, tree_ctz (len
));
4596 dest_addr
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
4597 dest_addr
= convert_memory_address (ptr_mode
, dest_addr
);
4603 fndecl
= get_callee_fndecl (orig_exp
);
4604 fcode
= DECL_FUNCTION_CODE (fndecl
);
4605 if (fcode
== BUILT_IN_MEMSET
)
4606 fn
= build_call_nofold_loc (EXPR_LOCATION (orig_exp
), fndecl
, 3,
4608 else if (fcode
== BUILT_IN_BZERO
)
4609 fn
= build_call_nofold_loc (EXPR_LOCATION (orig_exp
), fndecl
, 2,
4613 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
4614 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (orig_exp
);
4615 return expand_call (fn
, target
, target
== const0_rtx
);

/* Expand expression EXP, which is a call to the bzero builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  */

static rtx
expand_builtin_bzero (tree exp)
{
  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree size = CALL_EXPR_ARG (exp, 1);

  /* New argument list transforming bzero(ptr x, int y) to
     memset(ptr x, int 0, size_t y).  This is done this way
     so that if it isn't expanded inline, we fall back to
     calling bzero instead of memset.  */

  location_t loc = EXPR_LOCATION (exp);

  return expand_builtin_memset_args (dest, integer_zero_node,
				     fold_convert_loc (loc,
						       size_type_node, size),
				     const0_rtx, VOIDmode, exp);
}
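
/* In effect bzero (p, n) is rewritten here as memset (p, 0, (size_t) n),
   expanded with a const0_rtx target since bzero has no return value; if
   inline expansion fails, the original bzero call is emitted instead.  */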

/* Try to expand cmpstr operation ICODE with the given operands.
   Return the result rtx on success, otherwise return null.  */

static rtx
expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
	       HOST_WIDE_INT align)
{
  machine_mode insn_mode = insn_data[icode].operand[0].mode;

  if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
    target = NULL_RTX;

  class expand_operand ops[4];
  create_output_operand (&ops[0], target, insn_mode);
  create_fixed_operand (&ops[1], arg1_rtx);
  create_fixed_operand (&ops[2], arg2_rtx);
  create_integer_operand (&ops[3], align);
  if (maybe_expand_insn (icode, 4, ops))
    return ops[0].value;
  return NULL_RTX;
}

/* Expand expression EXP, which is a call to the memcmp built-in function.
   Return NULL_RTX if we failed and the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient.
   RESULT_EQ is true if we can relax the returned value to be either zero
   or nonzero, without caring about the sign.  */
4672 expand_builtin_memcmp (tree exp
, rtx target
, bool result_eq
)
4674 if (!validate_arglist (exp
,
4675 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
4678 tree arg1
= CALL_EXPR_ARG (exp
, 0);
4679 tree arg2
= CALL_EXPR_ARG (exp
, 1);
4680 tree len
= CALL_EXPR_ARG (exp
, 2);
4682 /* Due to the performance benefit, always inline the calls first
4683 when result_eq is false. */
4684 rtx result
= NULL_RTX
;
4685 enum built_in_function fcode
= DECL_FUNCTION_CODE (get_callee_fndecl (exp
));
4686 if (!result_eq
&& fcode
!= BUILT_IN_BCMP
)
4688 result
= inline_expand_builtin_bytecmp (exp
, target
);
4693 machine_mode mode
= TYPE_MODE (TREE_TYPE (exp
));
4694 location_t loc
= EXPR_LOCATION (exp
);
4696 unsigned int arg1_align
= get_pointer_alignment (arg1
) / BITS_PER_UNIT
;
4697 unsigned int arg2_align
= get_pointer_alignment (arg2
) / BITS_PER_UNIT
;
4699 /* If we don't have POINTER_TYPE, call the function. */
4700 if (arg1_align
== 0 || arg2_align
== 0)
4703 rtx arg1_rtx
= get_memory_rtx (arg1
, len
);
4704 rtx arg2_rtx
= get_memory_rtx (arg2
, len
);
4705 rtx len_rtx
= expand_normal (fold_convert_loc (loc
, sizetype
, len
));
4707 /* Set MEM_SIZE as appropriate. */
4708 if (CONST_INT_P (len_rtx
))
4710 set_mem_size (arg1_rtx
, INTVAL (len_rtx
));
4711 set_mem_size (arg2_rtx
, INTVAL (len_rtx
));
4714 by_pieces_constfn constfn
= NULL
;
4716 /* Try to get the byte representation of the constant ARG2 (or, only
4717 when the function's result is used for equality to zero, ARG1)
4718 points to, with its byte size in NBYTES. */
4719 unsigned HOST_WIDE_INT nbytes
;
4720 const char *rep
= getbyterep (arg2
, &nbytes
);
4721 if (result_eq
&& rep
== NULL
)
4723 /* For equality to zero the arguments are interchangeable. */
4724 rep
= getbyterep (arg1
, &nbytes
);
4726 std::swap (arg1_rtx
, arg2_rtx
);
4729 /* If the function's constant bound LEN_RTX is less than or equal
4730 to the byte size of the representation of the constant argument,
4731 and if block move would be done by pieces, we can avoid loading
4732 the bytes from memory and only store the computed constant result. */
4734 && CONST_INT_P (len_rtx
)
4735 && (unsigned HOST_WIDE_INT
) INTVAL (len_rtx
) <= nbytes
)
4736 constfn
= builtin_memcpy_read_str
;
4738 result
= emit_block_cmp_hints (arg1_rtx
, arg2_rtx
, len_rtx
,
4739 TREE_TYPE (len
), target
,
4741 CONST_CAST (char *, rep
));
4745 /* Return the value in the proper mode for this function. */
4746 if (GET_MODE (result
) == mode
)
4751 convert_move (target
, result
, 0);
4755 return convert_to_mode (mode
, result
, 0);

/* Expand expression EXP, which is a call to the strcmp builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */
4766 expand_builtin_strcmp (tree exp
, ATTRIBUTE_UNUSED rtx target
)
4768 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
4771 tree arg1
= CALL_EXPR_ARG (exp
, 0);
4772 tree arg2
= CALL_EXPR_ARG (exp
, 1);
4774 /* Due to the performance benefit, always inline the calls first. */
4775 rtx result
= NULL_RTX
;
4776 result
= inline_expand_builtin_bytecmp (exp
, target
);
4780 insn_code cmpstr_icode
= direct_optab_handler (cmpstr_optab
, SImode
);
4781 insn_code cmpstrn_icode
= direct_optab_handler (cmpstrn_optab
, SImode
);
4782 if (cmpstr_icode
== CODE_FOR_nothing
&& cmpstrn_icode
== CODE_FOR_nothing
)
4785 unsigned int arg1_align
= get_pointer_alignment (arg1
) / BITS_PER_UNIT
;
4786 unsigned int arg2_align
= get_pointer_alignment (arg2
) / BITS_PER_UNIT
;
4788 /* If we don't have POINTER_TYPE, call the function. */
4789 if (arg1_align
== 0 || arg2_align
== 0)
4792 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4793 arg1
= builtin_save_expr (arg1
);
4794 arg2
= builtin_save_expr (arg2
);
4796 rtx arg1_rtx
= get_memory_rtx (arg1
, NULL
);
4797 rtx arg2_rtx
= get_memory_rtx (arg2
, NULL
);
4799 /* Try to call cmpstrsi. */
4800 if (cmpstr_icode
!= CODE_FOR_nothing
)
4801 result
= expand_cmpstr (cmpstr_icode
, target
, arg1_rtx
, arg2_rtx
,
4802 MIN (arg1_align
, arg2_align
));
4804 /* Try to determine at least one length and call cmpstrnsi. */
4805 if (!result
&& cmpstrn_icode
!= CODE_FOR_nothing
)
4810 tree len1
= c_strlen (arg1
, 1);
4811 tree len2
= c_strlen (arg2
, 1);
4814 len1
= size_binop (PLUS_EXPR
, ssize_int (1), len1
);
4816 len2
= size_binop (PLUS_EXPR
, ssize_int (1), len2
);
4818 /* If we don't have a constant length for the first, use the length
4819 of the second, if we know it. We don't require a constant for
4820 this case; some cost analysis could be done if both are available
4821 but neither is constant. For now, assume they're equally cheap,
4822 unless one has side effects. If both strings have constant lengths,
4829 else if (TREE_SIDE_EFFECTS (len1
))
4831 else if (TREE_SIDE_EFFECTS (len2
))
4833 else if (TREE_CODE (len1
) != INTEGER_CST
)
4835 else if (TREE_CODE (len2
) != INTEGER_CST
)
4837 else if (tree_int_cst_lt (len1
, len2
))
4842 /* If both arguments have side effects, we cannot optimize. */
4843 if (len
&& !TREE_SIDE_EFFECTS (len
))
4845 arg3_rtx
= expand_normal (len
);
4846 result
= expand_cmpstrn_or_cmpmem
4847 (cmpstrn_icode
, target
, arg1_rtx
, arg2_rtx
, TREE_TYPE (len
),
4848 arg3_rtx
, MIN (arg1_align
, arg2_align
));
4852 tree fndecl
= get_callee_fndecl (exp
);
4855 /* Return the value in the proper mode for this function. */
4856 machine_mode mode
= TYPE_MODE (TREE_TYPE (exp
));
4857 if (GET_MODE (result
) == mode
)
4860 return convert_to_mode (mode
, result
, 0);
4861 convert_move (target
, result
, 0);
4865 /* Expand the library call ourselves using a stabilized argument
4866 list to avoid re-evaluating the function's arguments twice. */
4867 tree fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fndecl
, 2, arg1
, arg2
);
4868 copy_warning (fn
, exp
);
4869 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
4870 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
4871 return expand_call (fn
, target
, target
== const0_rtx
);

/* Expand expression EXP, which is a call to the strncmp builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */
4879 expand_builtin_strncmp (tree exp
, ATTRIBUTE_UNUSED rtx target
,
4880 ATTRIBUTE_UNUSED machine_mode mode
)
4882 if (!validate_arglist (exp
,
4883 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
4886 tree arg1
= CALL_EXPR_ARG (exp
, 0);
4887 tree arg2
= CALL_EXPR_ARG (exp
, 1);
4888 tree arg3
= CALL_EXPR_ARG (exp
, 2);
4890 location_t loc
= EXPR_LOCATION (exp
);
4891 tree len1
= c_strlen (arg1
, 1);
4892 tree len2
= c_strlen (arg2
, 1);
4894 /* Due to the performance benefit, always inline the calls first. */
4895 rtx result
= NULL_RTX
;
4896 result
= inline_expand_builtin_bytecmp (exp
, target
);
4900 /* If c_strlen can determine an expression for one of the string
4901 lengths, and it doesn't have side effects, then emit cmpstrnsi
4902 using length MIN(strlen(string)+1, arg3). */
4903 insn_code cmpstrn_icode
= direct_optab_handler (cmpstrn_optab
, SImode
);
4904 if (cmpstrn_icode
== CODE_FOR_nothing
)
4909 unsigned int arg1_align
= get_pointer_alignment (arg1
) / BITS_PER_UNIT
;
4910 unsigned int arg2_align
= get_pointer_alignment (arg2
) / BITS_PER_UNIT
;
4913 len1
= size_binop_loc (loc
, PLUS_EXPR
, ssize_int (1), len1
);
4915 len2
= size_binop_loc (loc
, PLUS_EXPR
, ssize_int (1), len2
);
4917 tree len3
= fold_convert_loc (loc
, sizetype
, arg3
);
4919 /* If we don't have a constant length for the first, use the length
4920 of the second, if we know it. If neither string is constant length,
4921 use the given length argument. We don't require a constant for
4922 this case; some cost analysis could be done if both are available
4923 but neither is constant. For now, assume they're equally cheap,
4924 unless one has side effects. If both strings have constant lengths,
4933 else if (TREE_SIDE_EFFECTS (len1
))
4935 else if (TREE_SIDE_EFFECTS (len2
))
4937 else if (TREE_CODE (len1
) != INTEGER_CST
)
4939 else if (TREE_CODE (len2
) != INTEGER_CST
)
4941 else if (tree_int_cst_lt (len1
, len2
))
4946 /* If we are not using the given length, we must incorporate it here.
4947 The actual new length parameter will be MIN(len,arg3) in this case. */
4950 len
= fold_convert_loc (loc
, sizetype
, len
);
4951 len
= fold_build2_loc (loc
, MIN_EXPR
, TREE_TYPE (len
), len
, len3
);
4953 rtx arg1_rtx
= get_memory_rtx (arg1
, len
);
4954 rtx arg2_rtx
= get_memory_rtx (arg2
, len
);
4955 rtx arg3_rtx
= expand_normal (len
);
4956 result
= expand_cmpstrn_or_cmpmem (cmpstrn_icode
, target
, arg1_rtx
,
4957 arg2_rtx
, TREE_TYPE (len
), arg3_rtx
,
4958 MIN (arg1_align
, arg2_align
));
4960 tree fndecl
= get_callee_fndecl (exp
);
4963 /* Return the value in the proper mode for this function. */
4964 mode
= TYPE_MODE (TREE_TYPE (exp
));
4965 if (GET_MODE (result
) == mode
)
4968 return convert_to_mode (mode
, result
, 0);
4969 convert_move (target
, result
, 0);
4973 /* Expand the library call ourselves using a stabilized argument
4974 list to avoid re-evaluating the function's arguments twice. */
4975 tree call
= build_call_nofold_loc (loc
, fndecl
, 3, arg1
, arg2
, len
);
4976 copy_warning (call
, exp
);
4977 gcc_assert (TREE_CODE (call
) == CALL_EXPR
);
4978 CALL_EXPR_TAILCALL (call
) = CALL_EXPR_TAILCALL (exp
);
4979 return expand_call (call
, target
, target
== const0_rtx
);
4982 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4983 if that's convenient. */
4986 expand_builtin_saveregs (void)
4991 /* Don't do __builtin_saveregs more than once in a function.
4992 Save the result of the first call and reuse it. */
4993 if (saveregs_value
!= 0)
4994 return saveregs_value
;
4996 /* When this function is called, it means that registers must be
4997 saved on entry to this function. So we migrate the call to the
4998 first insn of this function. */
5002 /* Do whatever the machine needs done in this case. */
5003 val
= targetm
.calls
.expand_builtin_saveregs ();
5008 saveregs_value
= val
;
5010 /* Put the insns after the NOTE that starts the function. If this
5011 is inside a start_sequence, make the outer-level insn chain current, so
5012 the code is placed at the start of the function. */
5013 push_topmost_sequence ();
5014 emit_insn_after (seq
, entry_of_function ());
5015 pop_topmost_sequence ();

/* Expand a call to __builtin_next_arg.  */

static rtx
expand_builtin_next_arg (void)
{
  /* Checking arguments is already done in fold_builtin_next_arg
     that must be called before this function.  */
  return expand_binop (ptr_mode, add_optab,
		       crtl->args.internal_arg_pointer,
		       crtl->args.arg_offset_rtx,
		       NULL_RTX, 0, OPTAB_LIB_WIDEN);
}
5033 /* Make it easier for the backends by protecting the valist argument
5034 from multiple evaluations. */
5037 stabilize_va_list_loc (location_t loc
, tree valist
, int needs_lvalue
)
5039 tree vatype
= targetm
.canonical_va_list_type (TREE_TYPE (valist
));
5041 /* The current way of determining the type of valist is completely
5042 bogus. We should have the information on the va builtin instead. */
5044 vatype
= targetm
.fn_abi_va_list (cfun
->decl
);
5046 if (TREE_CODE (vatype
) == ARRAY_TYPE
)
5048 if (TREE_SIDE_EFFECTS (valist
))
5049 valist
= save_expr (valist
);
5051 /* For this case, the backends will be expecting a pointer to
5052 vatype, but it's possible we've actually been given an array
5053 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
5055 if (TREE_CODE (TREE_TYPE (valist
)) == ARRAY_TYPE
)
5057 tree p1
= build_pointer_type (TREE_TYPE (vatype
));
5058 valist
= build_fold_addr_expr_with_type_loc (loc
, valist
, p1
);
5063 tree pt
= build_pointer_type (vatype
);
5067 if (! TREE_SIDE_EFFECTS (valist
))
5070 valist
= fold_build1_loc (loc
, ADDR_EXPR
, pt
, valist
);
5071 TREE_SIDE_EFFECTS (valist
) = 1;
5074 if (TREE_SIDE_EFFECTS (valist
))
5075 valist
= save_expr (valist
);
5076 valist
= fold_build2_loc (loc
, MEM_REF
,
5077 vatype
, valist
, build_int_cst (pt
, 0));

/* The "standard" definition of va_list is void*.  */

tree
std_build_builtin_va_list (void)
{
  return ptr_type_node;
}

/* The "standard" abi va_list is va_list_type_node.  */

tree
std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
{
  return va_list_type_node;
}
5099 /* The "standard" type of va_list is va_list_type_node. */
5102 std_canonical_va_list_type (tree type
)
5106 wtype
= va_list_type_node
;
5109 if (TREE_CODE (wtype
) == ARRAY_TYPE
)
5111 /* If va_list is an array type, the argument may have decayed
5112 to a pointer type, e.g. by being passed to another function.
5113 In that case, unwrap both types so that we can compare the
5114 underlying records. */
5115 if (TREE_CODE (htype
) == ARRAY_TYPE
5116 || POINTER_TYPE_P (htype
))
5118 wtype
= TREE_TYPE (wtype
);
5119 htype
= TREE_TYPE (htype
);
5122 if (TYPE_MAIN_VARIANT (wtype
) == TYPE_MAIN_VARIANT (htype
))
5123 return va_list_type_node
;

/* The "standard" implementation of va_start: just assign `nextarg' to
   the variable.  */

void
std_expand_builtin_va_start (tree valist, rtx nextarg)
{
  rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
  convert_move (va_r, nextarg, 0);
}
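
/* With the "standard" (pointer) va_list, va_start reduces to a single
   store: VALIST is expanded with EXPAND_WRITE to obtain an lvalue and
   NEXTARG is moved into it.  */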
5138 /* Expand EXP, a call to __builtin_va_start. */
5141 expand_builtin_va_start (tree exp
)
5145 location_t loc
= EXPR_LOCATION (exp
);
5147 if (call_expr_nargs (exp
) < 2)
5149 error_at (loc
, "too few arguments to function %<va_start%>");
5153 if (fold_builtin_next_arg (exp
, true))
5156 nextarg
= expand_builtin_next_arg ();
5157 valist
= stabilize_va_list_loc (loc
, CALL_EXPR_ARG (exp
, 0), 1);
5159 if (targetm
.expand_builtin_va_start
)
5160 targetm
.expand_builtin_va_start (valist
, nextarg
);
5162 std_expand_builtin_va_start (valist
, nextarg
);

/* Expand EXP, a call to __builtin_va_end.  */

static rtx
expand_builtin_va_end (tree exp)
{
  tree valist = CALL_EXPR_ARG (exp, 0);

  /* Evaluate for side effects, if needed.  I hate macros that don't
     do that.  */
  if (TREE_SIDE_EFFECTS (valist))
    expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);

  return const0_rtx;
}
5182 /* Expand EXP, a call to __builtin_va_copy. We do this as a
5183 builtin rather than just as an assignment in stdarg.h because of the
5184 nastiness of array-type va_list types. */
5187 expand_builtin_va_copy (tree exp
)
5190 location_t loc
= EXPR_LOCATION (exp
);
5192 dst
= CALL_EXPR_ARG (exp
, 0);
5193 src
= CALL_EXPR_ARG (exp
, 1);
5195 dst
= stabilize_va_list_loc (loc
, dst
, 1);
5196 src
= stabilize_va_list_loc (loc
, src
, 0);
5198 gcc_assert (cfun
!= NULL
&& cfun
->decl
!= NULL_TREE
);
5200 if (TREE_CODE (targetm
.fn_abi_va_list (cfun
->decl
)) != ARRAY_TYPE
)
5202 t
= build2 (MODIFY_EXPR
, targetm
.fn_abi_va_list (cfun
->decl
), dst
, src
);
5203 TREE_SIDE_EFFECTS (t
) = 1;
5204 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
5208 rtx dstb
, srcb
, size
;
5210 /* Evaluate to pointers. */
5211 dstb
= expand_expr (dst
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
5212 srcb
= expand_expr (src
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
5213 size
= expand_expr (TYPE_SIZE_UNIT (targetm
.fn_abi_va_list (cfun
->decl
)),
5214 NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
5216 dstb
= convert_memory_address (Pmode
, dstb
);
5217 srcb
= convert_memory_address (Pmode
, srcb
);
5219 /* "Dereference" to BLKmode memories. */
5220 dstb
= gen_rtx_MEM (BLKmode
, dstb
);
5221 set_mem_alias_set (dstb
, get_alias_set (TREE_TYPE (TREE_TYPE (dst
))));
5222 set_mem_align (dstb
, TYPE_ALIGN (targetm
.fn_abi_va_list (cfun
->decl
)));
5223 srcb
= gen_rtx_MEM (BLKmode
, srcb
);
5224 set_mem_alias_set (srcb
, get_alias_set (TREE_TYPE (TREE_TYPE (src
))));
5225 set_mem_align (srcb
, TYPE_ALIGN (targetm
.fn_abi_va_list (cfun
->decl
)));
5228 emit_block_move (dstb
, srcb
, size
, BLOCK_OP_NORMAL
);

/* Expand a call to one of the builtin functions __builtin_frame_address or
   __builtin_return_address.  */
5238 expand_builtin_frame_address (tree fndecl
, tree exp
)
5240 /* The argument must be a nonnegative integer constant.
5241 It counts the number of frames to scan up the stack.
5242 The value is either the frame pointer value or the return
5243 address saved in that frame. */
5244 if (call_expr_nargs (exp
) == 0)
5245 /* Warning about missing arg was already issued. */
5247 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp
, 0)))
5249 error ("invalid argument to %qD", fndecl
);
5254 /* Number of frames to scan up the stack. */
5255 unsigned HOST_WIDE_INT count
= tree_to_uhwi (CALL_EXPR_ARG (exp
, 0));
5257 rtx tem
= expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl
), count
);
5259 /* Some ports cannot access arbitrary stack frames. */
5262 warning (0, "unsupported argument to %qD", fndecl
);
5268 /* Warn since no effort is made to ensure that any frame
5269 beyond the current one exists or can be safely reached. */
5270 warning (OPT_Wframe_address
, "calling %qD with "
5271 "a nonzero argument is unsafe", fndecl
);
5274 /* For __builtin_frame_address, return what we've got. */
5275 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
5279 && ! CONSTANT_P (tem
))
5280 tem
= copy_addr_to_reg (tem
);
5285 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
5286 failed and the caller should emit a normal call. */
5289 expand_builtin_alloca (tree exp
)
5294 tree fndecl
= get_callee_fndecl (exp
);
5295 HOST_WIDE_INT max_size
;
5296 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
5297 bool alloca_for_var
= CALL_ALLOCA_FOR_VAR_P (exp
);
5299 = (fcode
== BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5300 ? validate_arglist (exp
, INTEGER_TYPE
, INTEGER_TYPE
, INTEGER_TYPE
,
5302 : fcode
== BUILT_IN_ALLOCA_WITH_ALIGN
5303 ? validate_arglist (exp
, INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
)
5304 : validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
));
5309 /* Compute the argument. */
5310 op0
= expand_normal (CALL_EXPR_ARG (exp
, 0));
5312 /* Compute the alignment. */
5313 align
= (fcode
== BUILT_IN_ALLOCA
5315 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp
, 1)));
5317 /* Compute the maximum size. */
5318 max_size
= (fcode
== BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5319 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp
, 2))
5322 /* Allocate the desired space. If the allocation stems from the declaration
5323 of a variable-sized object, it cannot accumulate. */
5325 = allocate_dynamic_stack_space (op0
, 0, align
, max_size
, alloca_for_var
);
5326 result
= convert_memory_address (ptr_mode
, result
);
5328 /* Dynamic allocations for variables are recorded during gimplification. */
5329 if (!alloca_for_var
&& (flag_callgraph_info
& CALLGRAPH_INFO_DYNAMIC_ALLOC
))
5330 record_dynamic_alloc (exp
);
5335 /* Emit a call to __asan_allocas_unpoison call in EXP. Add to second argument
5336 of the call virtual_stack_dynamic_rtx - stack_pointer_rtx, which is the
5337 STACK_DYNAMIC_OFFSET value. See motivation for this in comment to
5338 handle_builtin_stack_restore function. */
5341 expand_asan_emit_allocas_unpoison (tree exp
)
5343 tree arg0
= CALL_EXPR_ARG (exp
, 0);
5344 tree arg1
= CALL_EXPR_ARG (exp
, 1);
5345 rtx top
= expand_expr (arg0
, NULL_RTX
, ptr_mode
, EXPAND_NORMAL
);
5346 rtx bot
= expand_expr (arg1
, NULL_RTX
, ptr_mode
, EXPAND_NORMAL
);
5347 rtx off
= expand_simple_binop (Pmode
, MINUS
, virtual_stack_dynamic_rtx
,
5348 stack_pointer_rtx
, NULL_RTX
, 0,
5350 off
= convert_modes (ptr_mode
, Pmode
, off
, 0);
5351 bot
= expand_simple_binop (ptr_mode
, PLUS
, bot
, off
, NULL_RTX
, 0,
5353 rtx ret
= init_one_libfunc ("__asan_allocas_unpoison");
5354 ret
= emit_library_call_value (ret
, NULL_RTX
, LCT_NORMAL
, ptr_mode
,
5355 top
, ptr_mode
, bot
, ptr_mode
);

/* Expand a call to bswap builtin in EXP.
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function in-line.  If convenient, the result should be placed in
   TARGET.  SUBTARGET may be used as the target for computing one of EXP's
   operands.  */
5365 expand_builtin_bswap (machine_mode target_mode
, tree exp
, rtx target
,
5371 if (!validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
))
5374 arg
= CALL_EXPR_ARG (exp
, 0);
5375 op0
= expand_expr (arg
,
5376 subtarget
&& GET_MODE (subtarget
) == target_mode
5377 ? subtarget
: NULL_RTX
,
5378 target_mode
, EXPAND_NORMAL
);
5379 if (GET_MODE (op0
) != target_mode
)
5380 op0
= convert_to_mode (target_mode
, op0
, 1);
5382 target
= expand_unop (target_mode
, bswap_optab
, op0
, target
, 1);
5384 gcc_assert (target
);
5386 return convert_to_mode (target_mode
, target
, 1);

/* Expand a call to a unary builtin in EXP.
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function in-line.  If convenient, the result should be placed in
   TARGET.  SUBTARGET may be used as the target for computing one of EXP's
   operands.  */
5395 expand_builtin_unop (machine_mode target_mode
, tree exp
, rtx target
,
5396 rtx subtarget
, optab op_optab
)
5400 if (!validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
))
5403 /* Compute the argument. */
5404 op0
= expand_expr (CALL_EXPR_ARG (exp
, 0),
5406 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 0)))
5407 == GET_MODE (subtarget
))) ? subtarget
: NULL_RTX
,
5408 VOIDmode
, EXPAND_NORMAL
);
5409 /* Compute op, into TARGET if possible.
5410 Set TARGET to wherever the result comes back. */
5411 target
= expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 0))),
5412 op_optab
, op0
, target
, op_optab
!= clrsb_optab
);
5413 gcc_assert (target
);
5415 return convert_to_mode (target_mode
, target
, 0);
5418 /* Expand a call to __builtin_expect. We just return our argument
5419 as the builtin_expect semantic should've been already executed by
5420 tree branch prediction pass. */
5423 expand_builtin_expect (tree exp
, rtx target
)
5427 if (call_expr_nargs (exp
) < 2)
5429 arg
= CALL_EXPR_ARG (exp
, 0);
5431 target
= expand_expr (arg
, target
, VOIDmode
, EXPAND_NORMAL
);
5432 /* When guessing was done, the hints should be already stripped away. */
5433 gcc_assert (!flag_guess_branch_prob
5434 || optimize
== 0 || seen_error ());
5438 /* Expand a call to __builtin_expect_with_probability. We just return our
5439 argument as the builtin_expect semantic should've been already executed by
5440 tree branch prediction pass. */
5443 expand_builtin_expect_with_probability (tree exp
, rtx target
)
5447 if (call_expr_nargs (exp
) < 3)
5449 arg
= CALL_EXPR_ARG (exp
, 0);
5451 target
= expand_expr (arg
, target
, VOIDmode
, EXPAND_NORMAL
);
5452 /* When guessing was done, the hints should be already stripped away. */
5453 gcc_assert (!flag_guess_branch_prob
5454 || optimize
== 0 || seen_error ());
5459 /* Expand a call to __builtin_assume_aligned. We just return our first
5460 argument as the builtin_assume_aligned semantic should've been already
5464 expand_builtin_assume_aligned (tree exp
, rtx target
)
5466 if (call_expr_nargs (exp
) < 2)
5468 target
= expand_expr (CALL_EXPR_ARG (exp
, 0), target
, VOIDmode
,
5470 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp
, 1))
5471 && (call_expr_nargs (exp
) < 3
5472 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp
, 2))));
5477 expand_builtin_trap (void)
5479 if (targetm
.have_trap ())
5481 rtx_insn
*insn
= emit_insn (targetm
.gen_trap ());
5482 /* For trap insns when not accumulating outgoing args force
5483 REG_ARGS_SIZE note to prevent crossjumping of calls with
5484 different args sizes. */
5485 if (!ACCUMULATE_OUTGOING_ARGS
)
5486 add_args_size_note (insn
, stack_pointer_delta
);
5490 tree fn
= builtin_decl_implicit (BUILT_IN_ABORT
);
5491 tree call_expr
= build_call_expr (fn
, 0);
5492 expand_call (call_expr
, NULL_RTX
, false);

/* Expand a call to __builtin_unreachable.  We do nothing except emit
   a barrier saying that control flow will not pass here.

   It is the responsibility of the program being compiled to ensure
   that control flow never reaches __builtin_unreachable.  */

static void
expand_builtin_unreachable (void)
{
  /* Use gimple_build_builtin_unreachable or builtin_decl_unreachable
     to avoid this.  */
  gcc_checking_assert (!sanitize_flags_p (SANITIZE_UNREACHABLE));
  emit_barrier ();
}

/* Expand EXP, a call to fabs, fabsf or fabsl.
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function inline.  If convenient, the result should be placed
   in TARGET.  SUBTARGET may be used as the target for computing
   the operand.  */

static rtx
expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
{
  machine_mode mode;
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
  return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
}

/* Expand EXP, a call to copysign, copysignf, or copysignl.
   Return NULL if a normal call should be emitted rather than expanding the
   function inline.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing the operand.  */

static rtx
expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
{
  rtx op0, op1;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

  arg = CALL_EXPR_ARG (exp, 1);
  op1 = expand_normal (arg);

  return expand_copysign (op0, op1, target);
}
5558 /* Emit a call to __builtin___clear_cache. */
5561 default_emit_call_builtin___clear_cache (rtx begin
, rtx end
)
5563 rtx callee
= gen_rtx_SYMBOL_REF (Pmode
,
5564 BUILTIN_ASM_NAME_PTR
5565 (BUILT_IN_CLEAR_CACHE
));
5567 emit_library_call (callee
,
5568 LCT_NORMAL
, VOIDmode
,
5569 convert_memory_address (ptr_mode
, begin
), ptr_mode
,
5570 convert_memory_address (ptr_mode
, end
), ptr_mode
);
5573 /* Emit a call to __builtin___clear_cache, unless the target specifies
5574 it as do-nothing. This function can be used by trampoline
5575 finalizers to duplicate the effects of expanding a call to the
5576 clear_cache builtin. */
5579 maybe_emit_call_builtin___clear_cache (rtx begin
, rtx end
)
5581 gcc_assert ((GET_MODE (begin
) == ptr_mode
|| GET_MODE (begin
) == Pmode
5582 || CONST_INT_P (begin
))
5583 && (GET_MODE (end
) == ptr_mode
|| GET_MODE (end
) == Pmode
5584 || CONST_INT_P (end
)));
5586 if (targetm
.have_clear_cache ())
5588 /* We have a "clear_cache" insn, and it will handle everything. */
5589 class expand_operand ops
[2];
5591 create_address_operand (&ops
[0], begin
);
5592 create_address_operand (&ops
[1], end
);
5594 if (maybe_expand_insn (targetm
.code_for_clear_cache
, 2, ops
))
5599 #ifndef CLEAR_INSN_CACHE
5600 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5601 does nothing. There is no need to call it. Do nothing. */
5603 #endif /* CLEAR_INSN_CACHE */
5606 targetm
.calls
.emit_call_builtin___clear_cache (begin
, end
);
5609 /* Expand a call to __builtin___clear_cache. */
5612 expand_builtin___clear_cache (tree exp
)
5615 rtx begin_rtx
, end_rtx
;
5617 /* We must not expand to a library call. If we did, any
5618 fallback library function in libgcc that might contain a call to
5619 __builtin___clear_cache() would recurse infinitely. */
5620 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
5622 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5626 begin
= CALL_EXPR_ARG (exp
, 0);
5627 begin_rtx
= expand_expr (begin
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
5629 end
= CALL_EXPR_ARG (exp
, 1);
5630 end_rtx
= expand_expr (end
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
5632 maybe_emit_call_builtin___clear_cache (begin_rtx
, end_rtx
);
/* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT.  */

static rtx
round_trampoline_addr (rtx tramp)
{
  rtx temp, addend, mask;

  /* If we don't need too much alignment, we'll have been guaranteed
     proper alignment by get_trampoline_type.  */
  if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
    return tramp;

  /* Round address up to desired boundary.  */
  temp = gen_reg_rtx (Pmode);
  addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
  mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);

  temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
                              temp, 0, OPTAB_LIB_WIDEN);
  tramp = expand_simple_binop (Pmode, AND, temp, mask,
                               temp, 0, OPTAB_LIB_WIDEN);

  return tramp;
}
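/* The rounding above is the usual align-up idiom, computed in RTL.  A
   minimal sketch of the same arithmetic in C, assuming ALIGN is a power of
   two (both names here are hypothetical):

     #include <stdint.h>

     static uintptr_t
     round_up (uintptr_t addr, uintptr_t align)
     {
       // (addr + align - 1) & -align rounds ADDR up to a multiple of ALIGN.
       return (addr + align - 1) & -align;
     }
*/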
5661 expand_builtin_init_trampoline (tree exp
, bool onstack
)
5663 tree t_tramp
, t_func
, t_chain
;
5664 rtx m_tramp
, r_tramp
, r_chain
, tmp
;
5666 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
,
5667 POINTER_TYPE
, VOID_TYPE
))
5670 t_tramp
= CALL_EXPR_ARG (exp
, 0);
5671 t_func
= CALL_EXPR_ARG (exp
, 1);
5672 t_chain
= CALL_EXPR_ARG (exp
, 2);
5674 r_tramp
= expand_normal (t_tramp
);
5675 m_tramp
= gen_rtx_MEM (BLKmode
, r_tramp
);
5676 MEM_NOTRAP_P (m_tramp
) = 1;
5678 /* If ONSTACK, the TRAMP argument should be the address of a field
5679 within the local function's FRAME decl. Either way, let's see if
5680 we can fill in the MEM_ATTRs for this memory. */
5681 if (TREE_CODE (t_tramp
) == ADDR_EXPR
)
5682 set_mem_attributes (m_tramp
, TREE_OPERAND (t_tramp
, 0), true);
5684 /* Creator of a heap trampoline is responsible for making sure the
5685 address is aligned to at least STACK_BOUNDARY. Normally malloc
5686 will ensure this anyhow. */
5687 tmp
= round_trampoline_addr (r_tramp
);
5690 m_tramp
= change_address (m_tramp
, BLKmode
, tmp
);
5691 set_mem_align (m_tramp
, TRAMPOLINE_ALIGNMENT
);
5692 set_mem_size (m_tramp
, TRAMPOLINE_SIZE
);
5695 /* The FUNC argument should be the address of the nested function.
5696 Extract the actual function decl to pass to the hook. */
5697 gcc_assert (TREE_CODE (t_func
) == ADDR_EXPR
);
5698 t_func
= TREE_OPERAND (t_func
, 0);
5699 gcc_assert (TREE_CODE (t_func
) == FUNCTION_DECL
);
5701 r_chain
= expand_normal (t_chain
);
5703 /* Generate insns to initialize the trampoline. */
5704 targetm
.calls
.trampoline_init (m_tramp
, t_func
, r_chain
);
5708 trampolines_created
= 1;
5710 if (targetm
.calls
.custom_function_descriptors
!= 0)
5711 warning_at (DECL_SOURCE_LOCATION (t_func
), OPT_Wtrampolines
,
5712 "trampoline generated for nested function %qD", t_func
);
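/* Illustrative sketch (not part of GCC): taking the address of a GNU C
   nested function that uses its enclosing frame forces a trampoline to be
   materialized, which is what the code above initializes and what
   -Wtrampolines reports.

     int call_twice (int (*fn) (int), int x)
     {
       return fn (x) + fn (x);
     }

     int outer (int base)
     {
       int add_base (int v) { return v + base; }  // nested, uses BASE
       return call_twice (add_base, 1);           // address taken => trampoline
     }
*/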
5719 expand_builtin_adjust_trampoline (tree exp
)
5723 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
5726 tramp
= expand_normal (CALL_EXPR_ARG (exp
, 0));
5727 tramp
= round_trampoline_addr (tramp
);
5728 if (targetm
.calls
.trampoline_adjust_address
)
5729 tramp
= targetm
.calls
.trampoline_adjust_address (tramp
);
5734 /* Expand a call to the builtin descriptor initialization routine.
5735 A descriptor is made up of a couple of pointers to the static
5736 chain and the code entry in this order. */
5739 expand_builtin_init_descriptor (tree exp
)
5741 tree t_descr
, t_func
, t_chain
;
5742 rtx m_descr
, r_descr
, r_func
, r_chain
;
5744 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, POINTER_TYPE
,
5748 t_descr
= CALL_EXPR_ARG (exp
, 0);
5749 t_func
= CALL_EXPR_ARG (exp
, 1);
5750 t_chain
= CALL_EXPR_ARG (exp
, 2);
5752 r_descr
= expand_normal (t_descr
);
5753 m_descr
= gen_rtx_MEM (BLKmode
, r_descr
);
5754 MEM_NOTRAP_P (m_descr
) = 1;
5755 set_mem_align (m_descr
, GET_MODE_ALIGNMENT (ptr_mode
));
5757 r_func
= expand_normal (t_func
);
5758 r_chain
= expand_normal (t_chain
);
5760 /* Generate insns to initialize the descriptor. */
5761 emit_move_insn (adjust_address_nv (m_descr
, ptr_mode
, 0), r_chain
);
5762 emit_move_insn (adjust_address_nv (m_descr
, ptr_mode
,
5763 POINTER_SIZE
/ BITS_PER_UNIT
), r_func
);
5768 /* Expand a call to the builtin descriptor adjustment routine. */
5771 expand_builtin_adjust_descriptor (tree exp
)
5775 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
5778 tramp
= expand_normal (CALL_EXPR_ARG (exp
, 0));
5780 /* Unalign the descriptor to allow runtime identification. */
5781 tramp
= plus_constant (ptr_mode
, tramp
,
5782 targetm
.calls
.custom_function_descriptors
);
5784 return force_operand (tramp
, NULL_RTX
);
5787 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5788 function. The function first checks whether the back end provides
5789 an insn to implement signbit for the respective mode. If not, it
5790 checks whether the floating point format of the value is such that
5791 the sign bit can be extracted. If that is not the case, error out.
5792 EXP is the expression that is a call to the builtin function; if
5793 convenient, the result should be placed in TARGET. */
5795 expand_builtin_signbit (tree exp
, rtx target
)
5797 const struct real_format
*fmt
;
5798 scalar_float_mode fmode
;
5799 scalar_int_mode rmode
, imode
;
5802 enum insn_code icode
;
5804 location_t loc
= EXPR_LOCATION (exp
);
5806 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
5809 arg
= CALL_EXPR_ARG (exp
, 0);
5810 fmode
= SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg
));
5811 rmode
= SCALAR_INT_TYPE_MODE (TREE_TYPE (exp
));
5812 fmt
= REAL_MODE_FORMAT (fmode
);
5814 arg
= builtin_save_expr (arg
);
5816 /* Expand the argument yielding a RTX expression. */
5817 temp
= expand_normal (arg
);
5819 /* Check if the back end provides an insn that handles signbit for the
5821 icode
= optab_handler (signbit_optab
, fmode
);
5822 if (icode
!= CODE_FOR_nothing
)
5824 rtx_insn
*last
= get_last_insn ();
5825 rtx this_target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
5826 if (maybe_emit_unop_insn (icode
, this_target
, temp
, UNKNOWN
))
5828 delete_insns_since (last
);
5831 /* For floating point formats without a sign bit, implement signbit
5833 bitpos
= fmt
->signbit_ro
;
5836 /* But we can't do this if the format supports signed zero. */
5837 gcc_assert (!fmt
->has_signed_zero
|| !HONOR_SIGNED_ZEROS (fmode
));
5839 arg
= fold_build2_loc (loc
, LT_EXPR
, TREE_TYPE (exp
), arg
,
5840 build_real (TREE_TYPE (arg
), dconst0
));
5841 return expand_expr (arg
, target
, VOIDmode
, EXPAND_NORMAL
);
5844 if (GET_MODE_SIZE (fmode
) <= UNITS_PER_WORD
)
5846 imode
= int_mode_for_mode (fmode
).require ();
5847 temp
= gen_lowpart (imode
, temp
);
5852 /* Handle targets with different FP word orders. */
5853 if (FLOAT_WORDS_BIG_ENDIAN
)
5854 word
= (GET_MODE_BITSIZE (fmode
) - bitpos
) / BITS_PER_WORD
;
5856 word
= bitpos
/ BITS_PER_WORD
;
5857 temp
= operand_subword_force (temp
, word
, fmode
);
5858 bitpos
= bitpos
% BITS_PER_WORD
;
5861 /* Force the intermediate word_mode (or narrower) result into a
5862 register. This avoids attempting to create paradoxical SUBREGs
5863 of floating point modes below. */
5864 temp
= force_reg (imode
, temp
);
  /* If the bitpos is within the "result mode" lowpart, the operation
     can be implemented with a single bitwise AND.  Otherwise, we need
     a right shift and an AND.  */
5870 if (bitpos
< GET_MODE_BITSIZE (rmode
))
5872 wide_int mask
= wi::set_bit_in_zero (bitpos
, GET_MODE_PRECISION (rmode
));
5874 if (GET_MODE_SIZE (imode
) > GET_MODE_SIZE (rmode
))
5875 temp
= gen_lowpart (rmode
, temp
);
5876 temp
= expand_binop (rmode
, and_optab
, temp
,
5877 immed_wide_int_const (mask
, rmode
),
5878 NULL_RTX
, 1, OPTAB_LIB_WIDEN
);
5882 /* Perform a logical right shift to place the signbit in the least
5883 significant bit, then truncate the result to the desired mode
5884 and mask just this bit. */
5885 temp
= expand_shift (RSHIFT_EXPR
, imode
, temp
, bitpos
, NULL_RTX
, 1);
5886 temp
= gen_lowpart (rmode
, temp
);
5887 temp
= expand_binop (rmode
, and_optab
, temp
, const1_rtx
,
5888 NULL_RTX
, 1, OPTAB_LIB_WIDEN
);
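/* The shift-and-mask fallback above is the same computation a user could
   write by reinterpreting the representation.  A hedged sketch for IEEE
   double on a 64-bit target (memcpy avoids strict-aliasing issues):

     #include <stdint.h>
     #include <string.h>

     static int
     sign_bit_of (double x)
     {
       uint64_t bits;
       memcpy (&bits, &x, sizeof bits);   // reinterpret the representation
       return (int) (bits >> 63);         // sign bit is the top bit
     }
*/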
/* Expand fork or exec calls.  TARGET is the desired target of the
   call.  EXP is the call.  FN is the declaration of the actual function.
   IGNORE is nonzero if the value is to be ignored.  */
5900 expand_builtin_fork_or_exec (tree fn
, tree exp
, rtx target
, int ignore
)
5905 /* If we are not profiling, just call the function. */
5906 if (!profile_arc_flag
)
5909 /* Otherwise call the wrapper. This should be equivalent for the rest of
5910 compiler, so the code does not diverge, and the wrapper may run the
5911 code necessary for keeping the profiling sane. */
5913 switch (DECL_FUNCTION_CODE (fn
))
5916 id
= get_identifier ("__gcov_fork");
5919 case BUILT_IN_EXECL
:
5920 id
= get_identifier ("__gcov_execl");
5923 case BUILT_IN_EXECV
:
5924 id
= get_identifier ("__gcov_execv");
5927 case BUILT_IN_EXECLP
:
5928 id
= get_identifier ("__gcov_execlp");
5931 case BUILT_IN_EXECLE
:
5932 id
= get_identifier ("__gcov_execle");
5935 case BUILT_IN_EXECVP
:
5936 id
= get_identifier ("__gcov_execvp");
5939 case BUILT_IN_EXECVE
:
5940 id
= get_identifier ("__gcov_execve");
5947 decl
= build_decl (DECL_SOURCE_LOCATION (fn
),
5948 FUNCTION_DECL
, id
, TREE_TYPE (fn
));
5949 DECL_EXTERNAL (decl
) = 1;
5950 TREE_PUBLIC (decl
) = 1;
5951 DECL_ARTIFICIAL (decl
) = 1;
5952 TREE_NOTHROW (decl
) = 1;
5953 DECL_VISIBILITY (decl
) = VISIBILITY_DEFAULT
;
5954 DECL_VISIBILITY_SPECIFIED (decl
) = 1;
5955 call
= rewrite_call_expr (EXPR_LOCATION (exp
), exp
, 0, decl
, 0);
5956 return expand_call (call
, target
, ignore
);
/* Reconstitute a mode for a __sync intrinsic operation.  Since the type of
   the pointer in these functions is void*, the tree optimizers may remove
   casts.  The mode computed in expand_builtin isn't reliable either, due
   to __sync_bool_compare_and_swap.

   FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
   group of builtins.  This gives us log2 of the mode size.  */

static inline machine_mode
get_builtin_sync_mode (int fcode_diff)
{
  /* The size is not negotiable, so ask not to get BLKmode in return
     if the target indicates that a smaller size would be better.  */
  return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
}
/* Expand the memory expression LOC and return the appropriate memory operand
   for the builtin_sync operations.  */

static rtx
get_builtin_sync_mem (tree loc, machine_mode mode)
{
  rtx addr, mem;
  int addr_space = TYPE_ADDR_SPACE (POINTER_TYPE_P (TREE_TYPE (loc))
                                    ? TREE_TYPE (TREE_TYPE (loc))
                                    : TREE_TYPE (loc));
  scalar_int_mode addr_mode = targetm.addr_space.address_mode (addr_space);

  addr = expand_expr (loc, NULL_RTX, addr_mode, EXPAND_SUM);
  addr = convert_memory_address (addr_mode, addr);

  /* Note that we explicitly do not want any alias information for this
     memory, so that we kill all other live memories.  Otherwise we don't
     satisfy the full barrier semantics of the intrinsic.  */
  mem = gen_rtx_MEM (mode, addr);

  set_mem_addr_space (mem, addr_space);

  mem = validize_mem (mem);

  /* The alignment needs to be at least according to that of the mode.  */
  set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
                           get_pointer_alignment (loc)));
  set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
  MEM_VOLATILE_P (mem) = 1;
6010 /* Make sure an argument is in the right mode.
6011 EXP is the tree argument.
6012 MODE is the mode it should be in. */
6015 expand_expr_force_mode (tree exp
, machine_mode mode
)
6018 machine_mode old_mode
;
6020 if (TREE_CODE (exp
) == SSA_NAME
6021 && TYPE_MODE (TREE_TYPE (exp
)) != mode
)
6023 /* Undo argument promotion if possible, as combine might not
6024 be able to do it later due to MEM_VOLATILE_P uses in the
6026 gimple
*g
= get_gimple_for_ssa_name (exp
);
6027 if (g
&& gimple_assign_cast_p (g
))
6029 tree rhs
= gimple_assign_rhs1 (g
);
6030 tree_code code
= gimple_assign_rhs_code (g
);
6031 if (CONVERT_EXPR_CODE_P (code
)
6032 && TYPE_MODE (TREE_TYPE (rhs
)) == mode
6033 && INTEGRAL_TYPE_P (TREE_TYPE (exp
))
6034 && INTEGRAL_TYPE_P (TREE_TYPE (rhs
))
6035 && (TYPE_PRECISION (TREE_TYPE (exp
))
6036 > TYPE_PRECISION (TREE_TYPE (rhs
))))
6041 val
= expand_expr (exp
, NULL_RTX
, mode
, EXPAND_NORMAL
);
6042 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6043 of CONST_INTs, where we know the old_mode only from the call argument. */
6045 old_mode
= GET_MODE (val
);
6046 if (old_mode
== VOIDmode
)
6047 old_mode
= TYPE_MODE (TREE_TYPE (exp
));
6048 val
= convert_modes (mode
, old_mode
, val
, 1);
6053 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
6054 EXP is the CALL_EXPR. CODE is the rtx code
6055 that corresponds to the arithmetic or logical operation from the name;
6056 an exception here is that NOT actually means NAND. TARGET is an optional
6057 place for us to store the results; AFTER is true if this is the
6058 fetch_and_xxx form. */
6061 expand_builtin_sync_operation (machine_mode mode
, tree exp
,
6062 enum rtx_code code
, bool after
,
6066 location_t loc
= EXPR_LOCATION (exp
);
6068 if (code
== NOT
&& warn_sync_nand
)
6070 tree fndecl
= get_callee_fndecl (exp
);
6071 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
6073 static bool warned_f_a_n
, warned_n_a_f
;
6077 case BUILT_IN_SYNC_FETCH_AND_NAND_1
:
6078 case BUILT_IN_SYNC_FETCH_AND_NAND_2
:
6079 case BUILT_IN_SYNC_FETCH_AND_NAND_4
:
6080 case BUILT_IN_SYNC_FETCH_AND_NAND_8
:
6081 case BUILT_IN_SYNC_FETCH_AND_NAND_16
:
6085 fndecl
= builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N
);
6086 inform (loc
, "%qD changed semantics in GCC 4.4", fndecl
);
6087 warned_f_a_n
= true;
6090 case BUILT_IN_SYNC_NAND_AND_FETCH_1
:
6091 case BUILT_IN_SYNC_NAND_AND_FETCH_2
:
6092 case BUILT_IN_SYNC_NAND_AND_FETCH_4
:
6093 case BUILT_IN_SYNC_NAND_AND_FETCH_8
:
6094 case BUILT_IN_SYNC_NAND_AND_FETCH_16
:
6098 fndecl
= builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N
);
6099 inform (loc
, "%qD changed semantics in GCC 4.4", fndecl
);
6100 warned_n_a_f
= true;
6108 /* Expand the operands. */
6109 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
6110 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
6112 return expand_atomic_fetch_op (target
, mem
, val
, code
, MEMMODEL_SYNC_SEQ_CST
,
6116 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
6117 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
6118 true if this is the boolean form. TARGET is a place for us to store the
6119 results; this is NOT optional if IS_BOOL is true. */
6122 expand_builtin_compare_and_swap (machine_mode mode
, tree exp
,
6123 bool is_bool
, rtx target
)
6125 rtx old_val
, new_val
, mem
;
6128 /* Expand the operands. */
6129 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
6130 old_val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
6131 new_val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 2), mode
);
6133 pbool
= poval
= NULL
;
6134 if (target
!= const0_rtx
)
6141 if (!expand_atomic_compare_and_swap (pbool
, poval
, mem
, old_val
, new_val
,
6142 false, MEMMODEL_SYNC_SEQ_CST
,
6143 MEMMODEL_SYNC_SEQ_CST
))
6149 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6150 general form is actually an atomic exchange, and some targets only
6151 support a reduced form with the second argument being a constant 1.
6152 EXP is the CALL_EXPR; TARGET is an optional place for us to store
6156 expand_builtin_sync_lock_test_and_set (machine_mode mode
, tree exp
,
6161 /* Expand the operands. */
6162 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
6163 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
6165 return expand_sync_lock_test_and_set (target
, mem
, val
);
6168 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6171 expand_builtin_sync_lock_release (machine_mode mode
, tree exp
)
6175 /* Expand the operands. */
6176 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
6178 expand_atomic_store (mem
, const0_rtx
, MEMMODEL_SYNC_RELEASE
, true);
/* Given an integer representing an ``enum memmodel'', verify its
   correctness and return the memory model enum.  */

static enum memmodel
get_memmodel (tree exp)
{
  /* If the parameter is not a constant, it's a run time value so we'll just
     convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking.  */
  if (TREE_CODE (exp) != INTEGER_CST)
    return MEMMODEL_SEQ_CST;

  rtx op = expand_normal (exp);

  unsigned HOST_WIDE_INT val = INTVAL (op);
  if (targetm.memmodel_check)
    val = targetm.memmodel_check (val);
  else if (val & ~MEMMODEL_MASK)
    return MEMMODEL_SEQ_CST;

  /* Should never see a user explicit SYNC memory model, so >= LAST works.  */
  if (memmodel_base (val) >= MEMMODEL_LAST)
    return MEMMODEL_SEQ_CST;

  /* Workaround for Bugzilla 59448.  GCC doesn't track consume properly, so
     be conservative and promote consume to acquire.  */
  if (val == MEMMODEL_CONSUME)
    val = MEMMODEL_ACQUIRE;

  return (enum memmodel) val;
}
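/* Illustrative note (not part of GCC): the memory-order argument of the
   __atomic builtins is expected to be a compile-time constant; as the code
   above shows, anything else is conservatively treated as seq_cst, and
   consume is promoted to acquire.

     int load_relaxed (int *p)
     {
       return __atomic_load_n (p, __ATOMIC_RELAXED);   // constant: honored
     }

     int load_dynamic (int *p, int order)
     {
       return __atomic_load_n (p, order);   // non-constant: treated as seq_cst
     }
*/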
6212 /* Expand the __atomic_exchange intrinsic:
6213 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
6214 EXP is the CALL_EXPR.
6215 TARGET is an optional place for us to store the results. */
6218 expand_builtin_atomic_exchange (machine_mode mode
, tree exp
, rtx target
)
6221 enum memmodel model
;
6223 model
= get_memmodel (CALL_EXPR_ARG (exp
, 2));
6225 if (!flag_inline_atomics
)
6228 /* Expand the operands. */
6229 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
6230 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
6232 return expand_atomic_exchange (target
, mem
, val
, model
);
6235 /* Expand the __atomic_compare_exchange intrinsic:
6236 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
6237 TYPE desired, BOOL weak,
6238 enum memmodel success,
6239 enum memmodel failure)
6240 EXP is the CALL_EXPR.
6241 TARGET is an optional place for us to store the results. */
6244 expand_builtin_atomic_compare_exchange (machine_mode mode
, tree exp
,
6247 rtx expect
, desired
, mem
, oldval
;
6248 rtx_code_label
*label
;
6252 memmodel success
= get_memmodel (CALL_EXPR_ARG (exp
, 4));
6253 memmodel failure
= get_memmodel (CALL_EXPR_ARG (exp
, 5));
6255 if (failure
> success
)
6256 success
= MEMMODEL_SEQ_CST
;
6258 if (is_mm_release (failure
) || is_mm_acq_rel (failure
))
6260 failure
= MEMMODEL_SEQ_CST
;
6261 success
= MEMMODEL_SEQ_CST
;
6265 if (!flag_inline_atomics
)
6268 /* Expand the operands. */
6269 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
6271 expect
= expand_normal (CALL_EXPR_ARG (exp
, 1));
6272 expect
= convert_memory_address (Pmode
, expect
);
6273 expect
= gen_rtx_MEM (mode
, expect
);
6274 desired
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 2), mode
);
6276 weak
= CALL_EXPR_ARG (exp
, 3);
6278 if (tree_fits_shwi_p (weak
) && tree_to_shwi (weak
) != 0)
6281 if (target
== const0_rtx
)
  /* Lest the rtl backend create a race condition with an improper store
     to memory, always create a new pseudo for OLDVAL.  */
6288 if (!expand_atomic_compare_and_swap (&target
, &oldval
, mem
, expect
, desired
,
6289 is_weak
, success
, failure
))
6292 /* Conditionally store back to EXPECT, lest we create a race condition
6293 with an improper store to memory. */
6294 /* ??? With a rearrangement of atomics at the gimple level, we can handle
6295 the normal case where EXPECT is totally private, i.e. a register. At
6296 which point the store can be unconditional. */
6297 label
= gen_label_rtx ();
6298 emit_cmp_and_jump_insns (target
, const0_rtx
, NE
, NULL
,
6299 GET_MODE (target
), 1, label
);
6300 emit_move_insn (expect
, oldval
);
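/* Illustrative usage sketch (not part of GCC): the conditional store back to
   EXPECT above is what lets a user-level retry loop observe the current
   value on failure.

     int fetch_and_double (int *p)
     {
       int expected = __atomic_load_n (p, __ATOMIC_RELAXED);
       // The 0 is the WEAK flag.  On failure, EXPECTED is refreshed from *P
       // by the builtin, so the loop retries with the freshly seen value.
       while (!__atomic_compare_exchange_n (p, &expected, expected * 2, 0,
                                            __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST))
         ;
       return expected;
     }
*/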
6306 /* Helper function for expand_ifn_atomic_compare_exchange - expand
6307 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
6308 call. The weak parameter must be dropped to match the expected parameter
6309 list and the expected argument changed from value to pointer to memory
6313 expand_ifn_atomic_compare_exchange_into_call (gcall
*call
, machine_mode mode
)
6316 vec
<tree
, va_gc
> *vec
;
6319 vec
->quick_push (gimple_call_arg (call
, 0));
6320 tree expected
= gimple_call_arg (call
, 1);
6321 rtx x
= assign_stack_temp_for_type (mode
, GET_MODE_SIZE (mode
),
6322 TREE_TYPE (expected
));
6323 rtx expd
= expand_expr (expected
, x
, mode
, EXPAND_NORMAL
);
6325 emit_move_insn (x
, expd
);
6326 tree v
= make_tree (TREE_TYPE (expected
), x
);
6327 vec
->quick_push (build1 (ADDR_EXPR
,
6328 build_pointer_type (TREE_TYPE (expected
)), v
));
6329 vec
->quick_push (gimple_call_arg (call
, 2));
6330 /* Skip the boolean weak parameter. */
6331 for (z
= 4; z
< 6; z
++)
6332 vec
->quick_push (gimple_call_arg (call
, z
));
6333 /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
6334 unsigned int bytes_log2
= exact_log2 (GET_MODE_SIZE (mode
).to_constant ());
6335 gcc_assert (bytes_log2
< 5);
6336 built_in_function fncode
6337 = (built_in_function
) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
6339 tree fndecl
= builtin_decl_explicit (fncode
);
6340 tree fn
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fndecl
)),
6342 tree exp
= build_call_vec (boolean_type_node
, fn
, vec
);
6343 tree lhs
= gimple_call_lhs (call
);
6344 rtx boolret
= expand_call (exp
, NULL_RTX
, lhs
== NULL_TREE
);
6347 rtx target
= expand_expr (lhs
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
6348 if (GET_MODE (boolret
) != mode
)
6349 boolret
= convert_modes (mode
, GET_MODE (boolret
), boolret
, 1);
6350 x
= force_reg (mode
, x
);
6351 write_complex_part (target
, boolret
, true, true);
6352 write_complex_part (target
, x
, false, false);
6356 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
6359 expand_ifn_atomic_compare_exchange (gcall
*call
)
6361 int size
= tree_to_shwi (gimple_call_arg (call
, 3)) & 255;
6362 gcc_assert (size
== 1 || size
== 2 || size
== 4 || size
== 8 || size
== 16);
6363 machine_mode mode
= int_mode_for_size (BITS_PER_UNIT
* size
, 0).require ();
6365 memmodel success
= get_memmodel (gimple_call_arg (call
, 4));
6366 memmodel failure
= get_memmodel (gimple_call_arg (call
, 5));
6368 if (failure
> success
)
6369 success
= MEMMODEL_SEQ_CST
;
6371 if (is_mm_release (failure
) || is_mm_acq_rel (failure
))
6373 failure
= MEMMODEL_SEQ_CST
;
6374 success
= MEMMODEL_SEQ_CST
;
6377 if (!flag_inline_atomics
)
6379 expand_ifn_atomic_compare_exchange_into_call (call
, mode
);
6383 /* Expand the operands. */
6384 rtx mem
= get_builtin_sync_mem (gimple_call_arg (call
, 0), mode
);
6386 rtx expect
= expand_expr_force_mode (gimple_call_arg (call
, 1), mode
);
6387 rtx desired
= expand_expr_force_mode (gimple_call_arg (call
, 2), mode
);
6389 bool is_weak
= (tree_to_shwi (gimple_call_arg (call
, 3)) & 256) != 0;
6394 if (!expand_atomic_compare_and_swap (&boolret
, &oldval
, mem
, expect
, desired
,
6395 is_weak
, success
, failure
))
6397 expand_ifn_atomic_compare_exchange_into_call (call
, mode
);
6401 tree lhs
= gimple_call_lhs (call
);
6404 rtx target
= expand_expr (lhs
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
6405 if (GET_MODE (boolret
) != mode
)
6406 boolret
= convert_modes (mode
, GET_MODE (boolret
), boolret
, 1);
6407 write_complex_part (target
, boolret
, true, true);
6408 write_complex_part (target
, oldval
, false, false);
6412 /* Expand the __atomic_load intrinsic:
6413 TYPE __atomic_load (TYPE *object, enum memmodel)
6414 EXP is the CALL_EXPR.
6415 TARGET is an optional place for us to store the results. */
6418 expand_builtin_atomic_load (machine_mode mode
, tree exp
, rtx target
)
6420 memmodel model
= get_memmodel (CALL_EXPR_ARG (exp
, 1));
6421 if (is_mm_release (model
) || is_mm_acq_rel (model
))
6422 model
= MEMMODEL_SEQ_CST
;
6424 if (!flag_inline_atomics
)
6427 /* Expand the operand. */
6428 rtx mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
6430 return expand_atomic_load (target
, mem
, model
);
6434 /* Expand the __atomic_store intrinsic:
6435 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6436 EXP is the CALL_EXPR.
6437 TARGET is an optional place for us to store the results. */
6440 expand_builtin_atomic_store (machine_mode mode
, tree exp
)
6442 memmodel model
= get_memmodel (CALL_EXPR_ARG (exp
, 2));
6443 if (!(is_mm_relaxed (model
) || is_mm_seq_cst (model
)
6444 || is_mm_release (model
)))
6445 model
= MEMMODEL_SEQ_CST
;
6447 if (!flag_inline_atomics
)
6450 /* Expand the operands. */
6451 rtx mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
6452 rtx val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
6454 return expand_atomic_store (mem
, val
, model
, false);
6457 /* Expand the __atomic_fetch_XXX intrinsic:
6458 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
6459 EXP is the CALL_EXPR.
6460 TARGET is an optional place for us to store the results.
6461 CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
6462 FETCH_AFTER is true if returning the result of the operation.
6463 FETCH_AFTER is false if returning the value before the operation.
6464 IGNORE is true if the result is not used.
6465 EXT_CALL is the correct builtin for an external call if this cannot be
6466 resolved to an instruction sequence. */
6469 expand_builtin_atomic_fetch_op (machine_mode mode
, tree exp
, rtx target
,
6470 enum rtx_code code
, bool fetch_after
,
6471 bool ignore
, enum built_in_function ext_call
)
6474 enum memmodel model
;
6478 model
= get_memmodel (CALL_EXPR_ARG (exp
, 2));
6480 /* Expand the operands. */
6481 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
6482 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
6484 /* Only try generating instructions if inlining is turned on. */
6485 if (flag_inline_atomics
)
6487 ret
= expand_atomic_fetch_op (target
, mem
, val
, code
, model
, fetch_after
);
6492 /* Return if a different routine isn't needed for the library call. */
6493 if (ext_call
== BUILT_IN_NONE
)
6496 /* Change the call to the specified function. */
6497 fndecl
= get_callee_fndecl (exp
);
6498 addr
= CALL_EXPR_FN (exp
);
6501 gcc_assert (TREE_OPERAND (addr
, 0) == fndecl
);
6502 TREE_OPERAND (addr
, 0) = builtin_decl_explicit (ext_call
);
6504 /* If we will emit code after the call, the call cannot be a tail call.
6505 If it is emitted as a tail call, a barrier is emitted after it, and
6506 then all trailing code is removed. */
6508 CALL_EXPR_TAILCALL (exp
) = 0;
6510 /* Expand the call here so we can emit trailing code. */
6511 ret
= expand_call (exp
, target
, ignore
);
6513 /* Replace the original function just in case it matters. */
6514 TREE_OPERAND (addr
, 0) = fndecl
;
6516 /* Then issue the arithmetic correction to return the right result. */
6521 ret
= expand_simple_binop (mode
, AND
, ret
, val
, NULL_RTX
, true,
6523 ret
= expand_simple_unop (mode
, NOT
, ret
, target
, true);
6526 ret
= expand_simple_binop (mode
, code
, ret
, val
, target
, true,
6532 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
6535 expand_ifn_atomic_bit_test_and (gcall
*call
)
6537 tree ptr
= gimple_call_arg (call
, 0);
6538 tree bit
= gimple_call_arg (call
, 1);
6539 tree flag
= gimple_call_arg (call
, 2);
6540 tree lhs
= gimple_call_lhs (call
);
6541 enum memmodel model
= MEMMODEL_SYNC_SEQ_CST
;
6542 machine_mode mode
= TYPE_MODE (TREE_TYPE (flag
));
6545 class expand_operand ops
[5];
6547 gcc_assert (flag_inline_atomics
);
6549 if (gimple_call_num_args (call
) == 5)
6550 model
= get_memmodel (gimple_call_arg (call
, 3));
6552 rtx mem
= get_builtin_sync_mem (ptr
, mode
);
6553 rtx val
= expand_expr_force_mode (bit
, mode
);
6555 switch (gimple_call_internal_fn (call
))
6557 case IFN_ATOMIC_BIT_TEST_AND_SET
:
6559 optab
= atomic_bit_test_and_set_optab
;
6561 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT
:
6563 optab
= atomic_bit_test_and_complement_optab
;
6565 case IFN_ATOMIC_BIT_TEST_AND_RESET
:
6567 optab
= atomic_bit_test_and_reset_optab
;
6573 if (lhs
== NULL_TREE
)
6575 rtx val2
= expand_simple_binop (mode
, ASHIFT
, const1_rtx
,
6576 val
, NULL_RTX
, true, OPTAB_DIRECT
);
6578 val2
= expand_simple_unop (mode
, NOT
, val2
, NULL_RTX
, true);
6579 if (expand_atomic_fetch_op (const0_rtx
, mem
, val2
, code
, model
, false))
6585 target
= expand_expr (lhs
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
6587 target
= gen_reg_rtx (mode
);
6588 enum insn_code icode
= direct_optab_handler (optab
, mode
);
6589 gcc_assert (icode
!= CODE_FOR_nothing
);
6590 create_output_operand (&ops
[0], target
, mode
);
6591 create_fixed_operand (&ops
[1], mem
);
6592 create_convert_operand_to (&ops
[2], val
, mode
, true);
6593 create_integer_operand (&ops
[3], model
);
6594 create_integer_operand (&ops
[4], integer_onep (flag
));
6595 if (maybe_expand_insn (icode
, 5, ops
))
6599 val
= expand_simple_binop (mode
, ASHIFT
, const1_rtx
,
6600 val
, NULL_RTX
, true, OPTAB_DIRECT
);
6603 val
= expand_simple_unop (mode
, NOT
, val
, NULL_RTX
, true);
6604 rtx result
= expand_atomic_fetch_op (gen_reg_rtx (mode
), mem
, val
,
6605 code
, model
, false);
6608 bool is_atomic
= gimple_call_num_args (call
) == 5;
6609 tree tcall
= gimple_call_arg (call
, 3 + is_atomic
);
6610 tree fndecl
= gimple_call_addr_fndecl (tcall
);
6611 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
6612 tree exp
= build_call_nary (type
, tcall
, 2 + is_atomic
, ptr
,
6613 make_tree (type
, val
),
6615 ? gimple_call_arg (call
, 3)
6616 : integer_zero_node
);
6617 result
= expand_builtin (exp
, gen_reg_rtx (mode
), NULL_RTX
,
6622 if (integer_onep (flag
))
6624 result
= expand_simple_binop (mode
, ASHIFTRT
, result
, bitval
,
6625 NULL_RTX
, true, OPTAB_DIRECT
);
6626 result
= expand_simple_binop (mode
, AND
, result
, const1_rtx
, target
,
6627 true, OPTAB_DIRECT
);
6630 result
= expand_simple_binop (mode
, AND
, result
, maskval
, target
, true,
6632 if (result
!= target
)
6633 emit_move_insn (target
, result
);
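/* Illustrative sketch (not part of GCC): IFN_ATOMIC_BIT_TEST_AND_SET and
   friends are matched from source patterns like the one below, where only a
   single bit of the atomic result is consumed.

     int test_and_set_bit (unsigned long *word, unsigned bit)
     {
       unsigned long mask = 1ul << bit;
       // With a suitable target insn this can become, e.g., lock bts on x86
       // instead of a full fetch_or followed by masking.
       return (__atomic_fetch_or (word, mask, __ATOMIC_SEQ_CST) & mask) != 0;
     }
*/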
6636 /* Expand IFN_ATOMIC_*_FETCH_CMP_0 internal function. */
6639 expand_ifn_atomic_op_fetch_cmp_0 (gcall
*call
)
6641 tree cmp
= gimple_call_arg (call
, 0);
6642 tree ptr
= gimple_call_arg (call
, 1);
6643 tree arg
= gimple_call_arg (call
, 2);
6644 tree lhs
= gimple_call_lhs (call
);
6645 enum memmodel model
= MEMMODEL_SYNC_SEQ_CST
;
6646 machine_mode mode
= TYPE_MODE (TREE_TYPE (cmp
));
6649 class expand_operand ops
[5];
6651 gcc_assert (flag_inline_atomics
);
6653 if (gimple_call_num_args (call
) == 5)
6654 model
= get_memmodel (gimple_call_arg (call
, 3));
6656 rtx mem
= get_builtin_sync_mem (ptr
, mode
);
6657 rtx op
= expand_expr_force_mode (arg
, mode
);
6659 switch (gimple_call_internal_fn (call
))
6661 case IFN_ATOMIC_ADD_FETCH_CMP_0
:
6663 optab
= atomic_add_fetch_cmp_0_optab
;
6665 case IFN_ATOMIC_SUB_FETCH_CMP_0
:
6667 optab
= atomic_sub_fetch_cmp_0_optab
;
6669 case IFN_ATOMIC_AND_FETCH_CMP_0
:
6671 optab
= atomic_and_fetch_cmp_0_optab
;
6673 case IFN_ATOMIC_OR_FETCH_CMP_0
:
6675 optab
= atomic_or_fetch_cmp_0_optab
;
6677 case IFN_ATOMIC_XOR_FETCH_CMP_0
:
6679 optab
= atomic_xor_fetch_cmp_0_optab
;
6685 enum rtx_code comp
= UNKNOWN
;
6686 switch (tree_to_uhwi (cmp
))
6688 case ATOMIC_OP_FETCH_CMP_0_EQ
: comp
= EQ
; break;
6689 case ATOMIC_OP_FETCH_CMP_0_NE
: comp
= NE
; break;
6690 case ATOMIC_OP_FETCH_CMP_0_GT
: comp
= GT
; break;
6691 case ATOMIC_OP_FETCH_CMP_0_GE
: comp
= GE
; break;
6692 case ATOMIC_OP_FETCH_CMP_0_LT
: comp
= LT
; break;
6693 case ATOMIC_OP_FETCH_CMP_0_LE
: comp
= LE
; break;
6694 default: gcc_unreachable ();
6698 if (lhs
== NULL_TREE
)
6699 target
= gen_reg_rtx (TYPE_MODE (boolean_type_node
));
6701 target
= expand_expr (lhs
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
6702 enum insn_code icode
= direct_optab_handler (optab
, mode
);
6703 gcc_assert (icode
!= CODE_FOR_nothing
);
6704 create_output_operand (&ops
[0], target
, TYPE_MODE (boolean_type_node
));
6705 create_fixed_operand (&ops
[1], mem
);
6706 create_convert_operand_to (&ops
[2], op
, mode
, true);
6707 create_integer_operand (&ops
[3], model
);
6708 create_integer_operand (&ops
[4], comp
);
6709 if (maybe_expand_insn (icode
, 5, ops
))
6712 rtx result
= expand_atomic_fetch_op (gen_reg_rtx (mode
), mem
, op
,
6716 bool is_atomic
= gimple_call_num_args (call
) == 5;
6717 tree tcall
= gimple_call_arg (call
, 3 + is_atomic
);
6718 tree fndecl
= gimple_call_addr_fndecl (tcall
);
6719 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
6720 tree exp
= build_call_nary (type
, tcall
,
6721 2 + is_atomic
, ptr
, arg
,
6723 ? gimple_call_arg (call
, 3)
6724 : integer_zero_node
);
6725 result
= expand_builtin (exp
, gen_reg_rtx (mode
), NULL_RTX
,
6731 result
= emit_store_flag_force (target
, comp
, result
, const0_rtx
, mode
,
6733 if (result
!= target
)
6734 emit_move_insn (target
, result
);
6738 /* Expand an atomic clear operation.
6739 void _atomic_clear (BOOL *obj, enum memmodel)
6740 EXP is the call expression. */
6743 expand_builtin_atomic_clear (tree exp
)
6745 machine_mode mode
= int_mode_for_size (BOOL_TYPE_SIZE
, 0).require ();
6746 rtx mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
6747 memmodel model
= get_memmodel (CALL_EXPR_ARG (exp
, 1));
6749 if (is_mm_consume (model
) || is_mm_acquire (model
) || is_mm_acq_rel (model
))
6750 model
= MEMMODEL_SEQ_CST
;
6752 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
6753 Failing that, a store is issued by __atomic_store. The only way this can
6754 fail is if the bool type is larger than a word size. Unlikely, but
6755 handle it anyway for completeness. Assume a single threaded model since
6756 there is no atomic support in this case, and no barriers are required. */
6757 rtx ret
= expand_atomic_store (mem
, const0_rtx
, model
, true);
6759 emit_move_insn (mem
, const0_rtx
);
/* Expand an atomic test_and_set operation.
	bool _atomic_test_and_set (BOOL *obj, enum memmodel)
   EXP is the call expression.  */

static rtx
expand_builtin_atomic_test_and_set (tree exp, rtx target)
{
  rtx mem;
  enum memmodel model;
  machine_mode mode;

  mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  model = get_memmodel (CALL_EXPR_ARG (exp, 1));

  return expand_atomic_test_and_set (target, mem, model);
}
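/* Illustrative usage sketch (not part of GCC): __atomic_test_and_set and
   __atomic_clear operate on a single byte, which is why they are the pair
   every target is expected to be able to provide.

     static unsigned char lock;   // hypothetical flag byte

     void spin_lock (void)
     {
       while (__atomic_test_and_set (&lock, __ATOMIC_ACQUIRE))
         ;   // spin until the previous value was clear
     }

     void spin_unlock (void)
     {
       __atomic_clear (&lock, __ATOMIC_RELEASE);
     }
*/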
6782 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
6783 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
6786 fold_builtin_atomic_always_lock_free (tree arg0
, tree arg1
)
6790 unsigned int mode_align
, type_align
;
6792 if (TREE_CODE (arg0
) != INTEGER_CST
)
6795 /* We need a corresponding integer mode for the access to be lock-free. */
6796 size
= INTVAL (expand_normal (arg0
)) * BITS_PER_UNIT
;
6797 if (!int_mode_for_size (size
, 0).exists (&mode
))
6798 return boolean_false_node
;
6800 mode_align
= GET_MODE_ALIGNMENT (mode
);
6802 if (TREE_CODE (arg1
) == INTEGER_CST
)
6804 unsigned HOST_WIDE_INT val
= UINTVAL (expand_normal (arg1
));
6806 /* Either this argument is null, or it's a fake pointer encoding
6807 the alignment of the object. */
6808 val
= least_bit_hwi (val
);
6809 val
*= BITS_PER_UNIT
;
6811 if (val
== 0 || mode_align
< val
)
6812 type_align
= mode_align
;
6818 tree ttype
= TREE_TYPE (arg1
);
6820 /* This function is usually invoked and folded immediately by the front
6821 end before anything else has a chance to look at it. The pointer
6822 parameter at this point is usually cast to a void *, so check for that
6823 and look past the cast. */
6824 if (CONVERT_EXPR_P (arg1
)
6825 && POINTER_TYPE_P (ttype
)
6826 && VOID_TYPE_P (TREE_TYPE (ttype
))
6827 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1
, 0))))
6828 arg1
= TREE_OPERAND (arg1
, 0);
6830 ttype
= TREE_TYPE (arg1
);
6831 gcc_assert (POINTER_TYPE_P (ttype
));
6833 /* Get the underlying type of the object. */
6834 ttype
= TREE_TYPE (ttype
);
6835 type_align
= TYPE_ALIGN (ttype
);
6838 /* If the object has smaller alignment, the lock free routines cannot
6840 if (type_align
< mode_align
)
6841 return boolean_false_node
;
6843 /* Check if a compare_and_swap pattern exists for the mode which represents
6844 the required size. The pattern is not allowed to fail, so the existence
6845 of the pattern indicates support is present. Also require that an
6846 atomic load exists for the required size. */
6847 if (can_compare_and_swap_p (mode
, true) && can_atomic_load_p (mode
))
6848 return boolean_true_node
;
6850 return boolean_false_node
;
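/* Illustrative usage sketch (not part of GCC): __atomic_always_lock_free
   folds to a compile-time constant exactly when the size argument is a
   constant and the checks above (integer mode, alignment, compare-and-swap
   and atomic-load patterns) all pass.

     #include <stddef.h>

     _Static_assert (__atomic_always_lock_free (sizeof (int), 0),
                     "int is expected to be lock-free on this target");

     int probe (size_t n, void *p)
     {
       return __atomic_is_lock_free (n, p);   // may become a libatomic call
     }
*/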
6853 /* Return true if the parameters to call EXP represent an object which will
6854 always generate lock free instructions. The first argument represents the
6855 size of the object, and the second parameter is a pointer to the object
6856 itself. If NULL is passed for the object, then the result is based on
6857 typical alignment for an object of the specified size. Otherwise return
6861 expand_builtin_atomic_always_lock_free (tree exp
)
6864 tree arg0
= CALL_EXPR_ARG (exp
, 0);
6865 tree arg1
= CALL_EXPR_ARG (exp
, 1);
6867 if (TREE_CODE (arg0
) != INTEGER_CST
)
6869 error ("non-constant argument 1 to %qs", "__atomic_always_lock_free");
6873 size
= fold_builtin_atomic_always_lock_free (arg0
, arg1
);
6874 if (size
== boolean_true_node
)
6879 /* Return a one or zero if it can be determined that object ARG1 of size ARG
6880 is lock free on this architecture. */
6883 fold_builtin_atomic_is_lock_free (tree arg0
, tree arg1
)
6885 if (!flag_inline_atomics
)
6888 /* If it isn't always lock free, don't generate a result. */
6889 if (fold_builtin_atomic_always_lock_free (arg0
, arg1
) == boolean_true_node
)
6890 return boolean_true_node
;
6895 /* Return true if the parameters to call EXP represent an object which will
6896 always generate lock free instructions. The first argument represents the
6897 size of the object, and the second parameter is a pointer to the object
6898 itself. If NULL is passed for the object, then the result is based on
6899 typical alignment for an object of the specified size. Otherwise return
6903 expand_builtin_atomic_is_lock_free (tree exp
)
6906 tree arg0
= CALL_EXPR_ARG (exp
, 0);
6907 tree arg1
= CALL_EXPR_ARG (exp
, 1);
6909 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0
)))
6911 error ("non-integer argument 1 to %qs", "__atomic_is_lock_free");
6915 if (!flag_inline_atomics
)
6918 /* If the value is known at compile time, return the RTX for it. */
6919 size
= fold_builtin_atomic_is_lock_free (arg0
, arg1
);
6920 if (size
== boolean_true_node
)
/* Expand the __atomic_thread_fence intrinsic:
	void __atomic_thread_fence (enum memmodel)
   EXP is the CALL_EXPR.  */

static void
expand_builtin_atomic_thread_fence (tree exp)
{
  enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
  expand_mem_thread_fence (model);
}

/* Expand the __atomic_signal_fence intrinsic:
	void __atomic_signal_fence (enum memmodel)
   EXP is the CALL_EXPR.  */

static void
expand_builtin_atomic_signal_fence (tree exp)
{
  enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
  expand_mem_signal_fence (model);
}

/* Expand the __sync_synchronize intrinsic.  */

static void
expand_builtin_sync_synchronize (void)
{
  expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
}
6957 expand_builtin_thread_pointer (tree exp
, rtx target
)
6959 enum insn_code icode
;
6960 if (!validate_arglist (exp
, VOID_TYPE
))
6962 icode
= direct_optab_handler (get_thread_pointer_optab
, Pmode
);
6963 if (icode
!= CODE_FOR_nothing
)
6965 class expand_operand op
;
      /* If the target is not suitable then create a new target.  */
6967 if (target
== NULL_RTX
6969 || GET_MODE (target
) != Pmode
)
6970 target
= gen_reg_rtx (Pmode
);
6971 create_output_operand (&op
, target
, Pmode
);
6972 expand_insn (icode
, 1, &op
);
6975 error ("%<__builtin_thread_pointer%> is not supported on this target");
6980 expand_builtin_set_thread_pointer (tree exp
)
6982 enum insn_code icode
;
6983 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6985 icode
= direct_optab_handler (set_thread_pointer_optab
, Pmode
);
6986 if (icode
!= CODE_FOR_nothing
)
6988 class expand_operand op
;
6989 rtx val
= expand_expr (CALL_EXPR_ARG (exp
, 0), NULL_RTX
,
6990 Pmode
, EXPAND_NORMAL
);
6991 create_input_operand (&op
, val
, Pmode
);
6992 expand_insn (icode
, 1, &op
);
6995 error ("%<__builtin_set_thread_pointer%> is not supported on this target");
/* Emit code to restore the current value of stack.  */

static void
expand_stack_restore (tree var)
{
  rtx_insn *prev;
  rtx sa = expand_normal (var);

  sa = convert_memory_address (Pmode, sa);

  prev = get_last_insn ();
  emit_stack_restore (SAVE_BLOCK, sa);

  record_new_stack_level ();

  fixup_args_size_notes (prev, get_last_insn (), 0);
}

/* Emit code to save the current value of stack.  */

static rtx
expand_stack_save (void)
{
  rtx ret = NULL_RTX;

  emit_stack_save (SAVE_BLOCK, &ret);
  return ret;
}
7028 /* Emit code to get the openacc gang, worker or vector id or size. */
7031 expand_builtin_goacc_parlevel_id_size (tree exp
, rtx target
, int ignore
)
7034 rtx fallback_retval
;
7035 rtx_insn
*(*gen_fn
) (rtx
, rtx
);
7036 switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp
)))
7038 case BUILT_IN_GOACC_PARLEVEL_ID
:
7039 name
= "__builtin_goacc_parlevel_id";
7040 fallback_retval
= const0_rtx
;
7041 gen_fn
= targetm
.gen_oacc_dim_pos
;
7043 case BUILT_IN_GOACC_PARLEVEL_SIZE
:
7044 name
= "__builtin_goacc_parlevel_size";
7045 fallback_retval
= const1_rtx
;
7046 gen_fn
= targetm
.gen_oacc_dim_size
;
7052 if (oacc_get_fn_attrib (current_function_decl
) == NULL_TREE
)
7054 error ("%qs only supported in OpenACC code", name
);
7058 tree arg
= CALL_EXPR_ARG (exp
, 0);
7059 if (TREE_CODE (arg
) != INTEGER_CST
)
7061 error ("non-constant argument 0 to %qs", name
);
7065 int dim
= TREE_INT_CST_LOW (arg
);
7069 case GOMP_DIM_WORKER
:
7070 case GOMP_DIM_VECTOR
:
7073 error ("illegal argument 0 to %qs", name
);
7080 if (target
== NULL_RTX
)
7081 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
7083 if (!targetm
.have_oacc_dim_size ())
7085 emit_move_insn (target
, fallback_retval
);
7089 rtx reg
= MEM_P (target
) ? gen_reg_rtx (GET_MODE (target
)) : target
;
7090 emit_insn (gen_fn (reg
, GEN_INT (dim
)));
7092 emit_move_insn (target
, reg
);
7097 /* Expand a string compare operation using a sequence of char comparison
7098 to get rid of the calling overhead, with result going to TARGET if
7101 VAR_STR is the variable string source;
7102 CONST_STR is the constant string source;
7103 LENGTH is the number of chars to compare;
7104 CONST_STR_N indicates which source string is the constant string;
7105 IS_MEMCMP indicates whether it's a memcmp or strcmp.
7107 to: (assume const_str_n is 2, i.e., arg2 is a constant string)
7109 target = (int) (unsigned char) var_str[0]
7110 - (int) (unsigned char) const_str[0];
7114 target = (int) (unsigned char) var_str[length - 2]
7115 - (int) (unsigned char) const_str[length - 2];
7118 target = (int) (unsigned char) var_str[length - 1]
7119 - (int) (unsigned char) const_str[length - 1];
7124 inline_string_cmp (rtx target
, tree var_str
, const char *const_str
,
7125 unsigned HOST_WIDE_INT length
,
7126 int const_str_n
, machine_mode mode
)
7128 HOST_WIDE_INT offset
= 0;
7130 = get_memory_rtx (var_str
, build_int_cst (unsigned_type_node
,length
));
7131 rtx var_rtx
= NULL_RTX
;
7132 rtx const_rtx
= NULL_RTX
;
7133 rtx result
= target
? target
: gen_reg_rtx (mode
);
7134 rtx_code_label
*ne_label
= gen_label_rtx ();
7135 tree unit_type_node
= unsigned_char_type_node
;
7136 scalar_int_mode unit_mode
7137 = as_a
<scalar_int_mode
> TYPE_MODE (unit_type_node
);
7141 for (unsigned HOST_WIDE_INT i
= 0; i
< length
; i
++)
7144 = adjust_address (var_rtx_array
, TYPE_MODE (unit_type_node
), offset
);
7145 const_rtx
= c_readstr (const_str
+ offset
, unit_mode
);
7146 rtx op0
= (const_str_n
== 1) ? const_rtx
: var_rtx
;
7147 rtx op1
= (const_str_n
== 1) ? var_rtx
: const_rtx
;
7149 op0
= convert_modes (mode
, unit_mode
, op0
, 1);
7150 op1
= convert_modes (mode
, unit_mode
, op1
, 1);
7151 rtx diff
= expand_simple_binop (mode
, MINUS
, op0
, op1
,
7152 result
, 1, OPTAB_WIDEN
);
7154 /* Force the difference into result register. We cannot reassign
7155 result here ("result = diff") or we may end up returning
7156 uninitialized result when expand_simple_binop allocates a new
7157 pseudo-register for returning. */
7159 emit_move_insn (result
, diff
);
7162 emit_cmp_and_jump_insns (result
, CONST0_RTX (mode
), NE
, NULL_RTX
,
7163 mode
, true, ne_label
);
7164 offset
+= GET_MODE_SIZE (unit_mode
);
7167 emit_label (ne_label
);
7168 rtx_insn
*insns
= get_insns ();
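/* Illustrative sketch (not part of GCC): for a call like strcmp (s, "hi")
   at -O2, the loop above emits roughly the following straight-line
   comparison, jumping out at the first difference.

     int r = (int) (unsigned char) s[0] - (int) (unsigned char) 'h';
     if (r != 0) goto done;
     r = (int) (unsigned char) s[1] - (int) (unsigned char) 'i';
     if (r != 0) goto done;
     r = (int) (unsigned char) s[2] - (int) (unsigned char) '\0';
   done:
     return r;
*/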
/* Inline expansion of a call to str(n)cmp and memcmp, with result going
   to TARGET if that's convenient.
   If the call has not been inlined, return NULL_RTX.  */
7180 inline_expand_builtin_bytecmp (tree exp
, rtx target
)
7182 tree fndecl
= get_callee_fndecl (exp
);
7183 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
7184 bool is_ncmp
= (fcode
== BUILT_IN_STRNCMP
|| fcode
== BUILT_IN_MEMCMP
);
7186 /* Do NOT apply this inlining expansion when optimizing for size or
7187 optimization level below 2 or if unused *cmp hasn't been DCEd. */
7188 if (optimize
< 2 || optimize_insn_for_size_p () || target
== const0_rtx
)
7191 gcc_checking_assert (fcode
== BUILT_IN_STRCMP
7192 || fcode
== BUILT_IN_STRNCMP
7193 || fcode
== BUILT_IN_MEMCMP
);
  /* On a target where the type of the call (int) has the same or narrower
     precision than unsigned char, give up the inlining expansion.  */
7197 if (TYPE_PRECISION (unsigned_char_type_node
)
7198 >= TYPE_PRECISION (TREE_TYPE (exp
)))
7201 tree arg1
= CALL_EXPR_ARG (exp
, 0);
7202 tree arg2
= CALL_EXPR_ARG (exp
, 1);
7203 tree len3_tree
= is_ncmp
? CALL_EXPR_ARG (exp
, 2) : NULL_TREE
;
7205 unsigned HOST_WIDE_INT len1
= 0;
7206 unsigned HOST_WIDE_INT len2
= 0;
7207 unsigned HOST_WIDE_INT len3
= 0;
7209 /* Get the object representation of the initializers of ARG1 and ARG2
7210 as strings, provided they refer to constant objects, with their byte
7211 sizes in LEN1 and LEN2, respectively. */
7212 const char *bytes1
= getbyterep (arg1
, &len1
);
7213 const char *bytes2
= getbyterep (arg2
, &len2
);
7215 /* Fail if neither argument refers to an initialized constant. */
7216 if (!bytes1
&& !bytes2
)
7221 /* Fail if the memcmp/strncmp bound is not a constant. */
7222 if (!tree_fits_uhwi_p (len3_tree
))
7225 len3
= tree_to_uhwi (len3_tree
);
7227 if (fcode
== BUILT_IN_MEMCMP
)
7229 /* Fail if the memcmp bound is greater than the size of either
7230 of the two constant objects. */
7231 if ((bytes1
&& len1
< len3
)
7232 || (bytes2
&& len2
< len3
))
7237 if (fcode
!= BUILT_IN_MEMCMP
)
7239 /* For string functions (i.e., strcmp and strncmp) reduce LEN1
7240 and LEN2 to the length of the nul-terminated string stored
7243 len1
= strnlen (bytes1
, len1
) + 1;
7245 len2
= strnlen (bytes2
, len2
) + 1;
7248 /* See inline_string_cmp. */
7254 else if (len2
> len1
)
7259 /* For strncmp only, compute the new bound as the smallest of
7260 the lengths of the two strings (plus 1) and the bound provided
7262 unsigned HOST_WIDE_INT bound
= (const_str_n
== 1) ? len1
: len2
;
7263 if (is_ncmp
&& len3
< bound
)
7266 /* If the bound of the comparison is larger than the threshold,
7268 if (bound
> (unsigned HOST_WIDE_INT
) param_builtin_string_cmp_inline_length
)
7271 machine_mode mode
= TYPE_MODE (TREE_TYPE (exp
));
7273 /* Now, start inline expansion the call. */
7274 return inline_string_cmp (target
, (const_str_n
== 1) ? arg2
: arg1
,
7275 (const_str_n
== 1) ? bytes1
: bytes2
, bound
,
7279 /* Expand a call to __builtin_speculation_safe_value_<N>. MODE
7280 represents the size of the first argument to that call, or VOIDmode
7281 if the argument is a pointer. IGNORE will be true if the result
7284 expand_speculation_safe_value (machine_mode mode
, tree exp
, rtx target
,
7288 unsigned nargs
= call_expr_nargs (exp
);
7290 tree arg0
= CALL_EXPR_ARG (exp
, 0);
7292 if (mode
== VOIDmode
)
7294 mode
= TYPE_MODE (TREE_TYPE (arg0
));
7295 gcc_assert (GET_MODE_CLASS (mode
) == MODE_INT
);
7298 val
= expand_expr (arg0
, NULL_RTX
, mode
, EXPAND_NORMAL
);
7300 /* An optional second argument can be used as a failsafe value on
7301 some machines. If it isn't present, then the failsafe value is
7305 tree arg1
= CALL_EXPR_ARG (exp
, 1);
7306 failsafe
= expand_expr (arg1
, NULL_RTX
, mode
, EXPAND_NORMAL
);
7309 failsafe
= const0_rtx
;
7311 /* If the result isn't used, the behavior is undefined. It would be
7312 nice to emit a warning here, but path splitting means this might
7313 happen with legitimate code. So simply drop the builtin
7314 expansion in that case; we've handled any side-effects above. */
7318 /* If we don't have a suitable target, create one to hold the result. */
7319 if (target
== NULL
|| GET_MODE (target
) != mode
)
7320 target
= gen_reg_rtx (mode
);
7322 if (GET_MODE (val
) != mode
&& GET_MODE (val
) != VOIDmode
)
7323 val
= convert_modes (mode
, VOIDmode
, val
, false);
7325 return targetm
.speculation_safe_value (mode
, target
, val
, failsafe
);
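/* Illustrative usage sketch (not part of GCC): the builtin is meant to
   defeat Spectre-style value speculation after a bounds check; on targets
   without a speculation barrier it may simply return its first argument.

     #include <stddef.h>

     int load_checked (const int *array, size_t n, size_t untrusted_idx)
     {
       if (untrusted_idx >= n)
         return 0;
       // Under misspeculation of the branch above, the returned index is
       // forced to the failsafe value 0 rather than the attacker-chosen one.
       size_t idx = __builtin_speculation_safe_value (untrusted_idx, (size_t) 0);
       return array[idx];
     }
*/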
7328 /* Expand an expression EXP that calls a built-in function,
7329 with result going to TARGET if that's convenient
7330 (and in mode MODE if that's convenient).
7331 SUBTARGET may be used as the target for computing one of EXP's operands.
7332 IGNORE is nonzero if the value is to be ignored. */
7335 expand_builtin (tree exp
, rtx target
, rtx subtarget
, machine_mode mode
,
7338 tree fndecl
= get_callee_fndecl (exp
);
7339 machine_mode target_mode
= TYPE_MODE (TREE_TYPE (exp
));
7342 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
7343 return targetm
.expand_builtin (exp
, target
, subtarget
, mode
, ignore
);
7345 /* When ASan is enabled, we don't want to expand some memory/string
7346 builtins and rely on libsanitizer's hooks. This allows us to avoid
7347 redundant checks and be sure, that possible overflow will be detected
7350 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
7351 if (param_asan_kernel_mem_intrinsic_prefix
7352 && sanitize_flags_p (SANITIZE_KERNEL_ADDRESS
7353 | SANITIZE_KERNEL_HWADDRESS
))
7356 rtx save_decl_rtl
, ret
;
7357 case BUILT_IN_MEMCPY
:
7358 case BUILT_IN_MEMMOVE
:
7359 case BUILT_IN_MEMSET
:
7360 save_decl_rtl
= DECL_RTL (fndecl
);
7361 DECL_RTL (fndecl
) = asan_memfn_rtl (fndecl
);
7362 ret
= expand_call (exp
, target
, ignore
);
7363 DECL_RTL (fndecl
) = save_decl_rtl
;
7368 if (sanitize_flags_p (SANITIZE_ADDRESS
) && asan_intercepted_p (fcode
))
7369 return expand_call (exp
, target
, ignore
);
7371 /* When not optimizing, generate calls to library functions for a certain
7374 && !called_as_built_in (fndecl
)
7375 && fcode
!= BUILT_IN_FORK
7376 && fcode
!= BUILT_IN_EXECL
7377 && fcode
!= BUILT_IN_EXECV
7378 && fcode
!= BUILT_IN_EXECLP
7379 && fcode
!= BUILT_IN_EXECLE
7380 && fcode
!= BUILT_IN_EXECVP
7381 && fcode
!= BUILT_IN_EXECVE
7382 && fcode
!= BUILT_IN_CLEAR_CACHE
7383 && !ALLOCA_FUNCTION_CODE_P (fcode
)
7384 && fcode
!= BUILT_IN_FREE
)
7385 return expand_call (exp
, target
, ignore
);
7387 /* The built-in function expanders test for target == const0_rtx
7388 to determine whether the function's result will be ignored. */
7390 target
= const0_rtx
;
7392 /* If the result of a pure or const built-in function is ignored, and
7393 none of its arguments are volatile, we can avoid expanding the
7394 built-in call and just evaluate the arguments for side-effects. */
7395 if (target
== const0_rtx
7396 && ((flags
= flags_from_decl_or_type (fndecl
)) & (ECF_CONST
| ECF_PURE
))
7397 && !(flags
& ECF_LOOPING_CONST_OR_PURE
))
7399 bool volatilep
= false;
7401 call_expr_arg_iterator iter
;
7403 FOR_EACH_CALL_EXPR_ARG (arg
, iter
, exp
)
7404 if (TREE_THIS_VOLATILE (arg
))
7412 FOR_EACH_CALL_EXPR_ARG (arg
, iter
, exp
)
7413 expand_expr (arg
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
7420 CASE_FLT_FN (BUILT_IN_FABS
):
7421 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS
):
7422 case BUILT_IN_FABSD32
:
7423 case BUILT_IN_FABSD64
:
7424 case BUILT_IN_FABSD128
:
7425 target
= expand_builtin_fabs (exp
, target
, subtarget
);
7430 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
7431 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN
):
7432 target
= expand_builtin_copysign (exp
, target
, subtarget
);
7437 /* Just do a normal library call if we were unable to fold
7439 CASE_FLT_FN (BUILT_IN_CABS
):
7440 CASE_FLT_FN_FLOATN_NX (BUILT_IN_CABS
):
7443 CASE_FLT_FN (BUILT_IN_FMA
):
7444 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA
):
7445 target
= expand_builtin_mathfn_ternary (exp
, target
, subtarget
);
7450 CASE_FLT_FN (BUILT_IN_ILOGB
):
7451 if (! flag_unsafe_math_optimizations
)
7454 CASE_FLT_FN (BUILT_IN_ISINF
):
7455 CASE_FLT_FN (BUILT_IN_FINITE
):
7456 case BUILT_IN_ISFINITE
:
7457 case BUILT_IN_ISNORMAL
:
7458 target
= expand_builtin_interclass_mathfn (exp
, target
);
7463 case BUILT_IN_ISSIGNALING
:
7464 target
= expand_builtin_issignaling (exp
, target
);
7469 CASE_FLT_FN (BUILT_IN_ICEIL
):
7470 CASE_FLT_FN (BUILT_IN_LCEIL
):
7471 CASE_FLT_FN (BUILT_IN_LLCEIL
):
7472 CASE_FLT_FN (BUILT_IN_LFLOOR
):
7473 CASE_FLT_FN (BUILT_IN_IFLOOR
):
7474 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
7475 target
= expand_builtin_int_roundingfn (exp
, target
);
7480 CASE_FLT_FN (BUILT_IN_IRINT
):
7481 CASE_FLT_FN (BUILT_IN_LRINT
):
7482 CASE_FLT_FN (BUILT_IN_LLRINT
):
7483 CASE_FLT_FN (BUILT_IN_IROUND
):
7484 CASE_FLT_FN (BUILT_IN_LROUND
):
7485 CASE_FLT_FN (BUILT_IN_LLROUND
):
7486 target
= expand_builtin_int_roundingfn_2 (exp
, target
);
7491 CASE_FLT_FN (BUILT_IN_POWI
):
7492 target
= expand_builtin_powi (exp
, target
);
7497 CASE_FLT_FN (BUILT_IN_CEXPI
):
7498 target
= expand_builtin_cexpi (exp
, target
);
7499 gcc_assert (target
);
7502 CASE_FLT_FN (BUILT_IN_SIN
):
7503 CASE_FLT_FN (BUILT_IN_COS
):
7504 if (! flag_unsafe_math_optimizations
)
7506 target
= expand_builtin_mathfn_3 (exp
, target
, subtarget
);
7511 CASE_FLT_FN (BUILT_IN_SINCOS
):
7512 if (! flag_unsafe_math_optimizations
)
7514 target
= expand_builtin_sincos (exp
);
7519 case BUILT_IN_FEGETROUND
:
7520 target
= expand_builtin_fegetround (exp
, target
, target_mode
);
7525 case BUILT_IN_FECLEAREXCEPT
:
7526 target
= expand_builtin_feclear_feraise_except (exp
, target
, target_mode
,
7527 feclearexcept_optab
);
7532 case BUILT_IN_FERAISEEXCEPT
:
7533 target
= expand_builtin_feclear_feraise_except (exp
, target
, target_mode
,
7534 feraiseexcept_optab
);
7539 case BUILT_IN_APPLY_ARGS
:
7540 return expand_builtin_apply_args ();
7542 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7543 FUNCTION with a copy of the parameters described by
7544 ARGUMENTS, and ARGSIZE. It returns a block of memory
7545 allocated on the stack into which is stored all the registers
7546 that might possibly be used for returning the result of a
7547 function. ARGUMENTS is the value returned by
7548 __builtin_apply_args. ARGSIZE is the number of bytes of
7549 arguments that must be copied. ??? How should this value be
7550 computed? We'll also need a safe worst case value for varargs
7552 case BUILT_IN_APPLY
:
7553 if (!validate_arglist (exp
, POINTER_TYPE
,
7554 POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
)
7555 && !validate_arglist (exp
, REFERENCE_TYPE
,
7556 POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
7562 ops
[0] = expand_normal (CALL_EXPR_ARG (exp
, 0));
7563 ops
[1] = expand_normal (CALL_EXPR_ARG (exp
, 1));
7564 ops
[2] = expand_normal (CALL_EXPR_ARG (exp
, 2));
7566 return expand_builtin_apply (ops
[0], ops
[1], ops
[2]);
7569 /* __builtin_return (RESULT) causes the function to return the
7570 value described by RESULT. RESULT is address of the block of
7571 memory returned by __builtin_apply. */
7572 case BUILT_IN_RETURN
:
7573 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
7574 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp
, 0)));
7577 case BUILT_IN_SAVEREGS
:
7578 return expand_builtin_saveregs ();
7580 case BUILT_IN_VA_ARG_PACK
:
7581 /* All valid uses of __builtin_va_arg_pack () are removed during
7583 error ("invalid use of %<__builtin_va_arg_pack ()%>");
7586 case BUILT_IN_VA_ARG_PACK_LEN
:
7587 /* All valid uses of __builtin_va_arg_pack_len () are removed during
7589 error ("invalid use of %<__builtin_va_arg_pack_len ()%>");
7592 /* Return the address of the first anonymous stack arg. */
7593 case BUILT_IN_NEXT_ARG
:
7594 if (fold_builtin_next_arg (exp
, false))
7596 return expand_builtin_next_arg ();
7598 case BUILT_IN_CLEAR_CACHE
:
7599 expand_builtin___clear_cache (exp
);
7602 case BUILT_IN_CLASSIFY_TYPE
:
7603 return expand_builtin_classify_type (exp
);
7605 case BUILT_IN_CONSTANT_P
:
7608 case BUILT_IN_FRAME_ADDRESS
:
7609 case BUILT_IN_RETURN_ADDRESS
:
7610 return expand_builtin_frame_address (fndecl
, exp
);
7612 /* Returns the address of the area where the structure is returned.
7614 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS
:
7615 if (call_expr_nargs (exp
) != 0
7616 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl
)))
7617 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl
))))
7620 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl
)), 0);
    CASE_BUILT_IN_ALLOCA:
      target = expand_builtin_alloca (exp);
      if (target)
	return target;
      break;

    case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
      return expand_asan_emit_allocas_unpoison (exp);

    case BUILT_IN_STACK_SAVE:
      return expand_stack_save ();

    case BUILT_IN_STACK_RESTORE:
      expand_stack_restore (CALL_EXPR_ARG (exp, 0));
      return const0_rtx;

    case BUILT_IN_BSWAP16:
    case BUILT_IN_BSWAP32:
    case BUILT_IN_BSWAP64:
    case BUILT_IN_BSWAP128:
      target = expand_builtin_bswap (target_mode, exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_FFS):
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, ffs_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_CLZ):
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, clz_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_CTZ):
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, ctz_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_CLRSB):
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, clrsb_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_POPCOUNT):
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, popcount_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_PARITY):
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, parity_optab);
      if (target)
	return target;
      break;
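      /* For illustration (not part of the original sources): each of the
	 integer unary cases above maps onto a single optab, so e.g.
	 __builtin_popcount (x) is expanded through popcount_optab in the
	 mode of its operand, and only falls back to a library routine when
	 the target provides no pattern for that mode.  */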
    case BUILT_IN_STRLEN:
      target = expand_builtin_strlen (exp, target, target_mode);
      if (target)
	return target;
      break;

    case BUILT_IN_STRNLEN:
      target = expand_builtin_strnlen (exp, target, target_mode);
      if (target)
	return target;
      break;

    case BUILT_IN_STRCPY:
      target = expand_builtin_strcpy (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_STRNCPY:
      target = expand_builtin_strncpy (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_STPCPY:
      target = expand_builtin_stpcpy (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMCPY:
      target = expand_builtin_memcpy (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMMOVE:
      target = expand_builtin_memmove (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMPCPY:
      target = expand_builtin_mempcpy (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMSET:
      target = expand_builtin_memset (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_BZERO:
      target = expand_builtin_bzero (exp);
      if (target)
	return target;
      break;
      /* Expand it as BUILT_IN_MEMCMP_EQ first.  If not successful, change it
	 back to a BUILT_IN_STRCMP.  Remember to delete the 3rd parameter
	 when changing it to a strcmp call.  */
    case BUILT_IN_STRCMP_EQ:
      target = expand_builtin_memcmp (exp, target, true);
      if (target)
	return target;

      /* Change this call back to a BUILT_IN_STRCMP.  */
      TREE_OPERAND (exp, 1)
	= build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));

      /* Delete the last parameter.  */
      unsigned int i;
      vec<tree, va_gc> *arg_vec;
      vec_alloc (arg_vec, 2);
      for (i = 0; i < 2; i++)
	arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
      exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
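      /* For illustration (not part of the original sources): a call such as
	 __builtin_strcmp_eq (a, b, 4), produced when only equality of the
	 strings matters, is first tried as a length-4 memcmp expansion; if
	 that fails, the rewrite above rebuilds it as strcmp (a, b) without
	 the length argument and falls through to the strcmp case below.  */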
      /* FALLTHRU */
    case BUILT_IN_STRCMP:
      target = expand_builtin_strcmp (exp, target);
      if (target)
	return target;
      break;

      /* Expand it as BUILT_IN_MEMCMP_EQ first.  If not successful, change it
	 back to a BUILT_IN_STRNCMP.  */
    case BUILT_IN_STRNCMP_EQ:
      target = expand_builtin_memcmp (exp, target, true);
      if (target)
	return target;

      /* Change it back to a BUILT_IN_STRNCMP.  */
      TREE_OPERAND (exp, 1)
	= build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
      /* FALLTHRU */

    case BUILT_IN_STRNCMP:
      target = expand_builtin_strncmp (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMCMP:
    case BUILT_IN_MEMCMP_EQ:
      target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
      if (target)
	return target;
      if (fcode == BUILT_IN_MEMCMP_EQ)
	{
	  tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
	  TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
	}
      break;
    case BUILT_IN_SETJMP:
      /* This should have been lowered to the builtins below.  */
      gcc_unreachable ();

    case BUILT_IN_SETJMP_SETUP:
      /* __builtin_setjmp_setup is passed a pointer to an array of five words
	 and the receiver label.  */
      if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
	{
	  rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
				      VOIDmode, EXPAND_NORMAL);
	  tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
	  rtx_insn *label_r = label_rtx (label);

	  expand_builtin_setjmp_setup (buf_addr, label_r);
	  return const0_rtx;
	}
      break;

    case BUILT_IN_SETJMP_RECEIVER:
      /* __builtin_setjmp_receiver is passed the receiver label.  */
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
	{
	  tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
	  rtx_insn *label_r = label_rtx (label);

	  expand_builtin_setjmp_receiver (label_r);
	  nonlocal_goto_handler_labels
	    = gen_rtx_INSN_LIST (VOIDmode, label_r,
				 nonlocal_goto_handler_labels);
	  /* ??? Do not let expand_label treat us as such since we would
	     not want to be both on the list of non-local labels and on
	     the list of forced labels.  */
	  FORCED_LABEL (label) = 0;
	  return const0_rtx;
	}
      break;
      /* __builtin_longjmp is passed a pointer to an array of five words.
	 It's similar to the C library longjmp function but works with
	 __builtin_setjmp above.  */
    case BUILT_IN_LONGJMP:
      if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
	{
	  rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
				      VOIDmode, EXPAND_NORMAL);
	  rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));

	  if (value != const1_rtx)
	    {
	      error ("%<__builtin_longjmp%> second argument must be 1");
	      return const0_rtx;
	    }

	  expand_builtin_longjmp (buf_addr, value);
	  return const0_rtx;
	}
      break;

    case BUILT_IN_NONLOCAL_GOTO:
      target = expand_builtin_nonlocal_goto (exp);
      if (target)
	return target;
      break;

      /* This updates the setjmp buffer that is its argument with the value
	 of the current stack pointer.  */
    case BUILT_IN_UPDATE_SETJMP_BUF:
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
	{
	  rtx buf_addr
	    = expand_normal (CALL_EXPR_ARG (exp, 0));

	  expand_builtin_update_setjmp_buf (buf_addr);
	  return const0_rtx;
	}
      break;

    case BUILT_IN_UNREACHABLE_TRAP:
      expand_builtin_trap ();
      return const0_rtx;

    case BUILT_IN_UNREACHABLE:
      expand_builtin_unreachable ();
      return const0_rtx;
    CASE_FLT_FN (BUILT_IN_SIGNBIT):
    case BUILT_IN_SIGNBITD32:
    case BUILT_IN_SIGNBITD64:
    case BUILT_IN_SIGNBITD128:
      target = expand_builtin_signbit (exp, target);
      if (target)
	return target;
      break;

      /* Various hooks for the DWARF 2 __throw routine.  */
    case BUILT_IN_UNWIND_INIT:
      expand_builtin_unwind_init ();
      return const0_rtx;
    case BUILT_IN_DWARF_CFA:
      return virtual_cfa_rtx;
#ifdef DWARF2_UNWIND_INFO
    case BUILT_IN_DWARF_SP_COLUMN:
      return expand_builtin_dwarf_sp_column ();
    case BUILT_IN_INIT_DWARF_REG_SIZES:
      expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
      return const0_rtx;
#endif
    case BUILT_IN_FROB_RETURN_ADDR:
      return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
    case BUILT_IN_EXTRACT_RETURN_ADDR:
      return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
    case BUILT_IN_EH_RETURN:
      expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
				CALL_EXPR_ARG (exp, 1));
      return const0_rtx;
    case BUILT_IN_EH_RETURN_DATA_REGNO:
      return expand_builtin_eh_return_data_regno (exp);
    case BUILT_IN_EXTEND_POINTER:
      return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
    case BUILT_IN_EH_POINTER:
      return expand_builtin_eh_pointer (exp);
    case BUILT_IN_EH_FILTER:
      return expand_builtin_eh_filter (exp);
    case BUILT_IN_EH_COPY_VALUES:
      return expand_builtin_eh_copy_values (exp);
    case BUILT_IN_VA_START:
      return expand_builtin_va_start (exp);
    case BUILT_IN_VA_END:
      return expand_builtin_va_end (exp);
    case BUILT_IN_VA_COPY:
      return expand_builtin_va_copy (exp);
    case BUILT_IN_EXPECT:
      return expand_builtin_expect (exp, target);
    case BUILT_IN_EXPECT_WITH_PROBABILITY:
      return expand_builtin_expect_with_probability (exp, target);
    case BUILT_IN_ASSUME_ALIGNED:
      return expand_builtin_assume_aligned (exp, target);
    case BUILT_IN_PREFETCH:
      expand_builtin_prefetch (exp);
      return const0_rtx;

    case BUILT_IN_INIT_TRAMPOLINE:
      return expand_builtin_init_trampoline (exp, true);
    case BUILT_IN_INIT_HEAP_TRAMPOLINE:
      return expand_builtin_init_trampoline (exp, false);
    case BUILT_IN_ADJUST_TRAMPOLINE:
      return expand_builtin_adjust_trampoline (exp);

    case BUILT_IN_INIT_DESCRIPTOR:
      return expand_builtin_init_descriptor (exp);
    case BUILT_IN_ADJUST_DESCRIPTOR:
      return expand_builtin_adjust_descriptor (exp);

    case BUILT_IN_EXECL:
    case BUILT_IN_EXECV:
    case BUILT_IN_EXECLP:
    case BUILT_IN_EXECLE:
    case BUILT_IN_EXECVP:
    case BUILT_IN_EXECVE:
      target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
      if (target)
	return target;
      break;
    case BUILT_IN_SYNC_FETCH_AND_ADD_1:
    case BUILT_IN_SYNC_FETCH_AND_ADD_2:
    case BUILT_IN_SYNC_FETCH_AND_ADD_4:
    case BUILT_IN_SYNC_FETCH_AND_ADD_8:
    case BUILT_IN_SYNC_FETCH_AND_ADD_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
      target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
      if (target)
	return target;
      break;
    case BUILT_IN_SYNC_FETCH_AND_SUB_1:
    case BUILT_IN_SYNC_FETCH_AND_SUB_2:
    case BUILT_IN_SYNC_FETCH_AND_SUB_4:
    case BUILT_IN_SYNC_FETCH_AND_SUB_8:
    case BUILT_IN_SYNC_FETCH_AND_SUB_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
      target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_OR_1:
    case BUILT_IN_SYNC_FETCH_AND_OR_2:
    case BUILT_IN_SYNC_FETCH_AND_OR_4:
    case BUILT_IN_SYNC_FETCH_AND_OR_8:
    case BUILT_IN_SYNC_FETCH_AND_OR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
      target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_AND_1:
    case BUILT_IN_SYNC_FETCH_AND_AND_2:
    case BUILT_IN_SYNC_FETCH_AND_AND_4:
    case BUILT_IN_SYNC_FETCH_AND_AND_8:
    case BUILT_IN_SYNC_FETCH_AND_AND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
      target = expand_builtin_sync_operation (mode, exp, AND, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_XOR_1:
    case BUILT_IN_SYNC_FETCH_AND_XOR_2:
    case BUILT_IN_SYNC_FETCH_AND_XOR_4:
    case BUILT_IN_SYNC_FETCH_AND_XOR_8:
    case BUILT_IN_SYNC_FETCH_AND_XOR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
      target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_NAND_1:
    case BUILT_IN_SYNC_FETCH_AND_NAND_2:
    case BUILT_IN_SYNC_FETCH_AND_NAND_4:
    case BUILT_IN_SYNC_FETCH_AND_NAND_8:
    case BUILT_IN_SYNC_FETCH_AND_NAND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
      target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_ADD_AND_FETCH_1:
    case BUILT_IN_SYNC_ADD_AND_FETCH_2:
    case BUILT_IN_SYNC_ADD_AND_FETCH_4:
    case BUILT_IN_SYNC_ADD_AND_FETCH_8:
    case BUILT_IN_SYNC_ADD_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_SUB_AND_FETCH_1:
    case BUILT_IN_SYNC_SUB_AND_FETCH_2:
    case BUILT_IN_SYNC_SUB_AND_FETCH_4:
    case BUILT_IN_SYNC_SUB_AND_FETCH_8:
    case BUILT_IN_SYNC_SUB_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_OR_AND_FETCH_1:
    case BUILT_IN_SYNC_OR_AND_FETCH_2:
    case BUILT_IN_SYNC_OR_AND_FETCH_4:
    case BUILT_IN_SYNC_OR_AND_FETCH_8:
    case BUILT_IN_SYNC_OR_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_AND_AND_FETCH_1:
    case BUILT_IN_SYNC_AND_AND_FETCH_2:
    case BUILT_IN_SYNC_AND_AND_FETCH_4:
    case BUILT_IN_SYNC_AND_AND_FETCH_8:
    case BUILT_IN_SYNC_AND_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, AND, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_XOR_AND_FETCH_1:
    case BUILT_IN_SYNC_XOR_AND_FETCH_2:
    case BUILT_IN_SYNC_XOR_AND_FETCH_4:
    case BUILT_IN_SYNC_XOR_AND_FETCH_8:
    case BUILT_IN_SYNC_XOR_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_NAND_AND_FETCH_1:
    case BUILT_IN_SYNC_NAND_AND_FETCH_2:
    case BUILT_IN_SYNC_NAND_AND_FETCH_4:
    case BUILT_IN_SYNC_NAND_AND_FETCH_8:
    case BUILT_IN_SYNC_NAND_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
      if (target)
	return target;
      break;
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
      if (mode == VOIDmode)
	mode = TYPE_MODE (boolean_type_node);
      if (!target || !register_operand (target, mode))
	target = gen_reg_rtx (mode);

      mode = get_builtin_sync_mode
	(fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
      target = expand_builtin_compare_and_swap (mode, exp, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
      mode = get_builtin_sync_mode
	(fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
      target = expand_builtin_compare_and_swap (mode, exp, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
      target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_LOCK_RELEASE_1:
    case BUILT_IN_SYNC_LOCK_RELEASE_2:
    case BUILT_IN_SYNC_LOCK_RELEASE_4:
    case BUILT_IN_SYNC_LOCK_RELEASE_8:
    case BUILT_IN_SYNC_LOCK_RELEASE_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
      expand_builtin_sync_lock_release (mode, exp);
      return const0_rtx;

    case BUILT_IN_SYNC_SYNCHRONIZE:
      expand_builtin_sync_synchronize ();
      return const0_rtx;
    case BUILT_IN_ATOMIC_EXCHANGE_1:
    case BUILT_IN_ATOMIC_EXCHANGE_2:
    case BUILT_IN_ATOMIC_EXCHANGE_4:
    case BUILT_IN_ATOMIC_EXCHANGE_8:
    case BUILT_IN_ATOMIC_EXCHANGE_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
      target = expand_builtin_atomic_exchange (mode, exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
      {
	unsigned int nargs, z;
	vec<tree, va_gc> *vec;

	mode =
	  get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
	target = expand_builtin_atomic_compare_exchange (mode, exp, target);
	if (target)
	  return target;

	/* If this is turned into an external library call, the weak parameter
	   must be dropped to match the expected parameter list.  */
	nargs = call_expr_nargs (exp);
	vec_alloc (vec, nargs - 1);
	for (z = 0; z < 3; z++)
	  vec->quick_push (CALL_EXPR_ARG (exp, z));
	/* Skip the boolean weak parameter.  */
	for (z = 4; z < 6; z++)
	  vec->quick_push (CALL_EXPR_ARG (exp, z));
	exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
	break;
      }
    case BUILT_IN_ATOMIC_LOAD_1:
    case BUILT_IN_ATOMIC_LOAD_2:
    case BUILT_IN_ATOMIC_LOAD_4:
    case BUILT_IN_ATOMIC_LOAD_8:
    case BUILT_IN_ATOMIC_LOAD_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
      target = expand_builtin_atomic_load (mode, exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_STORE_1:
    case BUILT_IN_ATOMIC_STORE_2:
    case BUILT_IN_ATOMIC_STORE_4:
    case BUILT_IN_ATOMIC_STORE_8:
    case BUILT_IN_ATOMIC_STORE_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
      target = expand_builtin_atomic_store (mode, exp);
      if (target)
	return const0_rtx;
      break;

    case BUILT_IN_ATOMIC_ADD_FETCH_1:
    case BUILT_IN_ATOMIC_ADD_FETCH_2:
    case BUILT_IN_ATOMIC_ADD_FETCH_4:
    case BUILT_IN_ATOMIC_ADD_FETCH_8:
    case BUILT_IN_ATOMIC_ADD_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
				       (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }
    case BUILT_IN_ATOMIC_SUB_FETCH_1:
    case BUILT_IN_ATOMIC_SUB_FETCH_2:
    case BUILT_IN_ATOMIC_SUB_FETCH_4:
    case BUILT_IN_ATOMIC_SUB_FETCH_8:
    case BUILT_IN_ATOMIC_SUB_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
				       (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }

    case BUILT_IN_ATOMIC_AND_FETCH_1:
    case BUILT_IN_ATOMIC_AND_FETCH_2:
    case BUILT_IN_ATOMIC_AND_FETCH_4:
    case BUILT_IN_ATOMIC_AND_FETCH_8:
    case BUILT_IN_ATOMIC_AND_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
				       (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }

    case BUILT_IN_ATOMIC_NAND_FETCH_1:
    case BUILT_IN_ATOMIC_NAND_FETCH_2:
    case BUILT_IN_ATOMIC_NAND_FETCH_4:
    case BUILT_IN_ATOMIC_NAND_FETCH_8:
    case BUILT_IN_ATOMIC_NAND_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
				       (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }

    case BUILT_IN_ATOMIC_XOR_FETCH_1:
    case BUILT_IN_ATOMIC_XOR_FETCH_2:
    case BUILT_IN_ATOMIC_XOR_FETCH_4:
    case BUILT_IN_ATOMIC_XOR_FETCH_8:
    case BUILT_IN_ATOMIC_XOR_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
				       (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }

    case BUILT_IN_ATOMIC_OR_FETCH_1:
    case BUILT_IN_ATOMIC_OR_FETCH_2:
    case BUILT_IN_ATOMIC_OR_FETCH_4:
    case BUILT_IN_ATOMIC_OR_FETCH_8:
    case BUILT_IN_ATOMIC_OR_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
				       (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }
    case BUILT_IN_ATOMIC_FETCH_ADD_1:
    case BUILT_IN_ATOMIC_FETCH_ADD_2:
    case BUILT_IN_ATOMIC_FETCH_ADD_4:
    case BUILT_IN_ATOMIC_FETCH_ADD_8:
    case BUILT_IN_ATOMIC_FETCH_ADD_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_SUB_1:
    case BUILT_IN_ATOMIC_FETCH_SUB_2:
    case BUILT_IN_ATOMIC_FETCH_SUB_4:
    case BUILT_IN_ATOMIC_FETCH_SUB_8:
    case BUILT_IN_ATOMIC_FETCH_SUB_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_AND_1:
    case BUILT_IN_ATOMIC_FETCH_AND_2:
    case BUILT_IN_ATOMIC_FETCH_AND_4:
    case BUILT_IN_ATOMIC_FETCH_AND_8:
    case BUILT_IN_ATOMIC_FETCH_AND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_NAND_1:
    case BUILT_IN_ATOMIC_FETCH_NAND_2:
    case BUILT_IN_ATOMIC_FETCH_NAND_4:
    case BUILT_IN_ATOMIC_FETCH_NAND_8:
    case BUILT_IN_ATOMIC_FETCH_NAND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_XOR_1:
    case BUILT_IN_ATOMIC_FETCH_XOR_2:
    case BUILT_IN_ATOMIC_FETCH_XOR_4:
    case BUILT_IN_ATOMIC_FETCH_XOR_8:
    case BUILT_IN_ATOMIC_FETCH_XOR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_OR_1:
    case BUILT_IN_ATOMIC_FETCH_OR_2:
    case BUILT_IN_ATOMIC_FETCH_OR_4:
    case BUILT_IN_ATOMIC_FETCH_OR_8:
    case BUILT_IN_ATOMIC_FETCH_OR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;
    case BUILT_IN_ATOMIC_TEST_AND_SET:
      target = expand_builtin_atomic_test_and_set (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_CLEAR:
      return expand_builtin_atomic_clear (exp);

    case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
      return expand_builtin_atomic_always_lock_free (exp);

    case BUILT_IN_ATOMIC_IS_LOCK_FREE:
      target = expand_builtin_atomic_is_lock_free (exp);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_THREAD_FENCE:
      expand_builtin_atomic_thread_fence (exp);
      return const0_rtx;

    case BUILT_IN_ATOMIC_SIGNAL_FENCE:
      expand_builtin_atomic_signal_fence (exp);
      return const0_rtx;

    case BUILT_IN_OBJECT_SIZE:
    case BUILT_IN_DYNAMIC_OBJECT_SIZE:
      return expand_builtin_object_size (exp);

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      target = expand_builtin_memory_chk (exp, target, mode, fcode);
      if (target)
	return target;
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
    case BUILT_IN_STRCAT_CHK:
    case BUILT_IN_STRNCAT_CHK:
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      maybe_emit_chk_warning (exp, fcode);
      break;

    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      maybe_emit_sprintf_chk_warning (exp, fcode);
      break;

    case BUILT_IN_THREAD_POINTER:
      return expand_builtin_thread_pointer (exp, target);

    case BUILT_IN_SET_THREAD_POINTER:
      expand_builtin_set_thread_pointer (exp);
      return const0_rtx;

    case BUILT_IN_ACC_ON_DEVICE:
      /* Do library call, if we failed to expand the builtin when
	 folding.  */
      break;

    case BUILT_IN_GOACC_PARLEVEL_ID:
    case BUILT_IN_GOACC_PARLEVEL_SIZE:
      return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);

    case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
      return expand_speculation_safe_value (VOIDmode, exp, target, ignore);

    case BUILT_IN_SPECULATION_SAFE_VALUE_1:
    case BUILT_IN_SPECULATION_SAFE_VALUE_2:
    case BUILT_IN_SPECULATION_SAFE_VALUE_4:
    case BUILT_IN_SPECULATION_SAFE_VALUE_8:
    case BUILT_IN_SPECULATION_SAFE_VALUE_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
      return expand_speculation_safe_value (mode, exp, target, ignore);

    default:	/* just do library call, if unknown builtin */
      break;
    }

  /* The switch statement above can drop through to cause the function
     to be called normally.  */
  return expand_call (exp, target, ignore);
}
/* Determine whether a tree node represents a call to a built-in
   function.  If the tree T is a call to a built-in function with
   the right number of arguments of the appropriate types, return
   the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
   Otherwise the return value is END_BUILTINS.  */

enum built_in_function
builtin_mathfn_code (const_tree t)
{
  const_tree fndecl, arg, parmlist;
  const_tree argtype, parmtype;
  const_call_expr_arg_iterator iter;

  if (TREE_CODE (t) != CALL_EXPR)
    return END_BUILTINS;

  fndecl = get_callee_fndecl (t);
  if (fndecl == NULL_TREE || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
    return END_BUILTINS;

  parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  init_const_call_expr_arg_iterator (t, &iter);
  for (; parmlist; parmlist = TREE_CHAIN (parmlist))
    {
      /* If a function doesn't take a variable number of arguments,
	 the last element in the list will have type `void'.  */
      parmtype = TREE_VALUE (parmlist);
      if (VOID_TYPE_P (parmtype))
	{
	  if (more_const_call_expr_args_p (&iter))
	    return END_BUILTINS;
	  return DECL_FUNCTION_CODE (fndecl);
	}

      if (! more_const_call_expr_args_p (&iter))
	return END_BUILTINS;

      arg = next_const_call_expr_arg (&iter);
      argtype = TREE_TYPE (arg);

      if (SCALAR_FLOAT_TYPE_P (parmtype))
	{
	  if (! SCALAR_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (COMPLEX_FLOAT_TYPE_P (parmtype))
	{
	  if (! COMPLEX_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (POINTER_TYPE_P (parmtype))
	{
	  if (! POINTER_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (INTEGRAL_TYPE_P (parmtype))
	{
	  if (! INTEGRAL_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else
	return END_BUILTINS;
    }

  /* Variable-length argument list.  */
  return DECL_FUNCTION_CODE (fndecl);
}
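/* For illustration (not part of the original sources): for a call tree built
   from sqrt (2.0) the function above returns BUILT_IN_SQRT, since the single
   REAL_TYPE argument matches the REAL_TYPE parameter; a call whose argument
   fails the corresponding type check, e.g. a pointer passed to sqrt, yields
   END_BUILTINS instead.  */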
/* Fold a call to __builtin_constant_p, if we know its argument ARG will
   evaluate to a constant.  */

static tree
fold_builtin_constant_p (tree arg)
{
  /* We return 1 for a numeric type that's known to be a constant
     value at compile-time or for an aggregate type that's a
     literal constant.  */
  STRIP_NOPS (arg);

  /* If we know this is a constant, emit the constant of one.  */
  if (CONSTANT_CLASS_P (arg)
      || (TREE_CODE (arg) == CONSTRUCTOR
	  && TREE_CONSTANT (arg)))
    return integer_one_node;
  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (arg, 0);
      if (TREE_CODE (op) == STRING_CST
	  || (TREE_CODE (op) == ARRAY_REF
	      && integer_zerop (TREE_OPERAND (op, 1))
	      && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
	return integer_one_node;
    }

  /* If this expression has side effects, show we don't know it to be a
     constant.  Likewise if it's a pointer or aggregate type since in
     those case we only want literals, since those are only optimized
     when generating RTL, not later.
     And finally, if we are compiling an initializer, not code, we
     need to return a definite result now; there's not going to be any
     more optimization done.  */
  if (TREE_SIDE_EFFECTS (arg)
      || AGGREGATE_TYPE_P (TREE_TYPE (arg))
      || POINTER_TYPE_P (TREE_TYPE (arg))
      || folding_initializer
      || force_folding_builtin_constant_p)
    return integer_zero_node;

  return NULL_TREE;
}
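/* For illustration (not part of the original sources): with ARG being the
   literal 42 or the address of a string constant the function folds to
   integer_one_node, while an argument with side effects such as i++ folds
   to integer_zero_node; anything still undecided is left for later passes
   by returning NULL_TREE.  */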
/* Create builtin_expect or builtin_expect_with_probability
   with PRED and EXPECTED as its arguments and return it as a truthvalue.
   Fortran FE can also produce builtin_expect with PREDICTOR as third argument.
   builtin_expect_with_probability instead uses third argument as PROBABILITY
   value.  */

static tree
build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
				tree predictor, tree probability)
{
  tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;

  fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
			      : BUILT_IN_EXPECT_WITH_PROBABILITY);
  arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
  ret_type = TREE_TYPE (TREE_TYPE (fn));
  pred_type = TREE_VALUE (arg_types);
  expected_type = TREE_VALUE (TREE_CHAIN (arg_types));

  pred = fold_convert_loc (loc, pred_type, pred);
  expected = fold_convert_loc (loc, expected_type, expected);

  if (probability)
    call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
  else
    call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
				     predictor);

  return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
		 build_int_cst (ret_type, 0));
}
/* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3.  Return
   NULL_TREE if no simplification is possible.  */

static tree
fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
		     tree arg3)
{
  tree inner, fndecl, inner_arg0;
  enum tree_code code;

  /* Distribute the expected value over short-circuiting operators.
     See through the cast from truthvalue_type_node to long.  */
  inner_arg0 = arg0;
  while (CONVERT_EXPR_P (inner_arg0)
	 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
    inner_arg0 = TREE_OPERAND (inner_arg0, 0);

  /* If this is a builtin_expect within a builtin_expect keep the
     inner one.  See through a comparison against a constant.  It
     might have been added to create a truthvalue.  */
  inner = inner_arg0;

  if (COMPARISON_CLASS_P (inner)
      && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
    inner = TREE_OPERAND (inner, 0);

  if (TREE_CODE (inner) == CALL_EXPR
      && (fndecl = get_callee_fndecl (inner))
      && fndecl_built_in_p (fndecl, BUILT_IN_EXPECT,
			    BUILT_IN_EXPECT_WITH_PROBABILITY))
    return arg0;

  inner = inner_arg0;
  code = TREE_CODE (inner);
  if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
    {
      tree op0 = TREE_OPERAND (inner, 0);
      tree op1 = TREE_OPERAND (inner, 1);
      arg1 = save_expr (arg1);

      op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
      op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
      inner = build2 (code, TREE_TYPE (inner), op0, op1);

      return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
    }

  /* If the argument isn't invariant then there's nothing else we can do.  */
  if (!TREE_CONSTANT (inner_arg0))
    return NULL_TREE;

  /* If we expect that a comparison against the argument will fold to
     a constant return the constant.  In practice, this means a true
     constant or the address of a non-weak symbol.  */
  inner = inner_arg0;
  STRIP_NOPS (inner);
  if (TREE_CODE (inner) == ADDR_EXPR)
    {
      do
	{
	  inner = TREE_OPERAND (inner, 0);
	}
      while (TREE_CODE (inner) == COMPONENT_REF
	     || TREE_CODE (inner) == ARRAY_REF);
      if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
	return NULL_TREE;
    }

  /* Otherwise, ARG0 already has the proper type for the return value.  */
  return arg0;
}
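/* For illustration (not part of the original sources): given
   __builtin_expect (a && b, 1), the TRUTH_ANDIF_EXPR branch above
   distributes the expectation so that each short-circuited operand is
   wrapped in its own expect predicate, roughly
     __builtin_expect (a, 1) && __builtin_expect (b, 1)
   expressed as NE_EXPR comparisons of the individual calls.  */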
/* Fold a call to __builtin_classify_type with argument ARG.  */

static tree
fold_builtin_classify_type (tree arg)
{
  if (arg == 0)
    return build_int_cst (integer_type_node, no_type_class);

  return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
}
/* Fold a call EXPR (which may be null) to __builtin_strlen with argument
   ARG.  */

static tree
fold_builtin_strlen (location_t loc, tree expr, tree type, tree arg)
{
  if (!validate_arg (arg, POINTER_TYPE))
    return NULL_TREE;

  c_strlen_data lendata = { };
  tree len = c_strlen (arg, 0, &lendata);

  if (len)
    return fold_convert_loc (loc, type, len);

  /* TODO: Move this to gimple-ssa-warn-access once the pass runs
     also early enough to detect invalid reads in multidimensional
     arrays and struct members.  */
  if (!lendata.decl)
    c_strlen (arg, 1, &lendata);

  if (lendata.decl)
    {
      if (EXPR_HAS_LOCATION (arg))
	loc = EXPR_LOCATION (arg);
      else if (loc == UNKNOWN_LOCATION)
	loc = input_location;
      warn_string_no_nul (loc, expr, "strlen", arg, lendata.decl);
    }

  return NULL_TREE;
}
/* Fold a call to __builtin_inf or __builtin_huge_val.  */

static tree
fold_builtin_inf (location_t loc, tree type, int warn)
{
  /* __builtin_inff is intended to be usable to define INFINITY on all
     targets.  If an infinity is not available, INFINITY expands "to a
     positive constant of type float that overflows at translation
     time", footnote "In this case, using INFINITY will violate the
     constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
     Thus we pedwarn to ensure this constraint violation is
     diagnosed.  */
  if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
    pedwarn (loc, 0, "target format does not support infinity");

  return build_real (type, dconstinf);
}
/* Fold function call to builtin sincos, sincosf, or sincosl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_sincos (location_t loc,
		     tree arg0, tree arg1, tree arg2)
{
  tree type;
  tree fndecl, call = NULL_TREE;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  type = TREE_TYPE (arg0);

  /* Calculate the result when the argument is a constant.  */
  built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
  if (fn == END_BUILTINS)
    return NULL_TREE;

  /* Canonicalize sincos to cexpi.  */
  if (TREE_CODE (arg0) == REAL_CST)
    {
      tree complex_type = build_complex_type (type);
      call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
    }
  if (!call)
    {
      if (!targetm.libc_has_function (function_c99_math_complex, type)
	  || !builtin_decl_implicit_p (fn))
	return NULL_TREE;
      fndecl = builtin_decl_explicit (fn);
      call = build_call_expr_loc (loc, fndecl, 1, arg0);
      call = builtin_save_expr (call);
    }

  tree ptype = build_pointer_type (type);
  arg1 = fold_convert (ptype, arg1);
  arg2 = fold_convert (ptype, arg2);
  return build2 (COMPOUND_EXPR, void_type_node,
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg1),
			 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg2),
			 fold_build1_loc (loc, REALPART_EXPR, type, call)));
}
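/* For illustration (not part of the original sources): sincos (x, &s, &c)
   is rewritten here as the pair of stores
     *s = __imag__ cexpi (x);  *c = __real__ cexpi (x);
   so later passes see a single saved call whose complex result carries both
   the sine and the cosine of X.  */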
/* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
{
  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
				  arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg1)));
      tree ind2
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
/* Fold a call to builtin isascii with argument ARG.  */

static tree
fold_builtin_isascii (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      /* Transform isascii(c) -> ((c & ~0x7f) == 0).  */
      arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
			 build_int_cst (integer_type_node,
					~ (unsigned HOST_WIDE_INT) 0x7f));
      return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
			      arg, integer_zero_node);
    }
}

/* Fold a call to builtin toascii with argument ARG.  */

static tree
fold_builtin_toascii (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Transform toascii(c) -> (c & 0x7f).  */
  return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
			  build_int_cst (integer_type_node, 0x7f));
}

/* Fold a call to builtin isdigit with argument ARG.  */

static tree
fold_builtin_isdigit (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9.  */
      /* According to the C standard, isdigit is unaffected by locale.
	 However, it definitely is affected by the target character set.  */
      unsigned HOST_WIDE_INT target_digit0
	= lang_hooks.to_target_charset ('0');

      if (target_digit0 == 0)
	return NULL_TREE;

      arg = fold_convert_loc (loc, unsigned_type_node, arg);
      arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
			 build_int_cst (unsigned_type_node, target_digit0));
      return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
			      build_int_cst (unsigned_type_node, 9));
    }
}
/* Fold a call to fabs, fabsf or fabsl with argument ARG.  */

static tree
fold_builtin_fabs (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  arg = fold_convert_loc (loc, type, arg);
  return fold_build1_loc (loc, ABS_EXPR, type, arg);
}

/* Fold a call to abs, labs, llabs or imaxabs with argument ARG.  */

static tree
fold_builtin_abs (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  arg = fold_convert_loc (loc, type, arg);
  return fold_build1_loc (loc, ABS_EXPR, type, arg);
}

/* Fold a call to builtin carg(a+bi) -> atan2(b,a).  */

static tree
fold_builtin_carg (location_t loc, tree arg, tree type)
{
  if (validate_arg (arg, COMPLEX_TYPE)
      && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_TYPE (arg))))
    {
      tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);

      if (atan2_fn)
	{
	  tree new_arg = builtin_save_expr (arg);
	  tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
	  tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
	  return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
	}
    }

  return NULL_TREE;
}
/* Fold a call to builtin frexp, we can assume the base is 2.  */

static tree
fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      tree frac, exp, res;

      switch (value->cl)
	{
	case rvc_zero:
	  /* For +-0, return (*exp = 0, +-0).  */
	  exp = integer_zero_node;
	  frac = arg0;
	  break;
	case rvc_nan:
	case rvc_inf:
	  /* For +-NaN or +-Inf, *exp is unspecified, return arg0.  */
	  return omit_one_operand_loc (loc, rettype, arg0, arg1);
	case rvc_normal:
	  {
	    /* Since the frexp function always expects base 2, and in
	       GCC normalized significands are already in the range
	       [0.5, 1.0), we have exactly what frexp wants.  */
	    REAL_VALUE_TYPE frac_rvt = *value;
	    SET_REAL_EXP (&frac_rvt, 0);
	    frac = build_real (rettype, frac_rvt);
	    exp = build_int_cst (integer_type_node, REAL_EXP (value));
	  }
	  break;
	default:
	  gcc_unreachable ();
	}

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
      TREE_SIDE_EFFECTS (arg1) = 1;
      res = fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
      suppress_warning (res, OPT_Wunused_value);
      return res;
    }

  return NULL_TREE;
}
/* Fold a call to builtin modf.  */

static tree
fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      REAL_VALUE_TYPE trunc, frac;
      tree res;

      switch (value->cl)
	{
	case rvc_nan:
	case rvc_zero:
	  /* For +-NaN or +-0, return (*arg1 = arg0, arg0).  */
	  trunc = frac = *value;
	  break;
	case rvc_inf:
	  /* For +-Inf, return (*arg1 = arg0, +-0).  */
	  frac = dconst0;
	  frac.sign = value->sign;
	  trunc = *value;
	  break;
	default:
	  /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)).  */
	  real_trunc (&trunc, VOIDmode, value);
	  real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
	  /* If the original number was negative and already
	     integral, then the fractional part is -0.0.  */
	  if (value->sign && frac.cl == rvc_zero)
	    frac.sign = value->sign;
	  break;
	}

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
			      build_real (rettype, trunc));
      TREE_SIDE_EFFECTS (arg1) = 1;
      res = fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
			     build_real (rettype, frac));
      suppress_warning (res, OPT_Wunused_value);
      return res;
    }

  return NULL_TREE;
}
/* Given a location LOC, an interclass builtin function decl FNDECL
   and its single argument ARG, return an folded expression computing
   the same, or NULL_TREE if we either couldn't or didn't want to fold
   (the latter happen if there's an RTL instruction available).  */

static tree
fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
{
  machine_mode mode;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
    return NULL_TREE;

  mode = TYPE_MODE (TREE_TYPE (arg));

  bool is_ibm_extended = MODE_COMPOSITE_P (mode);

  /* If there is no optab, try generic code.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
      tree result;

    CASE_FLT_FN (BUILT_IN_ISINF):
      {
	/* isinf(x) -> isgreater(fabs(x),DBL_MAX).  */
	tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
	tree type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	if (is_ibm_extended)
	  {
	    /* NaN and Inf are encoded in the high-order double value
	       only.  The low-order value is not significant.  */
	    type = double_type_node;
	    mode = DFmode;
	    arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
	  }
	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
	real_from_string (&r, buf);
	result = build_call_expr (isgr_fn, 2,
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	return result;
      }
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_ISFINITE:
      {
	/* isfinite(x) -> islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
	tree type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	if (is_ibm_extended)
	  {
	    /* NaN and Inf are encoded in the high-order double value
	       only.  The low-order value is not significant.  */
	    type = double_type_node;
	    mode = DFmode;
	    arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
	  }
	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
	real_from_string (&r, buf);
	result = build_call_expr (isle_fn, 2,
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	/*result = fold_build2_loc (loc, UNGT_EXPR,
				  TREE_TYPE (TREE_TYPE (fndecl)),
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
				  TREE_TYPE (TREE_TYPE (fndecl)),
				  result);*/
	return result;
      }
    case BUILT_IN_ISNORMAL:
      {
	/* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
	   islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
	tree type = TREE_TYPE (arg);
	tree orig_arg, max_exp, min_exp;
	machine_mode orig_mode = mode;
	REAL_VALUE_TYPE rmax, rmin;
	char buf[128];

	orig_arg = arg = builtin_save_expr (arg);
	if (is_ibm_extended)
	  {
	    /* Use double to test the normal range of IBM extended
	       precision.  Emin for IBM extended precision is
	       different to emin for IEEE double, being 53 higher
	       since the low double exponent is at least 53 lower
	       than the high double exponent.  */
	    type = double_type_node;
	    mode = DFmode;
	    arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
	  }
	arg = fold_build1_loc (loc, ABS_EXPR, type, arg);

	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
	real_from_string (&rmax, buf);
	sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
	real_from_string (&rmin, buf);
	max_exp = build_real (type, rmax);
	min_exp = build_real (type, rmin);

	max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
	if (is_ibm_extended)
	  {
	    /* Testing the high end of the range is done just using
	       the high double, using the same test as isfinite().
	       For the subnormal end of the range we first test the
	       high double, then if its magnitude is equal to the
	       limit of 0x1p-969, we test whether the low double is
	       non-zero and opposite sign to the high double.  */
	    tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
	    tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
	    tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
	    tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
				       arg, min_exp);
	    tree as_complex = build1 (VIEW_CONVERT_EXPR,
				      complex_double_type_node, orig_arg);
	    tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
	    tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
	    tree zero = build_real (type, dconst0);
	    tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
	    tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
	    tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
	    tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
				      fold_build3 (COND_EXPR,
						   integer_type_node,
						   hilt, logt, lolt));
	    eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
				  eq_min, ok_lo);
	    min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
				   gt_min, eq_min);
	  }
	else
	  {
	    tree const isge_fn
	      = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
	    min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
	  }
	result = fold_build2 (BIT_AND_EXPR, integer_type_node,
			      max_exp, min_exp);
	return result;
      }
    default:
      break;
    }

  return NULL_TREE;
}
/* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
   ARG is the argument for the call.  */

static tree
fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  switch (builtin_index)
    {
    case BUILT_IN_ISINF:
      if (tree_expr_infinite_p (arg))
	return omit_one_operand_loc (loc, type, integer_one_node, arg);
      if (!tree_expr_maybe_infinite_p (arg))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);
      return NULL_TREE;

    case BUILT_IN_ISINF_SIGN:
      {
	/* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
	/* In a boolean context, GCC will fold the inner COND_EXPR to
	   1.  So e.g. "if (isinf_sign(x))" would be folded to just
	   "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))".  */
	tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
	tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
	tree tmp = NULL_TREE;

	arg = builtin_save_expr (arg);

	if (signbit_fn && isinf_fn)
	  {
	    tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
	    tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);

	    signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
					    signbit_call, integer_zero_node);
	    isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
					  isinf_call, integer_zero_node);

	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
				   integer_minus_one_node, integer_one_node);
	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
				   isinf_call, tmp,
				   integer_zero_node);
	  }

	return tmp;
      }

    case BUILT_IN_ISFINITE:
      if (tree_expr_finite_p (arg))
	return omit_one_operand_loc (loc, type, integer_one_node, arg);
      if (tree_expr_nan_p (arg) || tree_expr_infinite_p (arg))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);
      return NULL_TREE;

    case BUILT_IN_ISNAN:
      if (tree_expr_nan_p (arg))
	return omit_one_operand_loc (loc, type, integer_one_node, arg);
      if (!tree_expr_maybe_nan_p (arg))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      {
	bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
	if (is_ibm_extended)
	  {
	    /* NaN and Inf are encoded in the high-order double value
	       only.  The low-order value is not significant.  */
	    arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
	  }
      }
      arg = builtin_save_expr (arg);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);

    case BUILT_IN_ISSIGNALING:
      /* Folding to true for REAL_CST is done in fold_const_call_ss.
	 Don't use tree_expr_signaling_nan_p (arg) -> integer_one_node
	 and !tree_expr_maybe_signaling_nan_p (arg) -> integer_zero_node
	 here, so there is some possibility of __builtin_issignaling working
	 without -fsignaling-nans.  Especially when -fno-signaling-nans is
	 the default.  */
      if (!tree_expr_maybe_nan_p (arg))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);
      return NULL_TREE;

    default:
      gcc_unreachable ();
    }
}
/* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
   This builtin will generate code to return the appropriate floating
   point classification depending on the value of the floating point
   number passed in.  The possible return values must be supplied as
   int arguments to the call in the following order: FP_NAN, FP_INFINITE,
   FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipses is for exactly
   one floating point argument which is "type generic".  */

static tree
fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
{
  tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
    arg, type, res, tmp;
  machine_mode mode;
  REAL_VALUE_TYPE r;
  char buf[128];

  /* Verify the required arguments in the original call.  */
  if (nargs != 6
      || !validate_arg (args[0], INTEGER_TYPE)
      || !validate_arg (args[1], INTEGER_TYPE)
      || !validate_arg (args[2], INTEGER_TYPE)
      || !validate_arg (args[3], INTEGER_TYPE)
      || !validate_arg (args[4], INTEGER_TYPE)
      || !validate_arg (args[5], REAL_TYPE))
    return NULL_TREE;

  fp_nan = args[0];
  fp_infinite = args[1];
  fp_normal = args[2];
  fp_subnormal = args[3];
  fp_zero = args[4];
  arg = args[5];
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (type);
  arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));

  /* fpclassify(x) ->
       isnan(x) ? FP_NAN :
	 (fabs(x) == Inf ? FP_INFINITE :
	   (fabs(x) >= DBL_MIN ? FP_NORMAL :
	     (x == 0 ? FP_ZERO : FP_SUBNORMAL))).  */

  tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
			 build_real (type, dconst0));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
			 tmp, fp_zero, fp_subnormal);

  sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
  real_from_string (&r, buf);
  tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
			 arg, build_real (type, r));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);

  if (tree_expr_maybe_infinite_p (arg))
    {
      tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
			     build_real (type, dconstinf));
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
			     fp_infinite, res);
    }

  if (tree_expr_maybe_nan_p (arg))
    {
      tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
    }

  return res;
}
/* Fold a call to an unordered comparison function such as
   __builtin_isgreater().  FNDECL is the FUNCTION_DECL for the function
   being called and ARG0 and ARG1 are the arguments for the call.
   UNORDERED_CODE and ORDERED_CODE are comparison codes that give
   the opposite of the desired result.  UNORDERED_CODE is used
   for modes that can hold NaNs and ORDERED_CODE is used for
   the rest.  */

static tree
fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
			    enum tree_code unordered_code,
			    enum tree_code ordered_code)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum tree_code code;
  tree type0, type1;
  enum tree_code code0, code1;
  tree cmp_type = NULL_TREE;

  type0 = TREE_TYPE (arg0);
  type1 = TREE_TYPE (arg1);

  code0 = TREE_CODE (type0);
  code1 = TREE_CODE (type1);

  if (code0 == REAL_TYPE && code1 == REAL_TYPE)
    /* Choose the wider of two real types.  */
    cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
      ? type0 : type1;
  else if (code0 == REAL_TYPE
	   && (code1 == INTEGER_TYPE || code1 == BITINT_TYPE))
    cmp_type = type0;
  else if ((code0 == INTEGER_TYPE || code0 == BITINT_TYPE)
	   && code1 == REAL_TYPE)
    cmp_type = type1;

  arg0 = fold_convert_loc (loc, cmp_type, arg0);
  arg1 = fold_convert_loc (loc, cmp_type, arg1);

  if (unordered_code == UNORDERED_EXPR)
    {
      if (tree_expr_nan_p (arg0) || tree_expr_nan_p (arg1))
	return omit_two_operands_loc (loc, type, integer_one_node, arg0, arg1);
      if (!tree_expr_maybe_nan_p (arg0) && !tree_expr_maybe_nan_p (arg1))
	return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
    }

  code = (tree_expr_maybe_nan_p (arg0) || tree_expr_maybe_nan_p (arg1))
	 ? unordered_code : ordered_code;
  return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
			  fold_build2_loc (loc, code, type, arg0, arg1));
}
/* Fold a call to __builtin_iseqsig().  ARG0 and ARG1 are the arguments.
   After choosing the wider floating-point type for the comparison,
   the code is folded to:
     SAVE_EXPR<ARG0> >= SAVE_EXPR<ARG1> && SAVE_EXPR<ARG0> <= SAVE_EXPR<ARG1>  */

static tree
fold_builtin_iseqsig (location_t loc, tree arg0, tree arg1)
{
  tree type0, type1;
  enum tree_code code0, code1;
  tree cmp1, cmp2, cmp_type = NULL_TREE;

  type0 = TREE_TYPE (arg0);
  type1 = TREE_TYPE (arg1);

  code0 = TREE_CODE (type0);
  code1 = TREE_CODE (type1);

  if (code0 == REAL_TYPE && code1 == REAL_TYPE)
    /* Choose the wider of two real types.  */
    cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
      ? type0 : type1;
  else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
    cmp_type = type0;
  else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
    cmp_type = type1;

  arg0 = builtin_save_expr (fold_convert_loc (loc, cmp_type, arg0));
  arg1 = builtin_save_expr (fold_convert_loc (loc, cmp_type, arg1));

  cmp1 = fold_build2_loc (loc, GE_EXPR, integer_type_node, arg0, arg1);
  cmp2 = fold_build2_loc (loc, LE_EXPR, integer_type_node, arg0, arg1);

  return fold_build2_loc (loc, TRUTH_AND_EXPR, integer_type_node, cmp1, cmp2);
}
9490 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9491 arithmetics if it can never overflow, or into internal functions that
9492 return both result of arithmetics and overflowed boolean flag in
9493 a complex integer result, or some other check for overflow.
9494 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
9495 checking part of that. */
9498 fold_builtin_arith_overflow (location_t loc
, enum built_in_function fcode
,
9499 tree arg0
, tree arg1
, tree arg2
)
9501 enum internal_fn ifn
= IFN_LAST
;
9502 /* The code of the expression corresponding to the built-in. */
9503 enum tree_code opcode
= ERROR_MARK
;
9504 bool ovf_only
= false;
9508 case BUILT_IN_ADD_OVERFLOW_P
:
9511 case BUILT_IN_ADD_OVERFLOW
:
9512 case BUILT_IN_SADD_OVERFLOW
:
9513 case BUILT_IN_SADDL_OVERFLOW
:
9514 case BUILT_IN_SADDLL_OVERFLOW
:
9515 case BUILT_IN_UADD_OVERFLOW
:
9516 case BUILT_IN_UADDL_OVERFLOW
:
9517 case BUILT_IN_UADDLL_OVERFLOW
:
9519 ifn
= IFN_ADD_OVERFLOW
;
9521 case BUILT_IN_SUB_OVERFLOW_P
:
9524 case BUILT_IN_SUB_OVERFLOW
:
9525 case BUILT_IN_SSUB_OVERFLOW
:
9526 case BUILT_IN_SSUBL_OVERFLOW
:
9527 case BUILT_IN_SSUBLL_OVERFLOW
:
9528 case BUILT_IN_USUB_OVERFLOW
:
9529 case BUILT_IN_USUBL_OVERFLOW
:
9530 case BUILT_IN_USUBLL_OVERFLOW
:
9531 opcode
= MINUS_EXPR
;
9532 ifn
= IFN_SUB_OVERFLOW
;
9534 case BUILT_IN_MUL_OVERFLOW_P
:
9537 case BUILT_IN_MUL_OVERFLOW
:
9538 case BUILT_IN_SMUL_OVERFLOW
:
9539 case BUILT_IN_SMULL_OVERFLOW
:
9540 case BUILT_IN_SMULLL_OVERFLOW
:
9541 case BUILT_IN_UMUL_OVERFLOW
:
9542 case BUILT_IN_UMULL_OVERFLOW
:
9543 case BUILT_IN_UMULLL_OVERFLOW
:
9545 ifn
= IFN_MUL_OVERFLOW
;
9551 /* For the "generic" overloads, the first two arguments can have different
9552 types and the last argument determines the target type to use to check
9553 for overflow. The arguments of the other overloads all have the same
9555 tree type
= ovf_only
? TREE_TYPE (arg2
) : TREE_TYPE (TREE_TYPE (arg2
));
  /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
     arguments are constant, attempt to fold the built-in call into a constant
     expression indicating whether or not it detected an overflow.  */
  if (ovf_only
      && TREE_CODE (arg0) == INTEGER_CST
      && TREE_CODE (arg1) == INTEGER_CST)
    /* Perform the computation in the target type and check for overflow.  */
    return omit_one_operand_loc (loc, boolean_type_node,
				 arith_overflowed_p (opcode, type, arg0, arg1)
				 ? boolean_true_node : boolean_false_node,
				 arg2);
  tree intres, ovfres;
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      intres = fold_binary_loc (loc, opcode, type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type, arg1));
      if (TREE_OVERFLOW (intres))
	intres = drop_tree_overflow (intres);
      ovfres = (arith_overflowed_p (opcode, type, arg0, arg1)
		? boolean_true_node : boolean_false_node);
    }
  else
    {
      tree ctype = build_complex_type (type);
      tree call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
						arg0, arg1);
      tree tgt = save_expr (call);
      intres = build1_loc (loc, REALPART_EXPR, type, tgt);
      ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
      ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
    }

  if (ovf_only)
    return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);

  tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
  tree store
    = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
  return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
}
/* Fold __builtin_{add,sub}c{,l,ll} into pair of internal functions
   that return both result of arithmetics and overflowed boolean
   flag in a complex integer result.  */
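/* For example, with unsigned operands,
     sum = __builtin_addc (a, b, carry_in, &carry_out)
   computes a + b + carry_in and sets carry_out to 1 if either of the two
   additions wrapped around, and to 0 otherwise.  The folding below builds
   two .ADD_OVERFLOW internal calls and ORs their overflow flags
   together.  */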
static tree
fold_builtin_addc_subc (location_t loc, enum built_in_function fcode,
			tree *args)
{
  enum internal_fn ifn;

  switch (fcode)
    {
    case BUILT_IN_ADDC:
    case BUILT_IN_ADDCL:
    case BUILT_IN_ADDCLL:
      ifn = IFN_ADD_OVERFLOW;
      break;
    case BUILT_IN_SUBC:
    case BUILT_IN_SUBCL:
    case BUILT_IN_SUBCLL:
      ifn = IFN_SUB_OVERFLOW;
      break;
    default:
      gcc_unreachable ();
    }

  tree type = TREE_TYPE (args[0]);
  tree ctype = build_complex_type (type);
  tree call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
					    args[0], args[1]);
  tree tgt = save_expr (call);
  tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
  tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
  call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
				       intres, args[2]);
  tgt = save_expr (call);
  intres = build1_loc (loc, REALPART_EXPR, type, tgt);
  tree ovfres2 = build1_loc (loc, IMAGPART_EXPR, type, tgt);
  ovfres = build2_loc (loc, BIT_IOR_EXPR, type, ovfres, ovfres2);
  tree mem_arg3 = build_fold_indirect_ref_loc (loc, args[3]);
  tree store
    = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg3, ovfres);
  return build2_loc (loc, COMPOUND_EXPR, type, store, intres);
}
/* Fold a call to __builtin_FILE to a constant string.  */

static inline tree
fold_builtin_FILE (location_t loc)
{
  if (const char *fname = LOCATION_FILE (loc))
    {
      /* The documentation says this builtin is equivalent to the preprocessor
	 __FILE__ macro so it appears appropriate to use the same file prefix
	 mappings.  */
      fname = remap_macro_filename (fname);
      return build_string_literal (fname);
    }

  return build_string_literal ("");
}
/* Fold a call to __builtin_FUNCTION to a constant string.  */

static inline tree
fold_builtin_FUNCTION ()
{
  const char *name = "";

  if (current_function_decl)
    name = lang_hooks.decl_printable_name (current_function_decl, 0);

  return build_string_literal (name);
}
/* Fold a call to __builtin_LINE to an integer constant.  */

static inline tree
fold_builtin_LINE (location_t loc, tree type)
{
  return build_int_cst (type, LOCATION_LINE (loc));
}
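/* For example, in a translation unit compiled from dir/test.c the calls
     const char *file = __builtin_FILE ();      /- "dir/test.c" -/
     const char *func = __builtin_FUNCTION ();  /- enclosing function name -/
     int line = __builtin_LINE ();              /- line of the call -/
   fold to constants describing the call site.  Unlike __FILE__ and
   __LINE__, when such a call appears in a C++ default argument it reports
   the location of the caller rather than of the declaration, which is what
   makes these builtins usable for std::source_location.  */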
/* Fold a call to built-in function FNDECL with 0 arguments.
   This function returns NULL_TREE if no simplification was possible.  */
9687 fold_builtin_0 (location_t loc
, tree fndecl
)
9689 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9690 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
9694 return fold_builtin_FILE (loc
);
9696 case BUILT_IN_FUNCTION
:
9697 return fold_builtin_FUNCTION ();
9700 return fold_builtin_LINE (loc
, type
);
9702 CASE_FLT_FN (BUILT_IN_INF
):
9703 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF
):
9704 case BUILT_IN_INFD32
:
9705 case BUILT_IN_INFD64
:
9706 case BUILT_IN_INFD128
:
9707 return fold_builtin_inf (loc
, type
, true);
9709 CASE_FLT_FN (BUILT_IN_HUGE_VAL
):
9710 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL
):
9711 return fold_builtin_inf (loc
, type
, false);
9713 case BUILT_IN_CLASSIFY_TYPE
:
9714 return fold_builtin_classify_type (NULL_TREE
);
9716 case BUILT_IN_UNREACHABLE
:
9717 /* Rewrite any explicit calls to __builtin_unreachable. */
9718 if (sanitize_flags_p (SANITIZE_UNREACHABLE
))
9719 return build_builtin_unreachable (loc
);
/* Fold a call to built-in function FNDECL with 1 argument, ARG0.
   This function returns NULL_TREE if no simplification was possible.  */
9732 fold_builtin_1 (location_t loc
, tree expr
, tree fndecl
, tree arg0
)
9734 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9735 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
9737 if (TREE_CODE (arg0
) == ERROR_MARK
)
9740 if (tree ret
= fold_const_call (as_combined_fn (fcode
), type
, arg0
))
9745 case BUILT_IN_CONSTANT_P
:
9747 tree val
= fold_builtin_constant_p (arg0
);
	/* Gimplification will pull the CALL_EXPR for the builtin out of
	   an if condition.  When not optimizing, we'll not CSE it back.
	   To avoid link error types of regressions, return false now.  */
	if (!val && !optimize)
	  val = integer_zero_node;
9758 case BUILT_IN_CLASSIFY_TYPE
:
9759 return fold_builtin_classify_type (arg0
);
9761 case BUILT_IN_STRLEN
:
9762 return fold_builtin_strlen (loc
, expr
, type
, arg0
);
9764 CASE_FLT_FN (BUILT_IN_FABS
):
9765 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS
):
9766 case BUILT_IN_FABSD32
:
9767 case BUILT_IN_FABSD64
:
9768 case BUILT_IN_FABSD128
:
9769 return fold_builtin_fabs (loc
, arg0
, type
);
9773 case BUILT_IN_LLABS
:
9774 case BUILT_IN_IMAXABS
:
9775 return fold_builtin_abs (loc
, arg0
, type
);
9777 CASE_FLT_FN (BUILT_IN_CONJ
):
9778 if (validate_arg (arg0
, COMPLEX_TYPE
)
9779 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9780 return fold_build1_loc (loc
, CONJ_EXPR
, type
, arg0
);
9783 CASE_FLT_FN (BUILT_IN_CREAL
):
9784 if (validate_arg (arg0
, COMPLEX_TYPE
)
9785 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9786 return non_lvalue_loc (loc
, fold_build1_loc (loc
, REALPART_EXPR
, type
, arg0
));
9789 CASE_FLT_FN (BUILT_IN_CIMAG
):
9790 if (validate_arg (arg0
, COMPLEX_TYPE
)
9791 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9792 return non_lvalue_loc (loc
, fold_build1_loc (loc
, IMAGPART_EXPR
, type
, arg0
));
9795 CASE_FLT_FN (BUILT_IN_CARG
):
9796 CASE_FLT_FN_FLOATN_NX (BUILT_IN_CARG
):
9797 return fold_builtin_carg (loc
, arg0
, type
);
9799 case BUILT_IN_ISASCII
:
9800 return fold_builtin_isascii (loc
, arg0
);
9802 case BUILT_IN_TOASCII
:
9803 return fold_builtin_toascii (loc
, arg0
);
9805 case BUILT_IN_ISDIGIT
:
9806 return fold_builtin_isdigit (loc
, arg0
);
9808 CASE_FLT_FN (BUILT_IN_FINITE
):
9809 case BUILT_IN_FINITED32
:
9810 case BUILT_IN_FINITED64
:
9811 case BUILT_IN_FINITED128
:
9812 case BUILT_IN_ISFINITE
:
9814 tree ret
= fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISFINITE
);
9817 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
9820 CASE_FLT_FN (BUILT_IN_ISINF
):
9821 case BUILT_IN_ISINFD32
:
9822 case BUILT_IN_ISINFD64
:
9823 case BUILT_IN_ISINFD128
:
9825 tree ret
= fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISINF
);
9828 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
9831 case BUILT_IN_ISNORMAL
:
9832 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
9834 case BUILT_IN_ISINF_SIGN
:
9835 return fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISINF_SIGN
);
9837 CASE_FLT_FN (BUILT_IN_ISNAN
):
9838 case BUILT_IN_ISNAND32
:
9839 case BUILT_IN_ISNAND64
:
9840 case BUILT_IN_ISNAND128
:
9841 return fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISNAN
);
9843 case BUILT_IN_ISSIGNALING
:
9844 return fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISSIGNALING
);
9847 if (integer_zerop (arg0
))
9848 return build_empty_stmt (loc
);
/* Folds a call EXPR (which may be null) to built-in function FNDECL
   with 2 arguments, ARG0 and ARG1.  This function returns NULL_TREE
   if no simplification was possible.  */
9864 fold_builtin_2 (location_t loc
, tree expr
, tree fndecl
, tree arg0
, tree arg1
)
9866 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9867 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
9869 if (TREE_CODE (arg0
) == ERROR_MARK
9870 || TREE_CODE (arg1
) == ERROR_MARK
)
9873 if (tree ret
= fold_const_call (as_combined_fn (fcode
), type
, arg0
, arg1
))
9878 CASE_FLT_FN_REENT (BUILT_IN_GAMMA
): /* GAMMA_R */
9879 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA
): /* LGAMMA_R */
9880 if (validate_arg (arg0
, REAL_TYPE
)
9881 && validate_arg (arg1
, POINTER_TYPE
))
9882 return do_mpfr_lgamma_r (arg0
, arg1
, type
);
9885 CASE_FLT_FN (BUILT_IN_FREXP
):
9886 return fold_builtin_frexp (loc
, arg0
, arg1
, type
);
9888 CASE_FLT_FN (BUILT_IN_MODF
):
9889 return fold_builtin_modf (loc
, arg0
, arg1
, type
);
9891 case BUILT_IN_STRSPN
:
9892 return fold_builtin_strspn (loc
, expr
, arg0
, arg1
);
9894 case BUILT_IN_STRCSPN
:
9895 return fold_builtin_strcspn (loc
, expr
, arg0
, arg1
);
9897 case BUILT_IN_STRPBRK
:
9898 return fold_builtin_strpbrk (loc
, expr
, arg0
, arg1
, type
);
9900 case BUILT_IN_EXPECT
:
9901 return fold_builtin_expect (loc
, arg0
, arg1
, NULL_TREE
, NULL_TREE
);
9903 case BUILT_IN_ISGREATER
:
9904 return fold_builtin_unordered_cmp (loc
, fndecl
,
9905 arg0
, arg1
, UNLE_EXPR
, LE_EXPR
);
9906 case BUILT_IN_ISGREATEREQUAL
:
9907 return fold_builtin_unordered_cmp (loc
, fndecl
,
9908 arg0
, arg1
, UNLT_EXPR
, LT_EXPR
);
9909 case BUILT_IN_ISLESS
:
9910 return fold_builtin_unordered_cmp (loc
, fndecl
,
9911 arg0
, arg1
, UNGE_EXPR
, GE_EXPR
);
9912 case BUILT_IN_ISLESSEQUAL
:
9913 return fold_builtin_unordered_cmp (loc
, fndecl
,
9914 arg0
, arg1
, UNGT_EXPR
, GT_EXPR
);
9915 case BUILT_IN_ISLESSGREATER
:
9916 return fold_builtin_unordered_cmp (loc
, fndecl
,
9917 arg0
, arg1
, UNEQ_EXPR
, EQ_EXPR
);
9918 case BUILT_IN_ISUNORDERED
:
9919 return fold_builtin_unordered_cmp (loc
, fndecl
,
9920 arg0
, arg1
, UNORDERED_EXPR
,
9923 case BUILT_IN_ISEQSIG
:
9924 return fold_builtin_iseqsig (loc
, arg0
, arg1
);
9926 /* We do the folding for va_start in the expander. */
9927 case BUILT_IN_VA_START
:
9930 case BUILT_IN_OBJECT_SIZE
:
9931 case BUILT_IN_DYNAMIC_OBJECT_SIZE
:
9932 return fold_builtin_object_size (arg0
, arg1
, fcode
);
9934 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE
:
9935 return fold_builtin_atomic_always_lock_free (arg0
, arg1
);
9937 case BUILT_IN_ATOMIC_IS_LOCK_FREE
:
9938 return fold_builtin_atomic_is_lock_free (arg0
, arg1
);
/* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
   and ARG2.
   This function returns NULL_TREE if no simplification was possible.  */
9951 fold_builtin_3 (location_t loc
, tree fndecl
,
9952 tree arg0
, tree arg1
, tree arg2
)
9954 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9955 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
9957 if (TREE_CODE (arg0
) == ERROR_MARK
9958 || TREE_CODE (arg1
) == ERROR_MARK
9959 || TREE_CODE (arg2
) == ERROR_MARK
)
9962 if (tree ret
= fold_const_call (as_combined_fn (fcode
), type
,
9969 CASE_FLT_FN (BUILT_IN_SINCOS
):
9970 return fold_builtin_sincos (loc
, arg0
, arg1
, arg2
);
9972 CASE_FLT_FN (BUILT_IN_REMQUO
):
9973 if (validate_arg (arg0
, REAL_TYPE
)
9974 && validate_arg (arg1
, REAL_TYPE
)
9975 && validate_arg (arg2
, POINTER_TYPE
))
9976 return do_mpfr_remquo (arg0
, arg1
, arg2
);
9979 case BUILT_IN_MEMCMP
:
9980 return fold_builtin_memcmp (loc
, arg0
, arg1
, arg2
);
9982 case BUILT_IN_EXPECT
:
9983 return fold_builtin_expect (loc
, arg0
, arg1
, arg2
, NULL_TREE
);
9985 case BUILT_IN_EXPECT_WITH_PROBABILITY
:
9986 return fold_builtin_expect (loc
, arg0
, arg1
, NULL_TREE
, arg2
);
9988 case BUILT_IN_ADD_OVERFLOW
:
9989 case BUILT_IN_SUB_OVERFLOW
:
9990 case BUILT_IN_MUL_OVERFLOW
:
9991 case BUILT_IN_ADD_OVERFLOW_P
:
9992 case BUILT_IN_SUB_OVERFLOW_P
:
9993 case BUILT_IN_MUL_OVERFLOW_P
:
9994 case BUILT_IN_SADD_OVERFLOW
:
9995 case BUILT_IN_SADDL_OVERFLOW
:
9996 case BUILT_IN_SADDLL_OVERFLOW
:
9997 case BUILT_IN_SSUB_OVERFLOW
:
9998 case BUILT_IN_SSUBL_OVERFLOW
:
9999 case BUILT_IN_SSUBLL_OVERFLOW
:
10000 case BUILT_IN_SMUL_OVERFLOW
:
10001 case BUILT_IN_SMULL_OVERFLOW
:
10002 case BUILT_IN_SMULLL_OVERFLOW
:
10003 case BUILT_IN_UADD_OVERFLOW
:
10004 case BUILT_IN_UADDL_OVERFLOW
:
10005 case BUILT_IN_UADDLL_OVERFLOW
:
10006 case BUILT_IN_USUB_OVERFLOW
:
10007 case BUILT_IN_USUBL_OVERFLOW
:
10008 case BUILT_IN_USUBLL_OVERFLOW
:
10009 case BUILT_IN_UMUL_OVERFLOW
:
10010 case BUILT_IN_UMULL_OVERFLOW
:
10011 case BUILT_IN_UMULLL_OVERFLOW
:
10012 return fold_builtin_arith_overflow (loc
, fcode
, arg0
, arg1
, arg2
);
/* Folds a call EXPR (which may be null) to built-in function FNDECL.
   ARGS is an array of NARGS arguments.  IGNORE is true if the result
   of the function call is ignored.  This function returns NULL_TREE
   if no simplification was possible.  */
10026 fold_builtin_n (location_t loc
, tree expr
, tree fndecl
, tree
*args
,
10029 tree ret
= NULL_TREE
;
10034 ret
= fold_builtin_0 (loc
, fndecl
);
10037 ret
= fold_builtin_1 (loc
, expr
, fndecl
, args
[0]);
10040 ret
= fold_builtin_2 (loc
, expr
, fndecl
, args
[0], args
[1]);
10043 ret
= fold_builtin_3 (loc
, fndecl
, args
[0], args
[1], args
[2]);
10046 ret
= fold_builtin_varargs (loc
, fndecl
, args
, nargs
);
10051 ret
= build1 (NOP_EXPR
, TREE_TYPE (ret
), ret
);
10052 SET_EXPR_LOCATION (ret
, loc
);
/* Construct a new CALL_EXPR to FNDECL using the tail of the argument
   list ARGS along with N new arguments in NEWARGS.  SKIP is the number
   of arguments in ARGS to be omitted.  OLDNARGS is the number of
   elements in ARGS.  */
10064 rewrite_call_expr_valist (location_t loc
, int oldnargs
, tree
*args
,
10065 int skip
, tree fndecl
, int n
, va_list newargs
)
10067 int nargs
= oldnargs
- skip
+ n
;
10074 buffer
= XALLOCAVEC (tree
, nargs
);
10075 for (i
= 0; i
< n
; i
++)
10076 buffer
[i
] = va_arg (newargs
, tree
);
10077 for (j
= skip
; j
< oldnargs
; j
++, i
++)
10078 buffer
[i
] = args
[j
];
10081 buffer
= args
+ skip
;
10083 return build_call_expr_loc_array (loc
, fndecl
, nargs
, buffer
);
/* Return true if FNDECL shouldn't be folded right now.
   If a built-in function has an inline attribute always_inline
   wrapper, defer folding it after always_inline functions have
   been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
   might not be performed.  */
10093 avoid_folding_inline_builtin (tree fndecl
)
10095 return (DECL_DECLARED_INLINE_P (fndecl
)
10096 && DECL_DISREGARD_INLINE_LIMITS (fndecl
)
10098 && !cfun
->always_inline_functions_inlined
10099 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl
)));
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */
10107 fold_call_expr (location_t loc
, tree exp
, bool ignore
)
10109 tree ret
= NULL_TREE
;
10110 tree fndecl
= get_callee_fndecl (exp
);
10111 if (fndecl
&& fndecl_built_in_p (fndecl
)
10112 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10113 yet. Defer folding until we see all the arguments
10114 (after inlining). */
10115 && !CALL_EXPR_VA_ARG_PACK (exp
))
10117 int nargs
= call_expr_nargs (exp
);
10119 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10120 instead last argument is __builtin_va_arg_pack (). Defer folding
10121 even in that case, until arguments are finalized. */
10122 if (nargs
&& TREE_CODE (CALL_EXPR_ARG (exp
, nargs
- 1)) == CALL_EXPR
)
10124 tree fndecl2
= get_callee_fndecl (CALL_EXPR_ARG (exp
, nargs
- 1));
10125 if (fndecl2
&& fndecl_built_in_p (fndecl2
, BUILT_IN_VA_ARG_PACK
))
10129 if (avoid_folding_inline_builtin (fndecl
))
10132 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
10133 return targetm
.fold_builtin (fndecl
, call_expr_nargs (exp
),
10134 CALL_EXPR_ARGP (exp
), ignore
);
10137 tree
*args
= CALL_EXPR_ARGP (exp
);
10138 ret
= fold_builtin_n (loc
, exp
, fndecl
, args
, nargs
, ignore
);
/* Fold a CALL_EXPR with type TYPE with FN as the function expression.
   N arguments are passed in the array ARGARRAY.  Return a folded
   expression or NULL_TREE if no simplification was possible.  */
10151 fold_builtin_call_array (location_t loc
, tree
,
10156 if (TREE_CODE (fn
) != ADDR_EXPR
)
10159 tree fndecl
= TREE_OPERAND (fn
, 0);
10160 if (TREE_CODE (fndecl
) == FUNCTION_DECL
10161 && fndecl_built_in_p (fndecl
))
10163 /* If last argument is __builtin_va_arg_pack (), arguments to this
10164 function are not finalized yet. Defer folding until they are. */
10165 if (n
&& TREE_CODE (argarray
[n
- 1]) == CALL_EXPR
)
10167 tree fndecl2
= get_callee_fndecl (argarray
[n
- 1]);
10168 if (fndecl2
&& fndecl_built_in_p (fndecl2
, BUILT_IN_VA_ARG_PACK
))
10171 if (avoid_folding_inline_builtin (fndecl
))
10173 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
10174 return targetm
.fold_builtin (fndecl
, n
, argarray
, false);
10176 return fold_builtin_n (loc
, NULL_TREE
, fndecl
, argarray
, n
, false);
/* Construct a new CALL_EXPR using the tail of the argument list of EXP
   along with N new arguments specified as the "..." parameters.  SKIP
   is the number of arguments in EXP to be omitted.  This function is used
   to do varargs-to-varargs transformations.  */
10188 rewrite_call_expr (location_t loc
, tree exp
, int skip
, tree fndecl
, int n
, ...)
10194 t
= rewrite_call_expr_valist (loc
, call_expr_nargs (exp
),
10195 CALL_EXPR_ARGP (exp
), skip
, fndecl
, n
, ap
);
/* Validate a single argument ARG against a tree code CODE representing
   a type.  Return true when argument is valid.  */

static bool
validate_arg (const_tree arg, enum tree_code code)
{
  if (!arg)
    return false;
  else if (code == POINTER_TYPE)
    return POINTER_TYPE_P (TREE_TYPE (arg));
  else if (code == INTEGER_TYPE)
    return INTEGRAL_TYPE_P (TREE_TYPE (arg));
  return code == TREE_CODE (TREE_TYPE (arg));
}
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.

   This is the GIMPLE version of validate_arglist.  Eventually we want to
   completely convert builtins.cc to work from GIMPLEs and the tree based
   validate_arglist will then be removed.  */
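/* For example, a call that must take exactly one floating-point argument
   followed by one pointer argument would be checked with
     validate_gimple_arglist (call, REAL_TYPE, POINTER_TYPE, VOID_TYPE)
   whereas passing 0 instead of the trailing VOID_TYPE would accept any
   further arguments after the listed ones.  */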
10226 validate_gimple_arglist (const gcall
*call
, ...)
10228 enum tree_code code
;
10234 va_start (ap
, call
);
10239 code
= (enum tree_code
) va_arg (ap
, int);
10243 /* This signifies an ellipses, any further arguments are all ok. */
10247 /* This signifies an endlink, if no arguments remain, return
10248 true, otherwise return false. */
10249 res
= (i
== gimple_call_num_args (call
));
10252 /* If no parameters remain or the parameter's code does not
10253 match the specified code, return false. Otherwise continue
10254 checking any remaining arguments. */
10255 arg
= gimple_call_arg (call
, i
++);
10256 if (!validate_arg (arg
, code
))
10263 /* We need gotos here since we can only have one VA_CLOSE in a
/* Default target-specific builtin expander that does nothing.  */

rtx
default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
			rtx target ATTRIBUTE_UNUSED,
			rtx subtarget ATTRIBUTE_UNUSED,
			machine_mode mode ATTRIBUTE_UNUSED,
			int ignore ATTRIBUTE_UNUSED)
{
  return NULL_RTX;
}
/* Returns true if EXP represents data that would potentially reside
   in a readonly section.  */
10287 readonly_data_expr (tree exp
)
10291 if (TREE_CODE (exp
) != ADDR_EXPR
)
10294 exp
= get_base_address (TREE_OPERAND (exp
, 0));
10298 /* Make sure we call decl_readonly_section only for trees it
10299 can handle (since it returns true for everything it doesn't
10301 if (TREE_CODE (exp
) == STRING_CST
10302 || TREE_CODE (exp
) == CONSTRUCTOR
10303 || (VAR_P (exp
) && TREE_STATIC (exp
)))
10304 return decl_readonly_section (exp
, 0);
/* Simplify a call to the strpbrk builtin.  S1 and S2 are the arguments
   to the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */
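/* For example, when both arguments are string literals,
     strpbrk ("hello", "lo")
   folds to a constant offset into the first argument (here the pointer to
   the "llo" suffix); strpbrk (s, "") folds to a null pointer after
   evaluating S for its side effects; and strpbrk (s, "w"), with a single
   character to search for, is rewritten as strchr (s, 'w').  */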
10328 fold_builtin_strpbrk (location_t loc
, tree
, tree s1
, tree s2
, tree type
)
10330 if (!validate_arg (s1
, POINTER_TYPE
)
10331 || !validate_arg (s2
, POINTER_TYPE
))
10335 const char *p1
, *p2
;
10337 p2
= c_getstr (s2
);
10341 p1
= c_getstr (s1
);
10344 const char *r
= strpbrk (p1
, p2
);
10348 return build_int_cst (TREE_TYPE (s1
), 0);
10350 /* Return an offset into the constant string argument. */
10351 tem
= fold_build_pointer_plus_hwi_loc (loc
, s1
, r
- p1
);
10352 return fold_convert_loc (loc
, type
, tem
);
10356 /* strpbrk(x, "") == NULL.
10357 Evaluate and ignore s1 in case it had side-effects. */
10358 return omit_one_operand_loc (loc
, type
, integer_zero_node
, s1
);
10361 return NULL_TREE
; /* Really call strpbrk. */
10363 fn
= builtin_decl_implicit (BUILT_IN_STRCHR
);
10367 /* New argument list transforming strpbrk(s1, s2) to
10368 strchr(s1, s2[0]). */
10369 return build_call_expr_loc (loc
, fn
, 2, s1
,
10370 build_int_cst (integer_type_node
, p2
[0]));
/* Simplify a call to the strspn builtin.  S1 and S2 are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */
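/* For example, strspn (s, "") and strspn ("", accept) both fold to
   (size_t) 0, with the other argument still evaluated for its side
   effects; any other form is left for the library call.  */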
10392 fold_builtin_strspn (location_t loc
, tree expr
, tree s1
, tree s2
)
10394 if (!validate_arg (s1
, POINTER_TYPE
)
10395 || !validate_arg (s2
, POINTER_TYPE
))
10398 if (!check_nul_terminated_array (expr
, s1
)
10399 || !check_nul_terminated_array (expr
, s2
))
10402 const char *p1
= c_getstr (s1
), *p2
= c_getstr (s2
);
10404 /* If either argument is "", return NULL_TREE. */
10405 if ((p1
&& *p1
== '\0') || (p2
&& *p2
== '\0'))
10406 /* Evaluate and ignore both arguments in case either one has
10408 return omit_two_operands_loc (loc
, size_type_node
, size_zero_node
,
/* Simplify a call to the strcspn builtin.  S1 and S2 are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */
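/* For example, strcspn ("", reject) folds to (size_t) 0 after evaluating
   the second argument for its side effects, and strcspn (s, "") becomes a
   call to strlen (s), since with nothing to reject the whole string is
   counted.  */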
10432 fold_builtin_strcspn (location_t loc
, tree expr
, tree s1
, tree s2
)
10434 if (!validate_arg (s1
, POINTER_TYPE
)
10435 || !validate_arg (s2
, POINTER_TYPE
))
10438 if (!check_nul_terminated_array (expr
, s1
)
10439 || !check_nul_terminated_array (expr
, s2
))
10442 /* If the first argument is "", return NULL_TREE. */
10443 const char *p1
= c_getstr (s1
);
10444 if (p1
&& *p1
== '\0')
10446 /* Evaluate and ignore argument s2 in case it has
10448 return omit_one_operand_loc (loc
, size_type_node
,
10449 size_zero_node
, s2
);
10452 /* If the second argument is "", return __builtin_strlen(s1). */
10453 const char *p2
= c_getstr (s2
);
10454 if (p2
&& *p2
== '\0')
10456 tree fn
= builtin_decl_implicit (BUILT_IN_STRLEN
);
10458 /* If the replacement _DECL isn't initialized, don't do the
10463 return build_call_expr_loc (loc
, fn
, 1, s1
);
/* Fold the next_arg or va_start call EXP.  Returns true if there was an error
   produced.  False otherwise.  This is done so that we don't output the error
   or warning twice or three times.  */
10473 fold_builtin_next_arg (tree exp
, bool va_start_p
)
10475 tree fntype
= TREE_TYPE (current_function_decl
);
10476 int nargs
= call_expr_nargs (exp
);
10478 /* There is good chance the current input_location points inside the
10479 definition of the va_start macro (perhaps on the token for
10480 builtin) in a system header, so warnings will not be emitted.
10481 Use the location in real source code. */
10482 location_t current_location
=
10483 linemap_unwind_to_first_non_reserved_loc (line_table
, input_location
,
10486 if (!stdarg_p (fntype
))
10488 error ("%<va_start%> used in function with fixed arguments");
10494 if (va_start_p
&& (nargs
!= 2))
10496 error ("wrong number of arguments to function %<va_start%>");
10499 arg
= CALL_EXPR_ARG (exp
, 1);
10501 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
10502 when we checked the arguments and if needed issued a warning. */
10507 /* Evidently an out of date version of <stdarg.h>; can't validate
10508 va_start's second argument, but can still work as intended. */
10509 warning_at (current_location
,
10511 "%<__builtin_next_arg%> called without an argument");
10514 else if (nargs
> 1)
10516 error ("wrong number of arguments to function %<__builtin_next_arg%>");
10519 arg
= CALL_EXPR_ARG (exp
, 0);
10522 if (TREE_CODE (arg
) == SSA_NAME
10523 && SSA_NAME_VAR (arg
))
10524 arg
= SSA_NAME_VAR (arg
);
10526 /* We destructively modify the call to be __builtin_va_start (ap, 0)
10527 or __builtin_next_arg (0) the first time we see it, after checking
10528 the arguments and if needed issuing a warning. */
10529 if (!integer_zerop (arg
))
10531 tree last_parm
= tree_last (DECL_ARGUMENTS (current_function_decl
));
10533 /* Strip off all nops for the sake of the comparison. This
10534 is not quite the same as STRIP_NOPS. It does more.
10535 We must also strip off INDIRECT_EXPR for C++ reference
10537 while (CONVERT_EXPR_P (arg
)
10538 || INDIRECT_REF_P (arg
))
10539 arg
= TREE_OPERAND (arg
, 0);
10540 if (arg
!= last_parm
)
10542 /* FIXME: Sometimes with the tree optimizers we can get the
10543 not the last argument even though the user used the last
10544 argument. We just warn and set the arg to be the last
10545 argument so that we will get wrong-code because of
10547 warning_at (current_location
,
10549 "second parameter of %<va_start%> not last named argument");
10552 /* Undefined by C99 7.15.1.4p4 (va_start):
10553 "If the parameter parmN is declared with the register storage
10554 class, with a function or array type, or with a type that is
10555 not compatible with the type that results after application of
10556 the default argument promotions, the behavior is undefined."
10558 else if (DECL_REGISTER (arg
))
10560 warning_at (current_location
,
10562 "undefined behavior when second parameter of "
10563 "%<va_start%> is declared with %<register%> storage");
10566 /* We want to verify the second parameter just once before the tree
10567 optimizers are run and then avoid keeping it in the tree,
10568 as otherwise we could warn even for correct code like:
10569 void foo (int i, ...)
10570 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
10572 CALL_EXPR_ARG (exp
, 1) = integer_zero_node
;
10574 CALL_EXPR_ARG (exp
, 0) = integer_zero_node
;
10580 /* Expand a call EXP to __builtin_object_size. */
10583 expand_builtin_object_size (tree exp
)
10586 int object_size_type
;
10587 tree fndecl
= get_callee_fndecl (exp
);
10589 if (!validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
10591 error ("first argument of %qD must be a pointer, second integer constant",
10593 expand_builtin_trap ();
10597 ost
= CALL_EXPR_ARG (exp
, 1);
10600 if (TREE_CODE (ost
) != INTEGER_CST
10601 || tree_int_cst_sgn (ost
) < 0
10602 || compare_tree_int (ost
, 3) > 0)
10604 error ("last argument of %qD is not integer constant between 0 and 3",
10606 expand_builtin_trap ();
10610 object_size_type
= tree_to_shwi (ost
);
  return object_size_type < 2 ? constm1_rtx : const0_rtx;
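/* For example, for a pointer whose object cannot be determined here,
     __builtin_object_size (p, 0)   expands to (size_t) -1
     __builtin_object_size (p, 2)   expands to (size_t) 0
   i.e. the maximum-size variants (types 0 and 1) fall back to "unlimited"
   and the minimum-size variants (types 2 and 3) fall back to "empty",
   which is what the constm1_rtx/const0_rtx result above implements.  */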
/* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   FCODE is the BUILT_IN_* to use.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */
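/* For example, when the length is a known constant that fits in the
   destination, the checking call
     __builtin___memcpy_chk (dst, src, 16, __builtin_object_size (dst, 0))
   is expanded as a plain memcpy (dst, src, 16); if instead the object size
   is known and smaller than the length, the call is left alone so the
   runtime check (and the compile-time warning) still happen.  */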
10622 expand_builtin_memory_chk (tree exp
, rtx target
, machine_mode mode
,
10623 enum built_in_function fcode
)
10625 if (!validate_arglist (exp
,
10627 fcode
== BUILT_IN_MEMSET_CHK
10628 ? INTEGER_TYPE
: POINTER_TYPE
,
10629 INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
10632 tree dest
= CALL_EXPR_ARG (exp
, 0);
10633 tree src
= CALL_EXPR_ARG (exp
, 1);
10634 tree len
= CALL_EXPR_ARG (exp
, 2);
10635 tree size
= CALL_EXPR_ARG (exp
, 3);
10637 /* FIXME: Set access mode to write only for memset et al. */
10638 bool sizes_ok
= check_access (exp
, len
, /*maxread=*/NULL_TREE
,
10639 /*srcstr=*/NULL_TREE
, size
, access_read_write
);
10641 if (!tree_fits_uhwi_p (size
))
10644 if (tree_fits_uhwi_p (len
) || integer_all_onesp (size
))
10646 /* Avoid transforming the checking call to an ordinary one when
10647 an overflow has been detected or when the call couldn't be
10648 validated because the size is not constant. */
10649 if (!sizes_ok
&& !integer_all_onesp (size
) && tree_int_cst_lt (size
, len
))
10652 tree fn
= NULL_TREE
;
10653 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
10654 mem{cpy,pcpy,move,set} is available. */
10657 case BUILT_IN_MEMCPY_CHK
:
10658 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY
);
10660 case BUILT_IN_MEMPCPY_CHK
:
10661 fn
= builtin_decl_explicit (BUILT_IN_MEMPCPY
);
10663 case BUILT_IN_MEMMOVE_CHK
:
10664 fn
= builtin_decl_explicit (BUILT_IN_MEMMOVE
);
10666 case BUILT_IN_MEMSET_CHK
:
10667 fn
= builtin_decl_explicit (BUILT_IN_MEMSET
);
10676 fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fn
, 3, dest
, src
, len
);
10677 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
10678 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
10679 return expand_expr (fn
, target
, mode
, EXPAND_NORMAL
);
10681 else if (fcode
== BUILT_IN_MEMSET_CHK
)
10685 unsigned int dest_align
= get_pointer_alignment (dest
);
10687 /* If DEST is not a pointer type, call the normal function. */
10688 if (dest_align
== 0)
10691 /* If SRC and DEST are the same (and not volatile), do nothing. */
10692 if (operand_equal_p (src
, dest
, 0))
10696 if (fcode
!= BUILT_IN_MEMPCPY_CHK
)
10698 /* Evaluate and ignore LEN in case it has side-effects. */
10699 expand_expr (len
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
10700 return expand_expr (dest
, target
, mode
, EXPAND_NORMAL
);
10703 expr
= fold_build_pointer_plus (dest
, len
);
10704 return expand_expr (expr
, target
, mode
, EXPAND_NORMAL
);
10707 /* __memmove_chk special case. */
10708 if (fcode
== BUILT_IN_MEMMOVE_CHK
)
10710 unsigned int src_align
= get_pointer_alignment (src
);
10712 if (src_align
== 0)
10715 /* If src is categorized for a readonly section we can use
10716 normal __memcpy_chk. */
10717 if (readonly_data_expr (src
))
10719 tree fn
= builtin_decl_explicit (BUILT_IN_MEMCPY_CHK
);
10722 fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fn
, 4,
10723 dest
, src
, len
, size
);
10724 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
10725 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
10726 return expand_expr (fn
, target
, mode
, EXPAND_NORMAL
);
10733 /* Emit warning if a buffer overflow is detected at compile time. */
10736 maybe_emit_chk_warning (tree exp
, enum built_in_function fcode
)
10738 /* The source string. */
10739 tree srcstr
= NULL_TREE
;
10740 /* The size of the destination object returned by __builtin_object_size. */
10741 tree objsize
= NULL_TREE
;
10742 /* The string that is being concatenated with (as in __strcat_chk)
10743 or null if it isn't. */
10744 tree catstr
= NULL_TREE
;
10745 /* The maximum length of the source sequence in a bounded operation
10746 (such as __strncat_chk) or null if the operation isn't bounded
10747 (such as __strcat_chk). */
10748 tree maxread
= NULL_TREE
;
10749 /* The exact size of the access (such as in __strncpy_chk). */
10750 tree size
= NULL_TREE
;
10751 /* The access by the function that's checked. Except for snprintf
10752 both writing and reading is checked. */
10753 access_mode mode
= access_read_write
;
10757 case BUILT_IN_STRCPY_CHK
:
10758 case BUILT_IN_STPCPY_CHK
:
10759 srcstr
= CALL_EXPR_ARG (exp
, 1);
10760 objsize
= CALL_EXPR_ARG (exp
, 2);
10763 case BUILT_IN_STRCAT_CHK
:
10764 /* For __strcat_chk the warning will be emitted only if overflowing
10765 by at least strlen (dest) + 1 bytes. */
10766 catstr
= CALL_EXPR_ARG (exp
, 0);
10767 srcstr
= CALL_EXPR_ARG (exp
, 1);
10768 objsize
= CALL_EXPR_ARG (exp
, 2);
10771 case BUILT_IN_STRNCAT_CHK
:
10772 catstr
= CALL_EXPR_ARG (exp
, 0);
10773 srcstr
= CALL_EXPR_ARG (exp
, 1);
10774 maxread
= CALL_EXPR_ARG (exp
, 2);
10775 objsize
= CALL_EXPR_ARG (exp
, 3);
10778 case BUILT_IN_STRNCPY_CHK
:
10779 case BUILT_IN_STPNCPY_CHK
:
10780 srcstr
= CALL_EXPR_ARG (exp
, 1);
10781 size
= CALL_EXPR_ARG (exp
, 2);
10782 objsize
= CALL_EXPR_ARG (exp
, 3);
10785 case BUILT_IN_SNPRINTF_CHK
:
10786 case BUILT_IN_VSNPRINTF_CHK
:
10787 maxread
= CALL_EXPR_ARG (exp
, 1);
10788 objsize
= CALL_EXPR_ARG (exp
, 3);
10789 /* The only checked access the write to the destination. */
10790 mode
= access_write_only
;
10793 gcc_unreachable ();
10796 if (catstr
&& maxread
)
10798 /* Check __strncat_chk. There is no way to determine the length
10799 of the string to which the source string is being appended so
10800 just warn when the length of the source string is not known. */
10801 check_strncat_sizes (exp
, objsize
);
10805 check_access (exp
, size
, maxread
, srcstr
, objsize
, mode
);
/* Emit warning if a buffer overflow is detected at compile time
   in __sprintf_chk/__vsprintf_chk calls.  */
10812 maybe_emit_sprintf_chk_warning (tree exp
, enum built_in_function fcode
)
10814 tree size
, len
, fmt
;
10815 const char *fmt_str
;
10816 int nargs
= call_expr_nargs (exp
);
10818 /* Verify the required arguments in the original call. */
10822 size
= CALL_EXPR_ARG (exp
, 2);
10823 fmt
= CALL_EXPR_ARG (exp
, 3);
10825 if (! tree_fits_uhwi_p (size
) || integer_all_onesp (size
))
10828 /* Check whether the format is a literal string constant. */
10829 fmt_str
= c_getstr (fmt
);
10830 if (fmt_str
== NULL
)
10833 if (!init_target_chars ())
10836 /* If the format doesn't contain % args or %%, we know its size. */
10837 if (strchr (fmt_str
, target_percent
) == 0)
10838 len
= build_int_cstu (size_type_node
, strlen (fmt_str
));
10839 /* If the format is "%s" and first ... argument is a string literal,
10841 else if (fcode
== BUILT_IN_SPRINTF_CHK
10842 && strcmp (fmt_str
, target_percent_s
) == 0)
10848 arg
= CALL_EXPR_ARG (exp
, 4);
10849 if (! POINTER_TYPE_P (TREE_TYPE (arg
)))
10852 len
= c_strlen (arg
, 1);
10853 if (!len
|| ! tree_fits_uhwi_p (len
))
10859 /* Add one for the terminating nul. */
10860 len
= fold_build2 (PLUS_EXPR
, TREE_TYPE (len
), len
, size_one_node
);
10862 check_access (exp
, /*size=*/NULL_TREE
, /*maxread=*/NULL_TREE
, len
, size
,
10863 access_write_only
);
/* Fold a call to __builtin_object_size with arguments PTR and OST,
   if possible.  */
10870 fold_builtin_object_size (tree ptr
, tree ost
, enum built_in_function fcode
)
10873 int object_size_type
;
10875 if (!validate_arg (ptr
, POINTER_TYPE
)
10876 || !validate_arg (ost
, INTEGER_TYPE
))
10881 if (TREE_CODE (ost
) != INTEGER_CST
10882 || tree_int_cst_sgn (ost
) < 0
10883 || compare_tree_int (ost
, 3) > 0)
10886 object_size_type
= tree_to_shwi (ost
);
10888 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
10889 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
10890 and (size_t) 0 for types 2 and 3. */
10891 if (TREE_SIDE_EFFECTS (ptr
))
10892 return build_int_cst_type (size_type_node
, object_size_type
< 2 ? -1 : 0);
10894 if (fcode
== BUILT_IN_DYNAMIC_OBJECT_SIZE
)
10895 object_size_type
|= OST_DYNAMIC
;
10897 if (TREE_CODE (ptr
) == ADDR_EXPR
)
10899 compute_builtin_object_size (ptr
, object_size_type
, &bytes
);
10900 if ((object_size_type
& OST_DYNAMIC
)
10901 || int_fits_type_p (bytes
, size_type_node
))
10902 return fold_convert (size_type_node
, bytes
);
10904 else if (TREE_CODE (ptr
) == SSA_NAME
)
10906 /* If object size is not known yet, delay folding until
10907 later. Maybe subsequent passes will help determining
10909 if (compute_builtin_object_size (ptr
, object_size_type
, &bytes
)
10910 && ((object_size_type
& OST_DYNAMIC
)
10911 || int_fits_type_p (bytes
, size_type_node
)))
10912 return fold_convert (size_type_node
, bytes
);
/* Builtins with folding operations that operate on "..." arguments
   need special handling; we need to store the arguments in a convenient
   data structure before attempting any folding.  Fortunately there are
   only a few builtins that fall into this category.  FNDECL is the
   function, EXP is the CALL_EXPR for the call.  */
10925 fold_builtin_varargs (location_t loc
, tree fndecl
, tree
*args
, int nargs
)
10927 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10928 tree ret
= NULL_TREE
;
10932 case BUILT_IN_FPCLASSIFY
:
10933 ret
= fold_builtin_fpclassify (loc
, args
, nargs
);
10936 case BUILT_IN_ADDC
:
10937 case BUILT_IN_ADDCL
:
10938 case BUILT_IN_ADDCLL
:
10939 case BUILT_IN_SUBC
:
10940 case BUILT_IN_SUBCL
:
10941 case BUILT_IN_SUBCLL
:
10942 return fold_builtin_addc_subc (loc
, fcode
, args
);
10949 ret
= build1 (NOP_EXPR
, TREE_TYPE (ret
), ret
);
10950 SET_EXPR_LOCATION (ret
, loc
);
10951 suppress_warning (ret
);
10957 /* Initialize format string characters in the target charset. */
10960 init_target_chars (void)
10965 target_newline
= lang_hooks
.to_target_charset ('\n');
10966 target_percent
= lang_hooks
.to_target_charset ('%');
10967 target_c
= lang_hooks
.to_target_charset ('c');
10968 target_s
= lang_hooks
.to_target_charset ('s');
10969 if (target_newline
== 0 || target_percent
== 0 || target_c
== 0
10973 target_percent_c
[0] = target_percent
;
10974 target_percent_c
[1] = target_c
;
10975 target_percent_c
[2] = '\0';
10977 target_percent_s
[0] = target_percent
;
10978 target_percent_s
[1] = target_s
;
10979 target_percent_s
[2] = '\0';
10981 target_percent_s_newline
[0] = target_percent
;
10982 target_percent_s_newline
[1] = target_s
;
10983 target_percent_s_newline
[2] = target_newline
;
10984 target_percent_s_newline
[3] = '\0';
/* Helper function for do_mpfr_arg*().  Ensure M is a normal number
   and no overflow/underflow occurred.  INEXACT is true if M was not
   exactly calculated.  TYPE is the tree type for the result.  This
   function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail.  */
10999 do_mpfr_ckconv (mpfr_srcptr m
, tree type
, int inexact
)
11001 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11002 overflow/underflow occurred. If -frounding-math, proceed iff the
11003 result of calling FUNC was exact. */
11004 if (mpfr_number_p (m
) && !mpfr_overflow_p () && !mpfr_underflow_p ()
11005 && (!flag_rounding_math
|| !inexact
))
11007 REAL_VALUE_TYPE rr
;
11009 real_from_mpfr (&rr
, m
, type
, MPFR_RNDN
);
11010 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
11011 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11012 but the mpfr_t is not, then we underflowed in the
11014 if (real_isfinite (&rr
)
11015 && (rr
.cl
== rvc_zero
) == (mpfr_zero_p (m
) != 0))
11017 REAL_VALUE_TYPE rmode
;
11019 real_convert (&rmode
, TYPE_MODE (type
), &rr
);
11020 /* Proceed iff the specified mode can hold the value. */
11021 if (real_identical (&rmode
, &rr
))
11022 return build_real (type
, rmode
);
/* Helper function for do_mpc_arg*().  Ensure M is a normal complex
   number and no overflow/underflow occurred.  INEXACT is true if M
   was not exactly calculated.  TYPE is the tree type for the result.
   This function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail, if
   FORCE_CONVERT is true, then bypass the checks.  */
11037 do_mpc_ckconv (mpc_srcptr m
, tree type
, int inexact
, int force_convert
)
11039 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11040 overflow/underflow occurred. If -frounding-math, proceed iff the
11041 result of calling FUNC was exact. */
11043 || (mpfr_number_p (mpc_realref (m
)) && mpfr_number_p (mpc_imagref (m
))
11044 && !mpfr_overflow_p () && !mpfr_underflow_p ()
11045 && (!flag_rounding_math
|| !inexact
)))
11047 REAL_VALUE_TYPE re
, im
;
11049 real_from_mpfr (&re
, mpc_realref (m
), TREE_TYPE (type
), MPFR_RNDN
);
11050 real_from_mpfr (&im
, mpc_imagref (m
), TREE_TYPE (type
), MPFR_RNDN
);
11051 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
11052 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11053 but the mpfr_t is not, then we underflowed in the
11056 || (real_isfinite (&re
) && real_isfinite (&im
)
11057 && (re
.cl
== rvc_zero
) == (mpfr_zero_p (mpc_realref (m
)) != 0)
11058 && (im
.cl
== rvc_zero
) == (mpfr_zero_p (mpc_imagref (m
)) != 0)))
11060 REAL_VALUE_TYPE re_mode
, im_mode
;
11062 real_convert (&re_mode
, TYPE_MODE (TREE_TYPE (type
)), &re
);
11063 real_convert (&im_mode
, TYPE_MODE (TREE_TYPE (type
)), &im
);
11064 /* Proceed iff the specified mode can hold the value. */
11066 || (real_identical (&re_mode
, &re
)
11067 && real_identical (&im_mode
, &im
)))
11068 return build_complex (type
, build_real (TREE_TYPE (type
), re_mode
),
11069 build_real (TREE_TYPE (type
), im_mode
));
/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  */
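/* For example, with constant arguments
     remquo (5.0, 3.0, &q)
   folds to the constant -1.0 while storing 2 in *q, because the quotient
   5/3 rounded to the nearest integer is 2 and 5 - 2*3 == -1; the folding
   below performs exactly this computation with MPFR at the precision of
   the argument type.  */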
11081 do_mpfr_remquo (tree arg0
, tree arg1
, tree arg_quo
)
11083 tree
const type
= TREE_TYPE (arg0
);
11084 tree result
= NULL_TREE
;
11089 /* To proceed, MPFR must exactly represent the target floating point
11090 format, which only happens when the target base equals two. */
11091 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
11092 && TREE_CODE (arg0
) == REAL_CST
&& !TREE_OVERFLOW (arg0
)
11093 && TREE_CODE (arg1
) == REAL_CST
&& !TREE_OVERFLOW (arg1
))
11095 const REAL_VALUE_TYPE
*const ra0
= TREE_REAL_CST_PTR (arg0
);
11096 const REAL_VALUE_TYPE
*const ra1
= TREE_REAL_CST_PTR (arg1
);
11098 if (real_isfinite (ra0
) && real_isfinite (ra1
))
11100 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
11101 const int prec
= fmt
->p
;
11102 const mpfr_rnd_t rnd
= fmt
->round_towards_zero
? MPFR_RNDZ
: MPFR_RNDN
;
11107 mpfr_inits2 (prec
, m0
, m1
, NULL
);
11108 mpfr_from_real (m0
, ra0
, MPFR_RNDN
);
11109 mpfr_from_real (m1
, ra1
, MPFR_RNDN
);
11110 mpfr_clear_flags ();
11111 mpfr_remquo (m0
, &integer_quo
, m0
, m1
, rnd
);
11112 /* Remquo is independent of the rounding mode, so pass
11113 inexact=0 to do_mpfr_ckconv(). */
11114 result_rem
= do_mpfr_ckconv (m0
, type
, /*inexact=*/ 0);
11115 mpfr_clears (m0
, m1
, NULL
);
11118 /* MPFR calculates quo in the host's long so it may
11119 return more bits in quo than the target int can hold
11120 if sizeof(host long) > sizeof(target int). This can
11121 happen even for native compilers in LP64 mode. In
11122 these cases, modulo the quo value with the largest
11123 number that the target int can hold while leaving one
11124 bit for the sign. */
11125 if (sizeof (integer_quo
) * CHAR_BIT
> INT_TYPE_SIZE
)
11126 integer_quo
%= (long)(1UL << (INT_TYPE_SIZE
- 1));
11128 /* Dereference the quo pointer argument. */
11129 arg_quo
= build_fold_indirect_ref (arg_quo
);
11130 /* Proceed iff a valid pointer type was passed in. */
11131 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo
)) == integer_type_node
)
11133 /* Set the value. */
11135 = fold_build2 (MODIFY_EXPR
, TREE_TYPE (arg_quo
), arg_quo
,
11136 build_int_cst (TREE_TYPE (arg_quo
),
11138 TREE_SIDE_EFFECTS (result_quo
) = 1;
11139 /* Combine the quo assignment with the rem. */
11140 result
= fold_build2 (COMPOUND_EXPR
, type
,
11141 result_quo
, result_rem
);
11142 suppress_warning (result
, OPT_Wunused_value
);
11143 result
= non_lvalue (result
);
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   value.  */
11160 do_mpfr_lgamma_r (tree arg
, tree arg_sg
, tree type
)
11162 tree result
= NULL_TREE
;
11166 /* To proceed, MPFR must exactly represent the target floating point
11167 format, which only happens when the target base equals two. Also
11168 verify ARG is a constant and that ARG_SG is an int pointer. */
11169 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
11170 && TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
)
11171 && TREE_CODE (TREE_TYPE (arg_sg
)) == POINTER_TYPE
11172 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg
))) == integer_type_node
)
11174 const REAL_VALUE_TYPE
*const ra
= TREE_REAL_CST_PTR (arg
);
11176 /* In addition to NaN and Inf, the argument cannot be zero or a
11177 negative integer. */
11178 if (real_isfinite (ra
)
11179 && ra
->cl
!= rvc_zero
11180 && !(real_isneg (ra
) && real_isinteger (ra
, TYPE_MODE (type
))))
11182 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
11183 const int prec
= fmt
->p
;
11184 const mpfr_rnd_t rnd
= fmt
->round_towards_zero
? MPFR_RNDZ
: MPFR_RNDN
;
11188 auto_mpfr
m (prec
);
11189 mpfr_from_real (m
, ra
, MPFR_RNDN
);
11190 mpfr_clear_flags ();
11191 inexact
= mpfr_lgamma (m
, &sg
, m
, rnd
);
11192 result_lg
= do_mpfr_ckconv (m
, type
, inexact
);
11197 /* Dereference the arg_sg pointer argument. */
11198 arg_sg
= build_fold_indirect_ref (arg_sg
);
11199 /* Assign the signgam value into *arg_sg. */
11200 result_sg
= fold_build2 (MODIFY_EXPR
,
11201 TREE_TYPE (arg_sg
), arg_sg
,
11202 build_int_cst (TREE_TYPE (arg_sg
), sg
));
11203 TREE_SIDE_EFFECTS (result_sg
) = 1;
11204 /* Combine the signgam assignment with the lgamma result. */
11205 result
= non_lvalue (fold_build2 (COMPOUND_EXPR
, type
,
11206 result_sg
, result_lg
));
/* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
   mpc function FUNC on it and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */
11223 do_mpc_arg2 (tree arg0
, tree arg1
, tree type
, int do_nonfinite
,
11224 int (*func
)(mpc_ptr
, mpc_srcptr
, mpc_srcptr
, mpc_rnd_t
))
11226 tree result
= NULL_TREE
;
11231 /* To proceed, MPFR must exactly represent the target floating point
11232 format, which only happens when the target base equals two. */
11233 if (TREE_CODE (arg0
) == COMPLEX_CST
&& !TREE_OVERFLOW (arg0
)
11234 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_TYPE (arg0
)))
11235 && TREE_CODE (arg1
) == COMPLEX_CST
&& !TREE_OVERFLOW (arg1
)
11236 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_TYPE (arg1
)))
11237 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0
))))->b
== 2)
11239 const REAL_VALUE_TYPE
*const re0
= TREE_REAL_CST_PTR (TREE_REALPART (arg0
));
11240 const REAL_VALUE_TYPE
*const im0
= TREE_REAL_CST_PTR (TREE_IMAGPART (arg0
));
11241 const REAL_VALUE_TYPE
*const re1
= TREE_REAL_CST_PTR (TREE_REALPART (arg1
));
11242 const REAL_VALUE_TYPE
*const im1
= TREE_REAL_CST_PTR (TREE_IMAGPART (arg1
));
11245 || (real_isfinite (re0
) && real_isfinite (im0
)
11246 && real_isfinite (re1
) && real_isfinite (im1
)))
11248 const struct real_format
*const fmt
=
11249 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type
)));
11250 const int prec
= fmt
->p
;
11251 const mpfr_rnd_t rnd
= fmt
->round_towards_zero
11252 ? MPFR_RNDZ
: MPFR_RNDN
;
11253 const mpc_rnd_t crnd
= fmt
->round_towards_zero
? MPC_RNDZZ
: MPC_RNDNN
;
11257 mpc_init2 (m0
, prec
);
11258 mpc_init2 (m1
, prec
);
11259 mpfr_from_real (mpc_realref (m0
), re0
, rnd
);
11260 mpfr_from_real (mpc_imagref (m0
), im0
, rnd
);
11261 mpfr_from_real (mpc_realref (m1
), re1
, rnd
);
11262 mpfr_from_real (mpc_imagref (m1
), im1
, rnd
);
11263 mpfr_clear_flags ();
11264 inexact
= func (m0
, m0
, m1
, crnd
);
11265 result
= do_mpc_ckconv (m0
, type
, inexact
, do_nonfinite
);
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */
11279 fold_call_stmt (gcall
*stmt
, bool ignore
)
11281 tree ret
= NULL_TREE
;
11282 tree fndecl
= gimple_call_fndecl (stmt
);
11283 location_t loc
= gimple_location (stmt
);
11284 if (fndecl
&& fndecl_built_in_p (fndecl
)
11285 && !gimple_call_va_arg_pack_p (stmt
))
11287 int nargs
= gimple_call_num_args (stmt
);
11288 tree
*args
= (nargs
> 0
11289 ? gimple_call_arg_ptr (stmt
, 0)
11290 : &error_mark_node
);
11292 if (avoid_folding_inline_builtin (fndecl
))
11294 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
11296 return targetm
.fold_builtin (fndecl
, nargs
, args
, ignore
);
11300 ret
= fold_builtin_n (loc
, NULL_TREE
, fndecl
, args
, nargs
, ignore
);
11303 /* Propagate location information from original call to
11304 expansion of builtin. Otherwise things like
11305 maybe_emit_chk_warning, that operate on the expansion
11306 of a builtin, will use the wrong location information. */
11307 if (gimple_has_location (stmt
))
11309 tree realret
= ret
;
11310 if (TREE_CODE (ret
) == NOP_EXPR
)
11311 realret
= TREE_OPERAND (ret
, 0);
11312 if (CAN_HAVE_LOCATION_P (realret
)
11313 && !EXPR_HAS_LOCATION (realret
))
11314 SET_EXPR_LOCATION (realret
, loc
);
/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */
11329 set_builtin_user_assembler_name (tree decl
, const char *asmspec
)
11331 gcc_assert (fndecl_built_in_p (decl
, BUILT_IN_NORMAL
)
11334 tree builtin
= builtin_decl_explicit (DECL_FUNCTION_CODE (decl
));
11335 set_user_assembler_name (builtin
, asmspec
);
11337 if (DECL_FUNCTION_CODE (decl
) == BUILT_IN_FFS
11338 && INT_TYPE_SIZE
< BITS_PER_WORD
)
11340 scalar_int_mode mode
= int_mode_for_size (INT_TYPE_SIZE
, 0).require ();
11341 set_user_assembler_libfunc ("ffs", asmspec
);
11342 set_optab_libfunc (ffs_optab
, mode
, "ffs");
/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */
11349 is_simple_builtin (tree decl
)
11351 if (decl
&& fndecl_built_in_p (decl
, BUILT_IN_NORMAL
))
11352 switch (DECL_FUNCTION_CODE (decl
))
11354 /* Builtins that expand to constants. */
11355 case BUILT_IN_CONSTANT_P
:
11356 case BUILT_IN_EXPECT
:
11357 case BUILT_IN_OBJECT_SIZE
:
11358 case BUILT_IN_UNREACHABLE
:
11359 /* Simple register moves or loads from stack. */
11360 case BUILT_IN_ASSUME_ALIGNED
:
11361 case BUILT_IN_RETURN_ADDRESS
:
11362 case BUILT_IN_EXTRACT_RETURN_ADDR
:
11363 case BUILT_IN_FROB_RETURN_ADDR
:
11364 case BUILT_IN_RETURN
:
11365 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS
:
11366 case BUILT_IN_FRAME_ADDRESS
:
11367 case BUILT_IN_VA_END
:
11368 case BUILT_IN_STACK_SAVE
:
11369 case BUILT_IN_STACK_RESTORE
:
11370 case BUILT_IN_DWARF_CFA
:
11371 /* Exception state returns or moves registers around. */
11372 case BUILT_IN_EH_FILTER
:
11373 case BUILT_IN_EH_POINTER
:
11374 case BUILT_IN_EH_COPY_VALUES
:
/* Return true if DECL is a builtin that is not expensive, i.e. it is
   most probably expanded inline into reasonably simple code.  This is a
   superset of is_simple_builtin.  */
11388 is_inexpensive_builtin (tree decl
)
11392 else if (DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_MD
)
11394 else if (DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_NORMAL
)
11395 switch (DECL_FUNCTION_CODE (decl
))
11398 CASE_BUILT_IN_ALLOCA
:
11399 case BUILT_IN_BSWAP16
:
11400 case BUILT_IN_BSWAP32
:
11401 case BUILT_IN_BSWAP64
:
11402 case BUILT_IN_BSWAP128
:
11404 case BUILT_IN_CLZIMAX
:
11405 case BUILT_IN_CLZL
:
11406 case BUILT_IN_CLZLL
:
11408 case BUILT_IN_CTZIMAX
:
11409 case BUILT_IN_CTZL
:
11410 case BUILT_IN_CTZLL
:
11412 case BUILT_IN_FFSIMAX
:
11413 case BUILT_IN_FFSL
:
11414 case BUILT_IN_FFSLL
:
11415 case BUILT_IN_IMAXABS
:
11416 case BUILT_IN_FINITE
:
11417 case BUILT_IN_FINITEF
:
11418 case BUILT_IN_FINITEL
:
11419 case BUILT_IN_FINITED32
:
11420 case BUILT_IN_FINITED64
:
11421 case BUILT_IN_FINITED128
:
11422 case BUILT_IN_FPCLASSIFY
:
11423 case BUILT_IN_ISFINITE
:
11424 case BUILT_IN_ISINF_SIGN
:
11425 case BUILT_IN_ISINF
:
11426 case BUILT_IN_ISINFF
:
11427 case BUILT_IN_ISINFL
:
11428 case BUILT_IN_ISINFD32
:
11429 case BUILT_IN_ISINFD64
:
11430 case BUILT_IN_ISINFD128
:
11431 case BUILT_IN_ISNAN
:
11432 case BUILT_IN_ISNANF
:
11433 case BUILT_IN_ISNANL
:
11434 case BUILT_IN_ISNAND32
:
11435 case BUILT_IN_ISNAND64
:
11436 case BUILT_IN_ISNAND128
:
11437 case BUILT_IN_ISNORMAL
:
11438 case BUILT_IN_ISGREATER
:
11439 case BUILT_IN_ISGREATEREQUAL
:
11440 case BUILT_IN_ISLESS
:
11441 case BUILT_IN_ISLESSEQUAL
:
11442 case BUILT_IN_ISLESSGREATER
:
11443 case BUILT_IN_ISUNORDERED
:
11444 case BUILT_IN_ISEQSIG
:
11445 case BUILT_IN_VA_ARG_PACK
:
11446 case BUILT_IN_VA_ARG_PACK_LEN
:
11447 case BUILT_IN_VA_COPY
:
11448 case BUILT_IN_TRAP
:
11449 case BUILT_IN_UNREACHABLE_TRAP
:
11450 case BUILT_IN_SAVEREGS
:
11451 case BUILT_IN_POPCOUNTL
:
11452 case BUILT_IN_POPCOUNTLL
:
11453 case BUILT_IN_POPCOUNTIMAX
:
11454 case BUILT_IN_POPCOUNT
:
11455 case BUILT_IN_PARITYL
:
11456 case BUILT_IN_PARITYLL
:
11457 case BUILT_IN_PARITYIMAX
:
11458 case BUILT_IN_PARITY
:
11459 case BUILT_IN_LABS
:
11460 case BUILT_IN_LLABS
:
11461 case BUILT_IN_PREFETCH
:
11462 case BUILT_IN_ACC_ON_DEVICE
:
11466 return is_simple_builtin (decl
);
/* Return true if T is a constant and the value cast to a target char
   can be represented by a host char.
   Store the casted char constant in *P if so.  */
11477 target_char_cst_p (tree t
, char *p
)
11479 if (!tree_fits_uhwi_p (t
) || CHAR_TYPE_SIZE
!= HOST_BITS_PER_CHAR
)
11482 *p
= (char)tree_to_uhwi (t
);
/* Return true if the builtin DECL is implemented in a standard library.
   Otherwise return false which doesn't guarantee it is not (thus the list
   of handled builtins below may be incomplete).  */
11491 builtin_with_linkage_p (tree decl
)
11493 if (DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_NORMAL
)
11494 switch (DECL_FUNCTION_CODE (decl
))
11496 CASE_FLT_FN (BUILT_IN_ACOS
):
11497 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ACOS
):
11498 CASE_FLT_FN (BUILT_IN_ACOSH
):
11499 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ACOSH
):
11500 CASE_FLT_FN (BUILT_IN_ASIN
):
11501 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ASIN
):
11502 CASE_FLT_FN (BUILT_IN_ASINH
):
11503 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ASINH
):
11504 CASE_FLT_FN (BUILT_IN_ATAN
):
11505 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ATAN
):
11506 CASE_FLT_FN (BUILT_IN_ATANH
):
11507 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ATANH
):
11508 CASE_FLT_FN (BUILT_IN_ATAN2
):
11509 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ATAN2
):
11510 CASE_FLT_FN (BUILT_IN_CBRT
):
11511 CASE_FLT_FN_FLOATN_NX (BUILT_IN_CBRT
):
11512 CASE_FLT_FN (BUILT_IN_CEIL
):
11513 CASE_FLT_FN_FLOATN_NX (BUILT_IN_CEIL
):
11514 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
11515 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN
):
11516 CASE_FLT_FN (BUILT_IN_COS
):
11517 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COS
):
11518 CASE_FLT_FN (BUILT_IN_COSH
):
11519 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COSH
):
11520 CASE_FLT_FN (BUILT_IN_ERF
):
11521 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ERF
):
11522 CASE_FLT_FN (BUILT_IN_ERFC
):
11523 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ERFC
):
11524 CASE_FLT_FN (BUILT_IN_EXP
):
11525 CASE_FLT_FN_FLOATN_NX (BUILT_IN_EXP
):
11526 CASE_FLT_FN (BUILT_IN_EXP2
):
11527 CASE_FLT_FN_FLOATN_NX (BUILT_IN_EXP2
):
11528 CASE_FLT_FN (BUILT_IN_EXPM1
):
11529 CASE_FLT_FN_FLOATN_NX (BUILT_IN_EXPM1
):
11530 CASE_FLT_FN (BUILT_IN_FABS
):
11531 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS
):
11532 CASE_FLT_FN (BUILT_IN_FDIM
):
11533 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FDIM
):
11534 CASE_FLT_FN (BUILT_IN_FLOOR
):
11535 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FLOOR
):
11536 CASE_FLT_FN (BUILT_IN_FMA
):
11537 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA
):
11538 CASE_FLT_FN (BUILT_IN_FMAX
):
11539 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMAX
):
11540 CASE_FLT_FN (BUILT_IN_FMIN
):
11541 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMIN
):
11542 CASE_FLT_FN (BUILT_IN_FMOD
):
11543 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMOD
):
11544 CASE_FLT_FN (BUILT_IN_FREXP
):
11545 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FREXP
):
11546 CASE_FLT_FN (BUILT_IN_HYPOT
):
11547 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HYPOT
):
11548 CASE_FLT_FN (BUILT_IN_ILOGB
):
11549 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ILOGB
):
11550 CASE_FLT_FN (BUILT_IN_LDEXP
):
11551 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LDEXP
):
11552 CASE_FLT_FN (BUILT_IN_LGAMMA
):
11553 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LGAMMA
):
11554 CASE_FLT_FN (BUILT_IN_LLRINT
):
11555 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LLRINT
):
11556 CASE_FLT_FN (BUILT_IN_LLROUND
):
11557 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LLROUND
):
11558 CASE_FLT_FN (BUILT_IN_LOG
):
11559 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LOG
):
11560 CASE_FLT_FN (BUILT_IN_LOG10
):
11561 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LOG10
):
11562 CASE_FLT_FN (BUILT_IN_LOG1P
):
11563 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LOG1P
):
11564 CASE_FLT_FN (BUILT_IN_LOG2
):
11565 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LOG2
):
11566 CASE_FLT_FN (BUILT_IN_LOGB
):
11567 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LOGB
):
11568 CASE_FLT_FN (BUILT_IN_LRINT
):
11569 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LRINT
):
11570 CASE_FLT_FN (BUILT_IN_LROUND
):
11571 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LROUND
):
11572 CASE_FLT_FN (BUILT_IN_MODF
):
11573 CASE_FLT_FN_FLOATN_NX (BUILT_IN_MODF
):
11574 CASE_FLT_FN (BUILT_IN_NAN
):
11575 CASE_FLT_FN_FLOATN_NX (BUILT_IN_NAN
):
11576 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
11577 CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEARBYINT
):
11578 CASE_FLT_FN (BUILT_IN_NEXTAFTER
):
11579 CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEXTAFTER
):
11580 CASE_FLT_FN (BUILT_IN_NEXTTOWARD
):
11581 CASE_FLT_FN (BUILT_IN_POW
):
11582 CASE_FLT_FN_FLOATN_NX (BUILT_IN_POW
):
11583 CASE_FLT_FN (BUILT_IN_REMAINDER
):
11584 CASE_FLT_FN_FLOATN_NX (BUILT_IN_REMAINDER
):
11585 CASE_FLT_FN (BUILT_IN_REMQUO
):
11586 CASE_FLT_FN_FLOATN_NX (BUILT_IN_REMQUO
):
11587 CASE_FLT_FN (BUILT_IN_RINT
):
11588 CASE_FLT_FN_FLOATN_NX (BUILT_IN_RINT
):
11589 CASE_FLT_FN (BUILT_IN_ROUND
):
11590 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ROUND
):
11591 CASE_FLT_FN (BUILT_IN_SCALBLN
):
11592 CASE_FLT_FN_FLOATN_NX (BUILT_IN_SCALBLN
):
11593 CASE_FLT_FN (BUILT_IN_SCALBN
):
11594 CASE_FLT_FN_FLOATN_NX (BUILT_IN_SCALBN
):
11595 CASE_FLT_FN (BUILT_IN_SIN
):
11596 CASE_FLT_FN_FLOATN_NX (BUILT_IN_SIN
):
11597 CASE_FLT_FN (BUILT_IN_SINH
):
11598 CASE_FLT_FN_FLOATN_NX (BUILT_IN_SINH
):
11599 CASE_FLT_FN (BUILT_IN_SINCOS
):
11600 CASE_FLT_FN (BUILT_IN_SQRT
):
11601 CASE_FLT_FN_FLOATN_NX (BUILT_IN_SQRT
):
11602 CASE_FLT_FN (BUILT_IN_TAN
):
11603 CASE_FLT_FN_FLOATN_NX (BUILT_IN_TAN
):
11604 CASE_FLT_FN (BUILT_IN_TANH
):
11605 CASE_FLT_FN_FLOATN_NX (BUILT_IN_TANH
):
11606 CASE_FLT_FN (BUILT_IN_TGAMMA
):
11607 CASE_FLT_FN_FLOATN_NX (BUILT_IN_TGAMMA
):
11608 CASE_FLT_FN (BUILT_IN_TRUNC
):
11609 CASE_FLT_FN_FLOATN_NX (BUILT_IN_TRUNC
):
11612 case BUILT_IN_STPCPY
:
11613 case BUILT_IN_STPNCPY
:
11614 /* stpcpy is both referenced in libiberty's pex-win32.c and provided
11615 by libiberty's stpcpy.c for MinGW targets so we need to return true
11616 in order to be able to build libiberty in LTO mode for them. */
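
/* Usage sketch, illustration only with a hypothetical caller: code that
   wants to rely on an out-of-line library definition of a builtin being
   available at link time could guard on builtin_with_linkage_p first.  */

static inline bool
sketch_has_library_fallback_p (tree fndecl)
{
  return (fndecl
          && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL)
          && builtin_with_linkage_p (fndecl));
}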

/* Return true if OFFRNG is bounded to a subrange of offset values
   valid for the largest possible object.  */

bool
access_ref::offset_bounded () const
{
  tree min = TYPE_MIN_VALUE (ptrdiff_type_node);
  tree max = TYPE_MAX_VALUE (ptrdiff_type_node);
  return wi::to_offset (min) <= offrng[0] && offrng[1] <= wi::to_offset (max);
}
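
/* Worked illustration only, with hypothetical numbers: on a target where
   ptrdiff_type_node is a 64-bit signed type, the test above holds exactly
   when
     -0x8000000000000000 <= offrng[0] && offrng[1] <= 0x7fffffffffffffff,
   i.e. the recorded offset range fits within the signed offsets valid for
   the largest possible object.  */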

/* If CALLEE has known side effects, fill in INFO and return true.
   See tree-ssa-structalias.cc:find_func_aliases
   for the list of builtins we might need to handle here.  */

attr_fnspec
builtin_fnspec (tree callee)
{
  built_in_function code = DECL_FUNCTION_CODE (callee);

  switch (code)
    {
      /* All the following functions read memory pointed to by
         their second argument and write memory pointed to by first
         argument.
         strcat/strncat additionally reads memory pointed to by the first
         argument.  */
      case BUILT_IN_STRCAT:
      case BUILT_IN_STRCAT_CHK:

      case BUILT_IN_STRNCAT:
      case BUILT_IN_STRNCAT_CHK:

      case BUILT_IN_STRCPY:
      case BUILT_IN_STRCPY_CHK:

      case BUILT_IN_STPCPY:
      case BUILT_IN_STPCPY_CHK:

      case BUILT_IN_STRNCPY:
      case BUILT_IN_MEMCPY:
      case BUILT_IN_MEMMOVE:
      case BUILT_IN_TM_MEMCPY:
      case BUILT_IN_TM_MEMMOVE:
      case BUILT_IN_STRNCPY_CHK:
      case BUILT_IN_MEMCPY_CHK:
      case BUILT_IN_MEMMOVE_CHK:

      case BUILT_IN_MEMPCPY:
      case BUILT_IN_MEMPCPY_CHK:

      case BUILT_IN_STPNCPY:
      case BUILT_IN_STPNCPY_CHK:

      case BUILT_IN_BCOPY:

      case BUILT_IN_BZERO:

      case BUILT_IN_MEMCMP:
      case BUILT_IN_MEMCMP_EQ:
      case BUILT_IN_BCMP:
      case BUILT_IN_STRNCMP:
      case BUILT_IN_STRNCMP_EQ:
      case BUILT_IN_STRNCASECMP:

      /* The following functions read memory pointed to by their
         first argument.  */
      CASE_BUILT_IN_TM_LOAD (1):
      CASE_BUILT_IN_TM_LOAD (2):
      CASE_BUILT_IN_TM_LOAD (4):
      CASE_BUILT_IN_TM_LOAD (8):
      CASE_BUILT_IN_TM_LOAD (FLOAT):
      CASE_BUILT_IN_TM_LOAD (DOUBLE):
      CASE_BUILT_IN_TM_LOAD (LDOUBLE):
      CASE_BUILT_IN_TM_LOAD (M64):
      CASE_BUILT_IN_TM_LOAD (M128):
      CASE_BUILT_IN_TM_LOAD (M256):
      case BUILT_IN_TM_LOG:
      case BUILT_IN_TM_LOG_1:
      case BUILT_IN_TM_LOG_2:
      case BUILT_IN_TM_LOG_4:
      case BUILT_IN_TM_LOG_8:
      case BUILT_IN_TM_LOG_FLOAT:
      case BUILT_IN_TM_LOG_DOUBLE:
      case BUILT_IN_TM_LOG_LDOUBLE:
      case BUILT_IN_TM_LOG_M64:
      case BUILT_IN_TM_LOG_M128:
      case BUILT_IN_TM_LOG_M256:

      case BUILT_IN_INDEX:
      case BUILT_IN_RINDEX:
      case BUILT_IN_STRCHR:
      case BUILT_IN_STRLEN:
      case BUILT_IN_STRRCHR:

      case BUILT_IN_STRNLEN:

      /* These read memory pointed to by the first argument.
         Allocating memory does not have any side-effects apart from
         being the definition point for the pointer.
         Unix98 specifies that errno is set on allocation failure.  */
      case BUILT_IN_STRDUP:

      case BUILT_IN_STRNDUP:

      /* Allocating memory does not have any side-effects apart from
         being the definition point for the pointer.  */
      case BUILT_IN_MALLOC:
      case BUILT_IN_ALIGNED_ALLOC:
      case BUILT_IN_CALLOC:
      case BUILT_IN_GOMP_ALLOC:

      CASE_BUILT_IN_ALLOCA:

      /* These read memory pointed to by the first argument with size
         in the third argument.  */
      case BUILT_IN_MEMCHR:

      /* These read memory pointed to by the first and second arguments.  */
      case BUILT_IN_STRSTR:
      case BUILT_IN_STRPBRK:
      case BUILT_IN_STRCASECMP:
      case BUILT_IN_STRCSPN:
      case BUILT_IN_STRSPN:
      case BUILT_IN_STRCMP:
      case BUILT_IN_STRCMP_EQ:

      /* Freeing memory kills the pointed-to memory.  More importantly
         the call has to serve as a barrier for moving loads and stores
         from it.  */
      case BUILT_IN_STACK_RESTORE:
      case BUILT_IN_FREE:
      case BUILT_IN_GOMP_FREE:

      case BUILT_IN_VA_END:

      /* Realloc serves both as allocation point and deallocation point.  */
      case BUILT_IN_REALLOC:

      case BUILT_IN_GAMMA_R:
      case BUILT_IN_GAMMAF_R:
      case BUILT_IN_GAMMAL_R:
      case BUILT_IN_LGAMMA_R:
      case BUILT_IN_LGAMMAF_R:
      case BUILT_IN_LGAMMAL_R:

      case BUILT_IN_FREXP:
      case BUILT_IN_FREXPF:
      case BUILT_IN_FREXPL:
      case BUILT_IN_MODF:
      case BUILT_IN_MODFF:
      case BUILT_IN_MODFL:

      case BUILT_IN_REMQUO:
      case BUILT_IN_REMQUOF:
      case BUILT_IN_REMQUOL:

      case BUILT_IN_SINCOS:
      case BUILT_IN_SINCOSF:
      case BUILT_IN_SINCOSL:

      case BUILT_IN_MEMSET:
      case BUILT_IN_MEMSET_CHK:
      case BUILT_IN_TM_MEMSET:

      CASE_BUILT_IN_TM_STORE (1):
      CASE_BUILT_IN_TM_STORE (2):
      CASE_BUILT_IN_TM_STORE (4):
      CASE_BUILT_IN_TM_STORE (8):
      CASE_BUILT_IN_TM_STORE (FLOAT):
      CASE_BUILT_IN_TM_STORE (DOUBLE):
      CASE_BUILT_IN_TM_STORE (LDOUBLE):
      CASE_BUILT_IN_TM_STORE (M64):
      CASE_BUILT_IN_TM_STORE (M128):
      CASE_BUILT_IN_TM_STORE (M256):

      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_RETURN:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_UNWIND_RESUME:
      case BUILT_IN_CXA_END_CLEANUP:
      case BUILT_IN_EH_COPY_VALUES:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_APPLY_ARGS:
      case BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT:
      case BUILT_IN_ASAN_AFTER_DYNAMIC_INIT:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_DWARF_CFA:
      case BUILT_IN_RETURN_ADDRESS:

      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_EXPECT:
      case BUILT_IN_EXPECT_WITH_PROBABILITY:

      /* But posix_memalign stores a pointer into the memory pointed to
         by its first argument.  */
      case BUILT_IN_POSIX_MEMALIGN: