/* Expand builtin functions.
   Copyright (C) 1988-2021 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */
#include "coretypes.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "gimple-ssa-warn-access.h"
#include "stor-layout.h"
#include "tree-object-size.h"
#include "tree-ssa-strlen.h"
#include "typeclass.h"
#include "langhooks.h"
#include "value-prof.h"
#include "stringpool.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimple-fold.h"
#include "file-prefix-map.h" /* remap_macro_filename()  */
#include "gomp-constants.h"
#include "omp-general.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "tree-ssa-live.h"
#include "tree-outof-ssa.h"
#include "attr-fnspec.h"
#include "gimple-range.h"
#include "pointer-query.h"
struct target_builtins default_target_builtins;
struct target_builtins *this_target_builtins = &default_target_builtins;
/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
/* Set up an array of builtin_info_type, making sure each element's decl is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int) END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx inline_expand_builtin_bytecmp (tree, rtx);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
					    rtx target, tree exp,
					    memop_ret retmode,
					    bool might_overlap);
static rtx expand_builtin_memmove (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree,
					memop_ret);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode,
				       tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static rtx expand_builtin_expect_with_probability (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree,
					enum tree_code, enum tree_code);
static tree fold_builtin_varargs (location_t, tree, tree *, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);
/* Return true if NAME starts with __builtin_, __sync_, or __atomic_.  */

static bool
is_builtin_name (const char *name)
{
  return (startswith (name, "__builtin_")
	  || startswith (name, "__sync_")
	  || startswith (name, "__atomic_"));
}
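/* Illustrative note (added here, not part of the original source): with the
   prefix checks above, is_builtin_name ("__builtin_memcpy"),
   is_builtin_name ("__sync_fetch_and_add") and is_builtin_name ("__atomic_load_n")
   all return true, while is_builtin_name ("memcpy") returns false.  */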
/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

static bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P, since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}
/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in alignp and N in
   *BITPOSP and return true.  Otherwise return false and store BITS_PER_UNIT to
   *alignp and any bit-offset to *bitposp.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */
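/* Worked example (added for illustration, not in the original comment,
   assuming BITS_PER_UNIT == 8): if the address of EXP is known to be of the
   form 8 * k + 2 in bytes, this function stores M = 64 in *ALIGNP and
   N = 16 in *BITPOSP, so that M divides (address-in-bits - N) and N < M.  */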
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
  poly_int64 bitsize, bitpos;
  int unsignedp, reversep, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
  else if (TREE_CODE (exp) == LABEL_DECL)
  else if (TREE_CODE (exp) == CONST_DECL)
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);
      known_alignment = true;
  else if (DECL_P (exp))
      align = DECL_ALIGN (exp);
      known_alignment = true;
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
      tree addr = TREE_OPERAND (exp, 0);
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = least_bit_hwi (ptr_bitmask);
	  addr = TREE_OPERAND (addr, 0);

	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	  unsigned HOST_WIDE_INT step = 1;
	    step = TREE_INT_CST_LOW (TMR_STEP (exp));
	  align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      if (!addr_p && !known_alignment
	  && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)

      /* Else adjust bitpos accordingly.  */
      bitpos += ptr_bitpos;
      if (TREE_CODE (exp) == MEM_REF
	  || TREE_CODE (exp) == TARGET_MEM_REF)
	bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
  else if (TREE_CODE (exp) == STRING_CST)
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);
      known_alignment = true;

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  align = MIN (align, inner);

  /* Account for the alignment of runtime coefficients, so that the constant
     bitpos is guaranteed to be accurate.  */
  unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
  if (alt_align != 0 && alt_align < align)
      known_alignment = false;

  *bitposp = bitpos.coeffs[0] & (align - 1);
  return known_alignment;
/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be determined,
   store M in alignp and N in *BITPOSP and return true.  Otherwise return false
   and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp.  */

get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
  /* Strip a WITH_SIZE_EXPR, get_inner_reference doesn't know how to deal
     with it.  */
  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    exp = TREE_OPERAND (exp, 0);
  return get_object_alignment_2 (exp, alignp, bitposp, false);
/* Return the alignment in bits of EXP, an object.  */

get_object_alignment (tree exp)
  unsigned HOST_WIDE_INT bitpos = 0;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

    align = least_bit_hwi (bitpos);
/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in alignp and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      align = MIN (align, inner);
      *bitposp = bitpos & (align - 1);
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an approximation.  */
	  *alignp = BITS_PER_UNIT;
  else if (TREE_CODE (exp) == INTEGER_CST)
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
  *alignp = BITS_PER_UNIT;
/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

get_pointer_alignment (tree exp)
  unsigned HOST_WIDE_INT bitpos = 0;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

    align = least_bit_hwi (bitpos);
/* Return the number of leading non-zero elements in the sequence
   [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
   ELTSIZE must be a power of 2 less than 8.  Used by c_strlen.  */
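/* Example (added for illustration, not part of the original comment):
   string_length ("ab\0cd", 1, 5) returns 2.  For a 2-byte wide string whose
   bytes are 'a', 0, 'b', 0, 0, 0, string_length (ptr, 2, 3) also returns 2,
   since the third element is the first all-zero one.  */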
string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

      /* Optimize the common case of plain char.  */
      for (n = 0; n < maxelts; n++)
	  const char *elt = (const char*) ptr + n;

  for (n = 0; n < maxelts; n++)
      const char *elt = (const char*) ptr + n * eltsize;
      if (!memcmp (elt, "\0\0\0\0", eltsize))
/* Compute the length of a null-terminated character string or wide
   character string handling character sizes of 1, 2, and 4 bytes.
   TREE_STRING_LENGTH is not the right way because it evaluates to
   the size of the character array in bytes (as opposed to characters)
   and because it can contain a zero byte in the middle.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (ARG, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   Additional information about the string accessed may be recorded
   in DATA.  For example, if ARG references an unterminated string,
   then the declaration will be stored in the DECL field.  If the
   length of the unterminated string can be determined, it'll be
   stored in the LEN field.  Note this length could well be different
   than what a C strlen call would return.

   ELTSIZE is 1 for normal single byte character strings, and 2 or
   4 for wide character strings.  ELTSIZE is by default 1.

   The value returned is of type `ssizetype'.  */
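/* Example (added for illustration, not part of the original comment):
   for ARG pointing into the string constant "hello" at byte offset 1 and
   ELTSIZE == 1, c_strlen returns ssize_int (4); with an unknown or
   out-of-bounds offset the function falls back as described above.  */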
c_strlen (tree arg, int only_value, c_strlen_data *data, unsigned eltsize)
  /* If we were not passed a DATA pointer, then get one to a local
     structure.  That avoids having to check DATA for NULL before
     each time we want to use it.  */
  c_strlen_data local_strlen_data = { };
    data = &local_strlen_data;

  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  tree src = STRIP_NOPS (arg);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
      len1 = c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value, data, eltsize);
      if (tree_int_cst_equal (len1, len2))

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);

  location_t loc = EXPR_LOC_OR_LOC (src, input_location);

  /* Offset from the beginning of the string in bytes.  */
  src = string_constant (src, &byteoff, &memsize, &decl);

  /* Determine the size of the string element.  */
  if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))

  /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
     length of SRC.  Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
     in case the latter is less than the size of the array, such as when
     SRC refers to a short string literal used to initialize a large array.
     In that case, the elements of the array after the terminating NUL are
     all NUL.  */
  HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
  strelts = strelts / eltsize;

  if (!tree_fits_uhwi_p (memsize))

  HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize;

  /* PTR can point to the byte representation of any string type, including
     char* and wchar_t*.  */
  const char *ptr = TREE_STRING_POINTER (src);

  if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
      /* The code below works only for single byte character types.  */

      /* If the string has an internal NUL character followed by any
	 non-NUL characters (e.g., "foo\0bar"), we can't compute
	 the offset to the following NUL if we don't know where to
	 start searching for it.  */
      unsigned len = string_length (ptr, eltsize, strelts);

      /* Return when an embedded null character is found or none at all.
	 In the latter case, set the DECL/LEN field in the DATA structure
	 so that callers may examine them.  */
      if (len + 1 < strelts)
      else if (len >= maxelts)
	  data->minlen = ssize_int (len);

      /* For empty strings the result should be zero.  */
	return ssize_int (0);

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  If the offset falls within the bounds
	 of the string subtract the offset from the length of the string,
	 and return that.  Otherwise the length is zero.  Take care to
	 use SAVE_EXPR in case the OFFSET has side-effects.  */
      tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff)
      offsave = fold_convert_loc (loc, sizetype, offsave);
      tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
      tree lenexp = fold_build2_loc (loc, MINUS_EXPR, sizetype, size_int (len),
      lenexp = fold_convert_loc (loc, ssizetype, lenexp);
      return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
			      build_zero_cst (ssizetype));

  /* Offset from the beginning of the string in elements.  */
  HOST_WIDE_INT eltoff;

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize)
    eltoff = tree_to_uhwi (byteoff) / eltsize;

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (eltoff < 0 || eltoff >= maxelts)
      /* Suppress multiple warnings for propagated constant strings.  */
	  && !warning_suppressed_p (arg, OPT_Warray_bounds)
	  && warning_at (loc, OPT_Warray_bounds,
			 "offset %qwi outside bounds of constant string",
	    inform (DECL_SOURCE_LOCATION (decl), "%qE declared here", decl);
	  suppress_warning (arg, OPT_Warray_bounds);

  /* If eltoff is larger than strelts but less than maxelts the
     string length is zero, since the excess memory will be zero.  */
  if (eltoff > strelts)
    return ssize_int (0);

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since ELTOFF is our starting index into the string, no further
     calculation is needed.  */
  unsigned len = string_length (ptr + eltoff * eltsize, eltsize,

  /* Don't know what to return if there was no zero termination.
     Ideally this would turn into a gcc_checking_assert over time.
     Set DECL/LEN so callers can examine them.  */
  if (len >= maxelts - eltoff)
      data->minlen = ssize_int (len);

  return ssize_int (len);
/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  If
   NULL_TERMINATED_P, reading stops after '\0' character, all further ones
   are assumed to be zero, otherwise it reads as many characters
   as needed.  */
c_readstr (const char *str, scalar_int_mode mode,
	   bool null_terminated_p /*=true*/)
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)

  for (i = 0; i < GET_MODE_SIZE (mode); i++)
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;

      if (ch || !null_terminated_p)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
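/* Illustrative example (added, not in the original source): on a
   little-endian target with 8-bit units, c_readstr ("ab", SImode) yields
   the constant 0x00006261, since reading stops at the terminating '\0'
   and the remaining bytes are taken as zero.  */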
/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

target_char_cast (tree cst, char *p)
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;

  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;
/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

builtin_save_expr (tree exp)
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (VAR_P (exp) && !TREE_STATIC (exp)))))

  return save_expr (exp);
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */
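/* For example (added for illustration): __builtin_return_address (0) reaches
   this function with COUNT == 0 and FNDECL_CODE == BUILT_IN_RETURN_ADDRESS,
   while __builtin_frame_address (2) uses COUNT == 2 and
   BUILT_IN_FRAME_ADDRESS.  */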
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;

    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, hard_frame_pointer_rtx);

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode))),
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

expand_builtin_setjmp_receiver (rtx receiver_label)
  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = rtx_for_static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)

      if (i == ARRAY_SIZE (elim_regs))
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */
expand_builtin_longjmp (rtx buf_addr, rtx value)
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  lab = copy_to_reg (lab);

	  /* Restore the frame pointer and stack pointer.  We must use a
	     temporary since the setjmp buffer may be a local.  */
	  fp = copy_to_reg (fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  /* Ensure the frame pointer move is not optimized.  */
	  emit_insn (gen_blockage ());
	  emit_clobber (hard_frame_pointer_rtx);
	  emit_clobber (frame_pointer_rtx);
	  emit_move_insn (hard_frame_pointer_rtx, fp);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
      gcc_assert (insn != last);

	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
      else if (CALL_P (insn))
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
  return (iter->i < iter->n);
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */
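/* Example (added for illustration): the call
     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
   used by expand_builtin_nonlocal_goto below checks that EXP has exactly
   two pointer arguments.  */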
validate_arglist (const_tree callexpr, ...)
  enum tree_code code;
  const_call_expr_arg_iterator iter;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  /* Get a bitmap of pointer argument numbers declared attribute nonnull.  */
  tree fn = CALL_EXPR_FN (callexpr);
  bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));

  for (unsigned argno = 1; ; ++argno)
      code = (enum tree_code) va_arg (ap, int);

	  /* This signifies an ellipsis, any further arguments are all ok.  */

	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);

	  /* The actual argument must be nonnull when either the whole
	     called function has been declared nonnull, or when the formal
	     argument corresponding to the actual argument has been.  */
	      && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code) || integer_zerop (arg))

	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
  BITMAP_FREE (argmap);
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */
expand_builtin_nonlocal_goto (tree exp)
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      r_label = copy_to_reg (r_label);

      /* Restore the frame pointer and stack pointer.  We must use a
	 temporary since the setjmp buffer may be a local.  */
      r_fp = copy_to_reg (r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* Ensure the frame pointer move is not optimized.  */
      emit_insn (gen_blockage ());
      emit_clobber (hard_frame_pointer_rtx);
      emit_clobber (frame_pointer_rtx);
      emit_move_insn (hard_frame_pointer_rtx, r_fp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
      if (regnum != INVALID_REGNUM && fixed_regs[regnum])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
      add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
    else if (CALL_P (insn))
/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

expand_builtin_update_setjmp_buf (rtx buf_addr)
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  buf_addr = convert_memory_address (Pmode, buf_addr);
    = gen_rtx_MEM (sa_mode,
		   plus_constant (Pmode, buf_addr,
				  2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

expand_builtin_prefetch (tree exp)
  tree arg0, arg1, arg2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
= call_expr_nargs (exp
);
1297 arg1
= CALL_EXPR_ARG (exp
, 1);
1299 arg1
= integer_zero_node
;
1301 arg2
= CALL_EXPR_ARG (exp
, 2);
1303 arg2
= integer_three_node
;
1305 /* Argument 0 is an address. */
1306 op0
= expand_expr (arg0
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
1308 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1309 if (TREE_CODE (arg1
) != INTEGER_CST
)
1311 error ("second argument to %<__builtin_prefetch%> must be a constant");
1312 arg1
= integer_zero_node
;
1314 op1
= expand_normal (arg1
);
1315 /* Argument 1 must be either zero or one. */
1316 if (INTVAL (op1
) != 0 && INTVAL (op1
) != 1)
1318 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1323 /* Argument 2 (locality) must be a compile-time constant int. */
1324 if (TREE_CODE (arg2
) != INTEGER_CST
)
1326 error ("third argument to %<__builtin_prefetch%> must be a constant");
1327 arg2
= integer_zero_node
;
1329 op2
= expand_normal (arg2
);
1330 /* Argument 2 must be 0, 1, 2, or 3. */
1331 if (INTVAL (op2
) < 0 || INTVAL (op2
) > 3)
1333 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1337 if (targetm
.have_prefetch ())
1339 class expand_operand ops
[3];
1341 create_address_operand (&ops
[0], op0
);
1342 create_integer_operand (&ops
[1], INTVAL (op1
));
1343 create_integer_operand (&ops
[2], INTVAL (op2
));
1344 if (maybe_expand_insn (targetm
.code_for_prefetch
, 3, ops
))
1348 /* Don't do anything with direct references to volatile memory, but
1349 generate code to handle other side effects. */
1350 if (!MEM_P (op0
) && side_effects_p (op0
))
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, cpymemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

get_memory_rtx (tree exp, tree len)
  tree orig_exp = exp;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    set_mem_alias_set (mem, 0);
/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)
/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

apply_args_size (void)
  static int size = -1;

  /* The values computed by this function never change.  */
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	    fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	    apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

apply_result_size (void)
  static int size = -1;

  /* The values computed by this function never change.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	    fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	    apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

result_vector (int savep, rtx result)
  int regno, size, align, nelts;
  fixed_size_mode mode;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

expand_builtin_apply_args_1 (void)
  int size, align, regno;
  fixed_size_mode mode;
  rtx struct_incoming_value
    = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
      = force_operand (plus_constant (Pmode, tem,
				      crtl->args.pretend_args_size),
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    emit_move_insn (adjust_address (registers, Pmode, size),
		    copy_to_reg (struct_incoming_value));

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

expand_builtin_apply_args (void)
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */

    temp = expand_builtin_apply_args_1 ();
    rtx_insn *seq = get_insns ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       it.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
  int size, align, regno;
  fixed_size_mode mode;
  rtx incoming_args, result, reg, dest, src;
  rtx_call_insn *call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value
    = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
  if (!STACK_GROWS_DOWNWARD)
    incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
					 incoming_args, 0, OPTAB_LIB_WIDEN);

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();

  /* Save the stack with nonlocal if available.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
    emit_stack_save (SAVE_BLOCK, &old_stack_level);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
  if (!STACK_GROWS_DOWNWARD)
      if (CONST_INT_P (argsize))
	dest = plus_constant (Pmode, dest, -INTVAL (argsize));
	dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
  if (targetm.have_untyped_call ())
      rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
      rtx_insn *seq = targetm.gen_untyped_call (mem, result,
						result_vector (1, result));
      for (rtx_insn *insn = seq; insn; insn = NEXT_INSN (insn))
	  add_reg_note (insn, REG_UNTYPED_CALL, NULL_RTX);
  else if (targetm.have_call_value ())
      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	    gcc_assert (!valreg); /* have_untyped_call required.  */

	    valreg = gen_rtx_REG (mode, regno);

      emit_insn (targetm.gen_call_value (valreg,
					 gen_rtx_MEM (FUNCTION_MODE, function),
					 const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
    emit_stack_restore (SAVE_BLOCK, old_stack_level);
  fixup_args_size_notes (call_insn, get_last_insn (), 0);

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
/* Perform an untyped return.  */

static void
expand_builtin_return (rtx result)
{
  int size, align, regno;
  fixed_size_mode mode;
  rtx reg;
  rtx_insn *call_fusage = 0;

  result = convert_memory_address (Pmode, result);

  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

  if (targetm.have_untyped_return ())
    {
      rtx vector = result_vector (0, result);
      emit_jump_insn (targetm.gen_untyped_return (result, vector));
      emit_barrier ();
      return;
    }

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
	emit_move_insn (reg, adjust_address (result, mode, size));

	push_to_sequence (call_fusage);
	emit_use (reg);
	call_fusage = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insn (call_fusage);

  /* Return whatever value was restored by jumping directly to the end
     of the function.  */
  expand_naked_return ();
}
/* Used by expand_builtin_classify_type and fold_builtin_classify_type.  */

static enum type_class
type_to_class (tree type)
{
  switch (TREE_CODE (type))
    {
    case VOID_TYPE:	   return void_type_class;
    case INTEGER_TYPE:	   return integer_type_class;
    case ENUMERAL_TYPE:	   return enumeral_type_class;
    case BOOLEAN_TYPE:	   return boolean_type_class;
    case POINTER_TYPE:	   return pointer_type_class;
    case REFERENCE_TYPE:   return reference_type_class;
    case OFFSET_TYPE:	   return offset_type_class;
    case REAL_TYPE:	   return real_type_class;
    case COMPLEX_TYPE:	   return complex_type_class;
    case FUNCTION_TYPE:	   return function_type_class;
    case METHOD_TYPE:	   return method_type_class;
    case RECORD_TYPE:	   return record_type_class;
    case UNION_TYPE:
    case QUAL_UNION_TYPE:  return union_type_class;
    case ARRAY_TYPE:	   return (TYPE_STRING_FLAG (type)
				   ? string_type_class : array_type_class);
    case LANG_TYPE:	   return lang_type_class;
    case OPAQUE_TYPE:	   return opaque_type_class;
    default:		   return no_type_class;
    }
}
/* Expand a call EXP to __builtin_classify_type.  */

static rtx
expand_builtin_classify_type (tree exp)
{
  if (call_expr_nargs (exp))
    return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
  return GEN_INT (no_type_class);
}
/* This helper macro, meant to be used in mathfn_built_in below, determines
   which among a set of builtin math functions is appropriate for a given type
   mode.  The `F' (float) and `L' (long double) are automatically generated
   from the 'double' case.  If a function supports the _Float<N> and
   _Float<N>X types, there are additional types that are considered with
   'F32', 'F64', 'F128', etc. suffixes.  */
#define CASE_MATHFN(MATHFN) \
  CASE_CFN_##MATHFN: \
  fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
  fcodel = BUILT_IN_##MATHFN##L ; break;
/* Similar to the above, but also add support for the _Float<N> and _Float<N>X
   types.  */
#define CASE_MATHFN_FLOATN(MATHFN) \
  CASE_CFN_##MATHFN: \
  fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
  fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
  fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
  fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
  fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
  break;

/* Similar to above, but appends _R after any F/L suffix.  */
#define CASE_MATHFN_REENT(MATHFN) \
  case CFN_BUILT_IN_##MATHFN##_R: \
  case CFN_BUILT_IN_##MATHFN##F_R: \
  case CFN_BUILT_IN_##MATHFN##L_R: \
  fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
  fcodel = BUILT_IN_##MATHFN##L_R ; break;
/* Return a function equivalent to FN but operating on floating-point
   values of type TYPE, or END_BUILTINS if no such function exists.
   This is purely an operation on function codes; it does not guarantee
   that the target actually has an implementation of the function.  */

static built_in_function
mathfn_built_in_2 (tree type, combined_fn fn)
{
  tree mtype;
  built_in_function fcode, fcodef, fcodel;
  built_in_function fcodef16 = END_BUILTINS;
  built_in_function fcodef32 = END_BUILTINS;
  built_in_function fcodef64 = END_BUILTINS;
  built_in_function fcodef128 = END_BUILTINS;
  built_in_function fcodef32x = END_BUILTINS;
  built_in_function fcodef64x = END_BUILTINS;
  built_in_function fcodef128x = END_BUILTINS;
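
/* List every math builtin the CASE_MATHFN* macros above know about.  The
   sequence is expanded once here and reused by mathfn_built_in_type below
   with a different definition of the per-case macros.  */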
#define SEQ_OF_CASE_MATHFN		\
  CASE_MATHFN (ACOS)			\
  CASE_MATHFN (ACOSH)			\
  CASE_MATHFN (ASIN)			\
  CASE_MATHFN (ASINH)			\
  CASE_MATHFN (ATAN)			\
  CASE_MATHFN (ATAN2)			\
  CASE_MATHFN (ATANH)			\
  CASE_MATHFN (CBRT)			\
  CASE_MATHFN_FLOATN (CEIL)		\
  CASE_MATHFN (CEXPI)			\
  CASE_MATHFN_FLOATN (COPYSIGN)		\
  CASE_MATHFN (COS)			\
  CASE_MATHFN (COSH)			\
  CASE_MATHFN (DREM)			\
  CASE_MATHFN (ERF)			\
  CASE_MATHFN (ERFC)			\
  CASE_MATHFN (EXP)			\
  CASE_MATHFN (EXP10)			\
  CASE_MATHFN (EXP2)			\
  CASE_MATHFN (EXPM1)			\
  CASE_MATHFN (FABS)			\
  CASE_MATHFN (FDIM)			\
  CASE_MATHFN_FLOATN (FLOOR)		\
  CASE_MATHFN_FLOATN (FMA)		\
  CASE_MATHFN_FLOATN (FMAX)		\
  CASE_MATHFN_FLOATN (FMIN)		\
  CASE_MATHFN (FMOD)			\
  CASE_MATHFN (FREXP)			\
  CASE_MATHFN (GAMMA)			\
  CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */ \
  CASE_MATHFN (HUGE_VAL)		\
  CASE_MATHFN (HYPOT)			\
  CASE_MATHFN (ILOGB)			\
  CASE_MATHFN (ICEIL)			\
  CASE_MATHFN (IFLOOR)			\
  CASE_MATHFN (INF)			\
  CASE_MATHFN (IRINT)			\
  CASE_MATHFN (IROUND)			\
  CASE_MATHFN (ISINF)			\
  CASE_MATHFN (J0)			\
  CASE_MATHFN (J1)			\
  CASE_MATHFN (JN)			\
  CASE_MATHFN (LCEIL)			\
  CASE_MATHFN (LDEXP)			\
  CASE_MATHFN (LFLOOR)			\
  CASE_MATHFN (LGAMMA)			\
  CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */ \
  CASE_MATHFN (LLCEIL)			\
  CASE_MATHFN (LLFLOOR)			\
  CASE_MATHFN (LLRINT)			\
  CASE_MATHFN (LLROUND)			\
  CASE_MATHFN (LOG)			\
  CASE_MATHFN (LOG10)			\
  CASE_MATHFN (LOG1P)			\
  CASE_MATHFN (LOG2)			\
  CASE_MATHFN (LOGB)			\
  CASE_MATHFN (LRINT)			\
  CASE_MATHFN (LROUND)			\
  CASE_MATHFN (MODF)			\
  CASE_MATHFN (NAN)			\
  CASE_MATHFN (NANS)			\
  CASE_MATHFN_FLOATN (NEARBYINT)	\
  CASE_MATHFN (NEXTAFTER)		\
  CASE_MATHFN (NEXTTOWARD)		\
  CASE_MATHFN (POW)			\
  CASE_MATHFN (POWI)			\
  CASE_MATHFN (POW10)			\
  CASE_MATHFN (REMAINDER)		\
  CASE_MATHFN (REMQUO)			\
  CASE_MATHFN_FLOATN (RINT)		\
  CASE_MATHFN_FLOATN (ROUND)		\
  CASE_MATHFN_FLOATN (ROUNDEVEN)	\
  CASE_MATHFN (SCALB)			\
  CASE_MATHFN (SCALBLN)			\
  CASE_MATHFN (SCALBN)			\
  CASE_MATHFN (SIGNBIT)			\
  CASE_MATHFN (SIGNIFICAND)		\
  CASE_MATHFN (SIN)			\
  CASE_MATHFN (SINCOS)			\
  CASE_MATHFN (SINH)			\
  CASE_MATHFN_FLOATN (SQRT)		\
  CASE_MATHFN (TAN)			\
  CASE_MATHFN (TANH)			\
  CASE_MATHFN (TGAMMA)			\
  CASE_MATHFN_FLOATN (TRUNC)		\
  CASE_MATHFN (Y0)			\
  CASE_MATHFN (Y1)			\
  CASE_MATHFN (YN)

  switch (fn)
    {
    SEQ_OF_CASE_MATHFN

    default:
      return END_BUILTINS;
    }

  mtype = TYPE_MAIN_VARIANT (type);
  if (mtype == double_type_node)
    return fcode;
  else if (mtype == float_type_node)
    return fcodef;
  else if (mtype == long_double_type_node)
    return fcodel;
  else if (mtype == float16_type_node)
    return fcodef16;
  else if (mtype == float32_type_node)
    return fcodef32;
  else if (mtype == float64_type_node)
    return fcodef64;
  else if (mtype == float128_type_node)
    return fcodef128;
  else if (mtype == float32x_type_node)
    return fcodef32x;
  else if (mtype == float64x_type_node)
    return fcodef64x;
  else if (mtype == float128x_type_node)
    return fcodef128x;
  else
    return END_BUILTINS;
}
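
/* The per-case macros are redefined by mathfn_built_in_type below;
   only SEQ_OF_CASE_MATHFN stays defined until then.  */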
#undef CASE_MATHFN
#undef CASE_MATHFN_FLOATN
#undef CASE_MATHFN_REENT

/* Return a mathematical function equivalent to FN but operating directly on
   TYPE, if available.  If IMPLICIT_P is true use the implicit builtin
   declaration, otherwise use the explicit declaration.  If we can't do the
   conversion, return null.  */

static tree
mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
{
  built_in_function fcode2 = mathfn_built_in_2 (type, fn);
  if (fcode2 == END_BUILTINS)
    return NULL_TREE;

  if (implicit_p && !builtin_decl_implicit_p (fcode2))
    return NULL_TREE;

  return builtin_decl_explicit (fcode2);
}

/* Like mathfn_built_in_1, but always use the implicit array.  */

tree
mathfn_built_in (tree type, combined_fn fn)
{
  return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
}

/* Like mathfn_built_in_1, but take a built_in_function and
   always use the implicit array.  */

tree
mathfn_built_in (tree type, enum built_in_function fn)
{
  return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
}
/* Return the type associated with a built in function, i.e., the one
   to be passed to mathfn_built_in to get the type-specific
   function.  */

tree
mathfn_built_in_type (combined_fn fn)
{
#define CASE_MATHFN(MATHFN)		\
  case CFN_BUILT_IN_##MATHFN:		\
    return double_type_node;		\
  case CFN_BUILT_IN_##MATHFN##F:	\
    return float_type_node;		\
  case CFN_BUILT_IN_##MATHFN##L:	\
    return long_double_type_node;

#define CASE_MATHFN_FLOATN(MATHFN)	\
  CASE_MATHFN(MATHFN)			\
  case CFN_BUILT_IN_##MATHFN##F16:	\
    return float16_type_node;		\
  case CFN_BUILT_IN_##MATHFN##F32:	\
    return float32_type_node;		\
  case CFN_BUILT_IN_##MATHFN##F64:	\
    return float64_type_node;		\
  case CFN_BUILT_IN_##MATHFN##F128:	\
    return float128_type_node;		\
  case CFN_BUILT_IN_##MATHFN##F32X:	\
    return float32x_type_node;		\
  case CFN_BUILT_IN_##MATHFN##F64X:	\
    return float64x_type_node;		\
  case CFN_BUILT_IN_##MATHFN##F128X:	\
    return float128x_type_node;

/* Similar to above, but appends _R after any F/L suffix.  */
#define CASE_MATHFN_REENT(MATHFN)	\
  case CFN_BUILT_IN_##MATHFN##_R:	\
    return double_type_node;		\
  case CFN_BUILT_IN_##MATHFN##F_R:	\
    return float_type_node;		\
  case CFN_BUILT_IN_##MATHFN##L_R:	\
    return long_double_type_node;

  switch (fn)
    {
    SEQ_OF_CASE_MATHFN

    default:
      return NULL_TREE;
    }

#undef CASE_MATHFN
#undef CASE_MATHFN_FLOATN
#undef CASE_MATHFN_REENT
#undef SEQ_OF_CASE_MATHFN
}
/* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
   return its code, otherwise return IFN_LAST.  Note that this function
   only tests whether the function is defined in internal-fn.def, not whether
   it is actually available on the target.  */

internal_fn
associated_internal_fn (tree fndecl)
{
  gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
  tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
  switch (DECL_FUNCTION_CODE (fndecl))
    {
#define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
    CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
#define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
    CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
#define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
    CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
#include "internal-fn.def"

    CASE_FLT_FN (BUILT_IN_POW10):
      return IFN_EXP10;

    CASE_FLT_FN (BUILT_IN_DREM):
      return IFN_REMAINDER;

    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
	return IFN_LDEXP;
      return IFN_LAST;

    default:
      return IFN_LAST;
    }
}
/* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
   on the current target by a call to an internal function, return the
   code of that internal function, otherwise return IFN_LAST.  The caller
   is responsible for ensuring that any side-effects of the built-in
   call are dealt with correctly.  E.g. if CALL sets errno, the caller
   must decide that the errno result isn't needed or make it available
   in some other way.  */

internal_fn
replacement_internal_fn (gcall *call)
{
  if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
    {
      internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
      if (ifn != IFN_LAST)
	{
	  tree_pair types = direct_internal_fn_types (ifn, call);
	  optimization_type opt_type = bb_optimization_type (gimple_bb (call));
	  if (direct_internal_fn_supported_p (ifn, types, opt_type))
	    return ifn;
	}
    }
  return IFN_LAST;
}
/* Expand a call to the builtin trinary math functions (fma).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, op2, result;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1, arg2;
  machine_mode mode;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  arg2 = CALL_EXPR_ARG (exp, 2);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_FMA):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
      builtin_optab = fma_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  result = gen_reg_rtx (mode);

  /* Always stabilize the argument list.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
  CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);
  op2 = expand_normal (arg2);

  start_sequence ();

  /* Compute into RESULT.
     Set RESULT to wherever the result comes back.  */
  result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
			      result, 0);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (result == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return result;
}
/* Expand a call to the builtin sin and cos math functions.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      builtin_optab = sincos_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Check if sincos insn is available, otherwise fall back
     to sin or cos insn.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_FLT_FN (BUILT_IN_SIN):
	builtin_optab = sin_optab; break;
      CASE_FLT_FN (BUILT_IN_COS):
	builtin_optab = cos_optab; break;
      default:
	gcc_unreachable ();
      }

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into RESULT.
	 Set RESULT to wherever the result comes back.  */
      if (builtin_optab == sincos_optab)
	{
	  int ok;

	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_SIN):
	      ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_COS):
	      ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  gcc_assert (ok);
	}
      else
	result = expand_unop (mode, builtin_optab, op0, result, 0);

      if (result != 0)
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return result;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  return expand_call (exp, target, target == const0_rtx);
}
/* Given an interclass math builtin decl FNDECL and its argument ARG
   return an RTL instruction code that implements the functionality.
   If that isn't possible or available return CODE_FOR_nothing.  */

static enum insn_code
interclass_mathfn_icode (tree arg, tree fndecl)
{
  bool errno_set = false;
  optab builtin_optab = unknown_optab;
  machine_mode mode;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ILOGB):
      errno_set = true; builtin_optab = ilogb_optab; break;
    CASE_FLT_FN (BUILT_IN_ISINF):
      builtin_optab = isinf_optab; break;
    case BUILT_IN_ISNORMAL:
    case BUILT_IN_ISFINITE:
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      /* These builtins have no optabs (yet).  */
      break;
    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && errno_set)
    return CODE_FOR_nothing;

  /* Optab mode depends on the mode of the input argument.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (builtin_optab)
    return optab_handler (builtin_optab, mode);
  return CODE_FOR_nothing;
}
/* Expand a call to one of the builtin math functions that operate on
   a floating point argument and output an integer result (ilogb, isinf,
   isnan, etc).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_interclass_mathfn (tree exp, rtx target)
{
  enum insn_code icode = CODE_FOR_nothing;
  rtx op0;
  tree fndecl = get_callee_fndecl (exp);
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  icode = interclass_mathfn_icode (arg, fndecl);
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (icode != CODE_FOR_nothing)
    {
      class expand_operand ops[1];
      rtx_insn *last = get_last_insn ();
      tree orig_arg = arg;

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      if (mode != GET_MODE (op0))
	op0 = convert_to_mode (mode, op0, 0);
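
      /* Emit the unary insn on the (possibly converted) operand, after
	 legitimizing the output operand.  */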
      create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_legitimize_operands (icode, 0, 1, ops)
	  && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
	return ops[0].value;

      delete_insns_since (last);
      CALL_EXPR_ARG (exp, 0) = orig_arg;
    }

  return NULL_RTX;
}
/* Expand a call to the builtin sincos math function.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function.  */

static rtx
expand_builtin_sincos (tree exp)
{
  rtx op0, op1, op2, target1, target2;
  machine_mode mode;
  tree arg, sinp, cosp;
  int result;
  location_t loc = EXPR_LOCATION (exp);
  tree alias_type, alias_off;

  if (!validate_arglist (exp, REAL_TYPE,
			 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  sinp = CALL_EXPR_ARG (exp, 1);
  cosp = CALL_EXPR_ARG (exp, 2);

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Check if sincos insn is available, otherwise emit the call.  */
  if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  target1 = gen_reg_rtx (mode);
  target2 = gen_reg_rtx (mode);

  op0 = expand_normal (arg);
  alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
  alias_off = build_int_cst (alias_type, 0);
  op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
					sinp, alias_off));
  op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
					cosp, alias_off));

  /* Compute into target1 and target2.
     Set TARGET to wherever the result comes back.  */
  result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
  gcc_assert (result);

  /* Move target1 and target2 to the memory locations indicated
     by op1 and op2.  */
  emit_move_insn (op1, target1);
  emit_move_insn (op2, target2);

  return const0_rtx;
}
/* Expand a call to the internal cexpi builtin to the sincos math function.
   EXP is the expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  */

static rtx
expand_builtin_cexpi (tree exp, rtx target)
{
  tree fndecl = get_callee_fndecl (exp);
  tree arg, type;
  machine_mode mode;
  rtx op0, op1, op2;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Try expanding via a sincos optab, fall back to emitting a libcall
     to sincos or cexp.  We are sure we have sincos or cexp because cexpi
     is only generated from sincos, cexp or if we have either of them.  */
  if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
    {
      op1 = gen_reg_rtx (mode);
      op2 = gen_reg_rtx (mode);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      /* Compute into op1 and op2.  */
      expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
    }
  else if (targetm.libc_has_function (function_sincos, type))
    {
      tree call, fn = NULL_TREE;
      tree top1, top2;
      rtx op1a, op2a;

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = builtin_decl_explicit (BUILT_IN_SINCOS);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
      else
	gcc_unreachable ();

      op1 = assign_temp (TREE_TYPE (arg), 1, 1);
      op2 = assign_temp (TREE_TYPE (arg), 1, 1);
      op1a = copy_addr_to_reg (XEXP (op1, 0));
      op2a = copy_addr_to_reg (XEXP (op2, 0));
      top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
      top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);

      /* Make sure not to fold the sincos call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
				      call, 3, arg, top1, top2));
    }
  else
    {
      tree call, fn = NULL_TREE, narg;
      tree ctype = build_complex_type (type);

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = builtin_decl_explicit (BUILT_IN_CEXPF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = builtin_decl_explicit (BUILT_IN_CEXP);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = builtin_decl_explicit (BUILT_IN_CEXPL);
      else
	gcc_unreachable ();

      /* If we don't have a decl for cexp create one.  This is the
	 friendliest fallback if the user calls __builtin_cexpi
	 without full target C99 function support.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;
	  const char *name = NULL;

	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	    name = "cexpf";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	    name = "cexp";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	    name = "cexpl";

	  fntype = build_function_type_list (ctype, ctype, NULL_TREE);
	  fn = build_fn_decl (name, fntype);
	}

      narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
			      build_real (type, dconst0), arg);

      /* Make sure not to fold the cexp call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      return expand_expr (build_call_nary (ctype, call, 1, narg),
			  target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now build the proper return type.  */
  return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
			      make_tree (TREE_TYPE (arg), op2),
			      make_tree (TREE_TYPE (arg), op1)),
		      target, VOIDmode, EXPAND_NORMAL);
}
/* Conveniently construct a function call expression.  FNDECL names the
   function to be called, N is the number of arguments, and the "..."
   parameters are the argument expressions.  Unlike build_call_expr
   this doesn't fold the call, hence it will always return a CALL_EXPR.  */

static tree
build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
{
  va_list ap;
  tree fntype = TREE_TYPE (fndecl);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);

  va_start (ap, n);
  fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
  va_end (ap);
  SET_EXPR_LOCATION (fn, loc);
  return fn;
}
/* Expand a call to one of the builtin rounding functions gcc defines
   as an extension (lfloor and lceil).  As these are gcc extensions we
   do not need to worry about setting errno to EDOM.
   If expanding via optab fails, lower expression to (int)(floor(x)).
   EXP is the expression that is a call to the builtin function;
   if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, tmp;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fallback_fn;
  tree fallback_fndecl;
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
      builtin_optab = lceil_optab;
      fallback_fn = BUILT_IN_CEIL;
      break;

    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      builtin_optab = lfloor_optab;
      fallback_fn = BUILT_IN_FLOOR;
      break;

    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  /* Compute into TARGET.  */
  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns).  */
  end_sequence ();

  /* Fall back to floating point rounding optab.  */
  fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);

  /* For non-C99 targets we may end up without a fallback fndecl here
     if the user called __builtin_lfloor directly.  In this case emit
     a call to the floor/ceil variants nevertheless.  This should result
     in the best user experience for not full C99 targets.  */
  if (fallback_fndecl == NULL_TREE)
    {
      tree fntype;
      const char *name = NULL;

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_ICEIL:
	case BUILT_IN_LCEIL:
	case BUILT_IN_LLCEIL:
	  name = "ceil";
	  break;
	case BUILT_IN_ICEILF:
	case BUILT_IN_LCEILF:
	case BUILT_IN_LLCEILF:
	  name = "ceilf";
	  break;
	case BUILT_IN_ICEILL:
	case BUILT_IN_LCEILL:
	case BUILT_IN_LLCEILL:
	  name = "ceill";
	  break;
	case BUILT_IN_IFLOOR:
	case BUILT_IN_LFLOOR:
	case BUILT_IN_LLFLOOR:
	  name = "floor";
	  break;
	case BUILT_IN_IFLOORF:
	case BUILT_IN_LFLOORF:
	case BUILT_IN_LLFLOORF:
	  name = "floorf";
	  break;
	case BUILT_IN_IFLOORL:
	case BUILT_IN_LFLOORL:
	case BUILT_IN_LLFLOORL:
	  name = "floorl";
	  break;
	default:
	  gcc_unreachable ();
	}

      fntype = build_function_type_list (TREE_TYPE (arg),
					 TREE_TYPE (arg), NULL_TREE);
      fallback_fndecl = build_fn_decl (name, fntype);
    }

  exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);

  tmp = expand_normal (exp);
  tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));

  /* Truncate the result of floating point optab to integer
     via expand_fix ().  */
  target = gen_reg_rtx (mode);
  expand_fix (target, tmp, 0);

  return target;
}
/* Expand a call to one of the builtin math functions doing integer
   conversion (lrint).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn_2 (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg;
  machine_mode mode;
  enum built_in_function fallback_fn = BUILT_IN_NONE;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_IRINT):
      fallback_fn = BUILT_IN_LRINT;
      gcc_fallthrough ();
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      builtin_optab = lrint_optab;
      break;

    CASE_FLT_FN (BUILT_IN_IROUND):
      fallback_fn = BUILT_IN_LROUND;
      gcc_fallthrough ();
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      builtin_optab = lround_optab;
      break;

    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
    return NULL_RTX;

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (!flag_errno_math)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      if (expand_sfix_optab (result, op0, builtin_optab))
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return result;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  if (fallback_fn != BUILT_IN_NONE)
    {
      /* Fall back to rounding to long int.  Use implicit_p 0 - for non-C99
	 targets, (int) round (x) should never be transformed into
	 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
	 a call to lround in the hope that the target provides at least some
	 C99 functions.  This should result in the best user experience for
	 not full C99 targets.  */
      tree fallback_fndecl = mathfn_built_in_1
	(TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);

      exp = build_call_nofold_loc (EXPR_LOCATION (exp),
				   fallback_fndecl, 1, arg);

      target = expand_call (exp, NULL_RTX, target == const0_rtx);
      target = maybe_emit_group_store (target, TREE_TYPE (exp));
      return convert_to_mode (mode, target, 0);
    }

  return expand_call (exp, target, target == const0_rtx);
}
/* Expand a call to the powi built-in mathematical function.  Return NULL_RTX if
   a normal call should be emitted rather than expanding the function
   in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_powi (tree exp, rtx target)
{
  tree arg0, arg1;
  rtx op0, op1;
  machine_mode mode;
  machine_mode mode2;

  if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Emit a libcall to libgcc.  */

  /* Mode of the 2nd argument must match that of an int.  */
  mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();

  if (target == NULL_RTX)
    target = gen_reg_rtx (mode);

  op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != mode)
    op0 = convert_to_mode (mode, op0, 0);
  op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
  if (GET_MODE (op1) != mode2)
    op1 = convert_to_mode (mode2, op1, 0);

  target = emit_library_call_value (optab_libfunc (powi_optab, mode),
				    target, LCT_CONST, mode,
				    op0, mode, op1, mode2);

  return target;
}
/* Expand expression EXP which is a call to the strlen builtin.  Return
   NULL_RTX if we failed and the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strlen (tree exp, rtx target,
		       machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree src = CALL_EXPR_ARG (exp, 0);

  /* If the length can be computed at compile-time, return it.  */
  if (tree len = c_strlen (src, 0))
    return expand_expr (len, target, target_mode, EXPAND_NORMAL);

  /* If the length can be computed at compile-time and is constant
     integer, but there are side-effects in src, evaluate
     src for side-effects, then return len.
     E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
     can be optimized into: i++; x = 3;  */
  tree len = c_strlen (src, 1);
  if (len && TREE_CODE (len) == INTEGER_CST)
    {
      expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (len, target, target_mode, EXPAND_NORMAL);
    }

  unsigned int align = get_pointer_alignment (src) / BITS_PER_UNIT;

  /* If SRC is not a pointer type, don't do this operation inline.  */
  if (align == 0)
    return NULL_RTX;

  /* Bail out if we can't compute strlen in the right mode.  */
  machine_mode insn_mode;
  enum insn_code icode = CODE_FOR_nothing;
  FOR_EACH_MODE_FROM (insn_mode, target_mode)
    {
      icode = optab_handler (strlen_optab, insn_mode);
      if (icode != CODE_FOR_nothing)
	break;
    }
  if (insn_mode == VOIDmode)
    return NULL_RTX;

  /* Make a place to hold the source address.  We will not expand
     the actual source until we are sure that the expansion will
     not fail -- there are trees that cannot be expanded twice.  */
  rtx src_reg = gen_reg_rtx (Pmode);

  /* Mark the beginning of the strlen sequence so we can emit the
     source operand later.  */
  rtx_insn *before_strlen = get_last_insn ();

  class expand_operand ops[4];
  create_output_operand (&ops[0], target, insn_mode);
  create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
  create_integer_operand (&ops[2], 0);
  create_integer_operand (&ops[3], align);
  if (!maybe_expand_insn (icode, 4, ops))
    return NULL_RTX;

  /* Check to see if the argument was declared attribute nonstring
     and if so, issue a warning since at this point it's not known
     to be nul-terminated.  */
  maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);

  /* Now that we are assured of success, expand the source.  */
  start_sequence ();
  rtx pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
  if (pat != src_reg)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      if (GET_MODE (pat) != Pmode)
	pat = convert_to_mode (Pmode, pat,
			       POINTERS_EXTEND_UNSIGNED);
#endif
      emit_move_insn (src_reg, pat);
    }
  pat = get_insns ();
  end_sequence ();

  if (before_strlen)
    emit_insn_after (pat, before_strlen);
  else
    emit_insn_before (pat, get_insns ());

  /* Return the value in the proper mode for this function.  */
  if (GET_MODE (ops[0].value) == target_mode)
    target = ops[0].value;
  else if (target != 0)
    convert_move (target, ops[0].value, 0);
  else
    target = convert_to_mode (target_mode, ops[0].value, 0);

  return target;
}
/* Expand call EXP to the strnlen built-in, returning the result
   and setting it in TARGET.  Otherwise return NULL_RTX on failure.  */

static rtx
expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree src = CALL_EXPR_ARG (exp, 0);
  tree bound = CALL_EXPR_ARG (exp, 1);

  location_t loc = UNKNOWN_LOCATION;
  if (EXPR_HAS_LOCATION (exp))
    loc = EXPR_LOCATION (exp);

  /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
     so these conversions aren't necessary.  */
  c_strlen_data lendata = { };
  tree len = c_strlen (src, 0, &lendata, 1);
  if (len)
    len = fold_convert_loc (loc, TREE_TYPE (bound), len);

  if (TREE_CODE (bound) == INTEGER_CST)
    {
      if (!len)
	return NULL_RTX;

      len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
      return expand_expr (len, target, target_mode, EXPAND_NORMAL);
    }
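
  /* For a non-constant bound, use the range information attached to its
     SSA_NAME, if any, to bound the result.  */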
  if (TREE_CODE (bound) != SSA_NAME)
    return NULL_RTX;

  wide_int min, max;
  value_range r;
  get_global_range_query ()->range_of_expr (r, bound);
  if (r.kind () != VR_RANGE)
    return NULL_RTX;
  min = r.lower_bound ();
  max = r.upper_bound ();

  if (!len || TREE_CODE (len) != INTEGER_CST)
    {
      bool exact;
      lendata.decl = unterminated_array (src, &len, &exact);
      if (!lendata.decl)
	return NULL_RTX;
    }

  if (wi::gtu_p (min, wi::to_wide (len)))
    return expand_expr (len, target, target_mode, EXPAND_NORMAL);

  len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
  return expand_expr (len, target, target_mode, EXPAND_NORMAL);
}
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from bytes at DATA + OFFSET and return it reinterpreted as
   a target constant.  */

static rtx
builtin_memcpy_read_str (void *data, void *, HOST_WIDE_INT offset,
			 fixed_size_mode mode)
{
  /* The REPresentation pointed to by DATA need not be a nul-terminated
     string but the caller guarantees it's large enough for MODE.  */
  const char *rep = (const char *) data;

  /* The by-pieces infrastructure does not try to pick a vector mode
     for memcpy expansion.  */
  return c_readstr (rep + offset, as_a <scalar_int_mode> (mode),
		    /*nul_terminated=*/false);
}
/* LEN specifies the length of the block of the memcpy/memset operation.
   Figure out its range and put it into MIN_SIZE/MAX_SIZE.
   In some cases we can make a very likely guess on the max size, then we
   set it into PROBABLE_MAX_SIZE.  */

static void
determine_block_size (tree len, rtx len_rtx,
		      unsigned HOST_WIDE_INT *min_size,
		      unsigned HOST_WIDE_INT *max_size,
		      unsigned HOST_WIDE_INT *probable_max_size)
{
  if (CONST_INT_P (len_rtx))
    {
      *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
      return;
    }

  wide_int min, max;
  enum value_range_kind range_type = VR_UNDEFINED;

  /* Determine bounds from the type.  */
  if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
    *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
  else
    *min_size = 0;
  if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
    *probable_max_size = *max_size
      = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
  else
    *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
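
  /* Refine the bounds using any range information recorded for the
     SSA_NAME.  */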
  if (TREE_CODE (len) == SSA_NAME)
    {
      value_range r;
      get_global_range_query ()->range_of_expr (r, len);
      range_type = r.kind ();
      if (range_type != VR_UNDEFINED)
	{
	  min = wi::to_wide (r.min ());
	  max = wi::to_wide (r.max ());
	}
    }
  if (range_type == VR_RANGE)
    {
      if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
	*min_size = min.to_uhwi ();
      if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
	*probable_max_size = *max_size = max.to_uhwi ();
    }
  else if (range_type == VR_ANTI_RANGE)
    {
      /* Code such as "if (n < 100) memcpy (a, b, n)" produces an anti
	 range allowing negative values of N.  We still can use the
	 information and make a guess that N is not negative.  */
      if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
	*probable_max_size = min.to_uhwi () - 1;
    }
  gcc_checking_assert (*max_size <=
		       (unsigned HOST_WIDE_INT)
		       GET_MODE_MASK (GET_MODE (len_rtx)));
}
/* Expand a call EXP to the memcpy builtin.
   Return NULL_RTX if we failed, the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memcpy (tree exp, rtx target)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);

  return expand_builtin_memory_copy_args (dest, src, len, target, exp,
					  /*retmode=*/ RETURN_BEGIN, false);
}
/* Check a call EXP to the memmove built-in for validity.
   Return NULL_RTX on both success and failure.  */

static rtx
expand_builtin_memmove (tree exp, rtx target)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);

  return expand_builtin_memory_copy_args (dest, src, len, target, exp,
					  /*retmode=*/ RETURN_BEGIN, true);
}
/* Expand a call EXP to the mempcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_mempcpy (tree exp, rtx target)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);

  /* Policy does not generally allow using compute_objsize (which
     is used internally by check_memop_size) to change code generation
     or drive optimization decisions.

     In this instance it is safe because the code we generate has
     the same semantics regardless of the return value of
     check_memop_sizes.  Exactly the same amount of data is copied
     and the return value is exactly the same in both cases.

     Furthermore, check_memop_size always uses mode 0 for the call to
     compute_objsize, so the imprecise nature of compute_objsize is
     not a problem here.  */

  /* Avoid expanding mempcpy into memcpy when the call is determined
     to overflow the buffer.  This also prevents the same overflow
     from being diagnosed again when expanding memcpy.  */

  return expand_builtin_mempcpy_args (dest, src, len,
				      target, exp, /*retmode=*/ RETURN_END);
}
/* Helper function to do the actual work for expand of memory copy family
   functions (memcpy, mempcpy, stpcpy).  Expanding should assign LEN bytes
   of memory from SRC to DEST and assign to TARGET if convenient.  Return
   value is based on RETMODE argument.  */

static rtx
expand_builtin_memory_copy_args (tree dest, tree src, tree len,
				 rtx target, tree exp, memop_ret retmode,
				 bool might_overlap)
{
  unsigned int src_align = get_pointer_alignment (src);
  unsigned int dest_align = get_pointer_alignment (dest);
  rtx dest_mem, src_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;
  unsigned HOST_WIDE_INT min_size;
  unsigned HOST_WIDE_INT max_size;
  unsigned HOST_WIDE_INT probable_max_size;
  bool is_move_done;

  /* If DEST is not a pointer type, call the normal function.  */
  if (dest_align == 0)
    return NULL_RTX;

  /* If SRC is not a pointer type, don't do this
     operation in-line.  */
  if (src_align == 0)
    return NULL_RTX;

  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
			    &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;
  dest_mem = get_memory_rtx (dest, len);
  set_mem_align (dest_mem, dest_align);
  len_rtx = expand_normal (len);
  determine_block_size (len, len_rtx, &min_size, &max_size,
			&probable_max_size);

  /* Try to get the byte representation of the constant SRC points to,
     with its byte size in NBYTES.  */
  unsigned HOST_WIDE_INT nbytes;
  const char *rep = getbyterep (src, &nbytes);

  /* If the function's constant bound LEN_RTX is less than or equal
     to the byte size of the representation of the constant argument,
     and if block move would be done by pieces, we can avoid loading
     the bytes from memory and only store the computed constant.
     This works in the overlap (memmove) case as well because
     store_by_pieces just generates a series of stores of constants
     from the representation returned by getbyterep().  */
  if (rep
      && CONST_INT_P (len_rtx)
      && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes
      && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
			      CONST_CAST (char *, rep),
			      dest_align, false))
    {
      dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				  builtin_memcpy_read_str,
				  CONST_CAST (char *, rep),
				  dest_align, false, retmode);
      dest_mem = force_operand (XEXP (dest_mem, 0), target);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }
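
  /* Otherwise expand as a block move, possibly letting the target emit a
     mempcpy libcall when the end address is wanted.  */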
  src_mem = get_memory_rtx (src, len);
  set_mem_align (src_mem, src_align);

  /* Copy word part most expediently.  */
  enum block_op_methods method = BLOCK_OP_NORMAL;
  if (CALL_EXPR_TAILCALL (exp)
      && (retmode == RETURN_BEGIN || target == const0_rtx))
    method = BLOCK_OP_TAILCALL;
  bool use_mempcpy_call = (targetm.libc_has_fast_function (BUILT_IN_MEMPCPY)
			   && retmode == RETURN_END
			   && !might_overlap
			   && target != const0_rtx);
  if (use_mempcpy_call)
    method = BLOCK_OP_NO_LIBCALL_RET;
  dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
				     expected_align, expected_size,
				     min_size, max_size, probable_max_size,
				     use_mempcpy_call, &is_move_done,
				     might_overlap);

  /* Bail out when a mempcpy call would be expanded as libcall and when
     we have a target that provides a fast implementation
     of mempcpy routine.  */
  if (!is_move_done)
    return NULL_RTX;

  if (dest_addr == pc_rtx)
    return NULL_RTX;

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), target);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  if (retmode != RETURN_BEGIN && target != const0_rtx)
    {
      dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
      /* stpcpy pointer to last byte.  */
      if (retmode == RETURN_END_MINUS_ONE)
	dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
    }

  return dest_addr;
}
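
/* Helper for expand_builtin_mempcpy: DEST, SRC and LEN are the broken-out
   arguments of the call, so this can also be used without constructing an
   actual CALL_EXPR.  ORIG_EXP, TARGET and RETMODE are as for
   expand_builtin_memory_copy_args.  */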
static rtx
expand_builtin_mempcpy_args (tree dest, tree src, tree len,
			     rtx target, tree orig_exp, memop_ret retmode)
{
  return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
					  retmode, false);
}
/* Expand into a movstr instruction, if one is available.  Return NULL_RTX if
   we failed, the caller should emit a normal call, otherwise try to
   get the result in TARGET, if convenient.
   Return value is based on RETMODE argument.  */

static rtx
expand_movstr (tree dest, tree src, rtx target, memop_ret retmode)
{
  class expand_operand ops[3];
  rtx dest_mem;
  rtx src_mem;

  if (!targetm.have_movstr ())
    return NULL_RTX;

  dest_mem = get_memory_rtx (dest, NULL);
  src_mem = get_memory_rtx (src, NULL);
  if (retmode == RETURN_BEGIN)
    {
      target = force_reg (Pmode, XEXP (dest_mem, 0));
      dest_mem = replace_equiv_address (dest_mem, target);
    }

  create_output_operand (&ops[0],
			 retmode != RETURN_BEGIN ? target : NULL_RTX, Pmode);
  create_fixed_operand (&ops[1], dest_mem);
  create_fixed_operand (&ops[2], src_mem);
  if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
    return NULL_RTX;

  if (retmode != RETURN_BEGIN && target != const0_rtx)
    {
      target = ops[0].value;
      /* movstr is supposed to set end to the address of the NUL
	 terminator.  If the caller requested a mempcpy-like return value,
	 adjust it.  */
      if (retmode == RETURN_END)
	{
	  rtx tem = plus_constant (GET_MODE (target),
				   gen_lowpart (GET_MODE (target), target), 1);
	  emit_move_insn (target, force_operand (tem, NULL_RTX));
	}
    }
  return target;
}
/* Expand expression EXP, which is a call to the strcpy builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient (and in mode MODE if that's
   convenient).  */

static rtx
expand_builtin_strcpy (tree exp, rtx target)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);

  return expand_builtin_strcpy_args (exp, dest, src, target);
}

/* Helper function to do the actual work for expand_builtin_strcpy.  The
   arguments to the builtin_strcpy call DEST and SRC are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_strcpy.  */

static rtx
expand_builtin_strcpy_args (tree, tree dest, tree src, rtx target)
{
  return expand_movstr (dest, src, target, /*retmode=*/ RETURN_BEGIN);
}
/* Expand a call EXP to the stpcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
{
  tree dst, src;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* If return value is ignored, transform stpcpy into strcpy.  */
  if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      tree len, lenp1;
      rtx ret;

      /* Ensure we get an actual string whose length can be evaluated at
	 compile-time, not an expression containing a string.  This is
	 because the latter will potentially produce pessimized code
	 when used to produce the return value.  */
      c_strlen_data lendata = { };
      if (!c_getstr (src)
	  || !(len = c_strlen (src, 0, &lendata, 1)))
	return expand_movstr (dst, src, target,
			      /*retmode=*/ RETURN_END_MINUS_ONE);

      lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
      ret = expand_builtin_mempcpy_args (dst, src, lenp1,
					 target, exp,
					 /*retmode=*/ RETURN_END_MINUS_ONE);
      if (ret)
	return ret;

      if (TREE_CODE (len) == INTEGER_CST)
	{
	  rtx len_rtx = expand_normal (len);

	  if (CONST_INT_P (len_rtx))
	    {
	      ret = expand_builtin_strcpy_args (exp, dst, src, target);

	      if (ret)
		{
		  if (! target)
		    {
		      if (mode != VOIDmode)
			target = gen_reg_rtx (mode);
		      else
			target = gen_reg_rtx (GET_MODE (ret));
		    }
		  if (GET_MODE (target) != GET_MODE (ret))
		    ret = gen_lowpart (GET_MODE (target), ret);

		  ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
		  ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
		  gcc_assert (ret);

		  return target;
		}
	    }
	}

      return expand_movstr (dst, src, target,
			    /*retmode=*/ RETURN_END_MINUS_ONE);
    }
}
/* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
   arguments while being careful to avoid duplicate warnings (which could
   be issued if the expander were to expand the call, resulting in it
   being emitted in expand_call ()).  */

static rtx
expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
{
  if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
    {
      /* The call has been successfully expanded.  Check for nonstring
	 arguments and issue warnings as appropriate.  */
      maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
      return ret;
    }

  return NULL_RTX;
}
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

static rtx
builtin_strncpy_read_str (void *data, void *, HOST_WIDE_INT offset,
			  fixed_size_mode mode)
{
  const char *str = (const char *) data;

  if ((unsigned HOST_WIDE_INT) offset > strlen (str))
    return const0_rtx;

  /* The by-pieces infrastructure does not try to pick a vector mode
     for strncpy expansion.  */
  return c_readstr (str + offset, as_a <scalar_int_mode> (mode));
}
/* Helper to check the sizes of sequences and the destination of calls
   to __builtin_strncat and __builtin___strncat_chk.  Returns true on
   success (no overflow or invalid sizes), false otherwise.  */

static bool
check_strncat_sizes (tree exp, tree objsize)
{
  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  tree maxread = CALL_EXPR_ARG (exp, 2);

  /* Try to determine the range of lengths that the source expression
     refers to.  */
  c_strlen_data lendata = { };
  get_range_strlen (src, &lendata, /* eltsize = */ 1);

  /* Try to verify that the destination is big enough for the shortest
     string.  */

  access_data data (exp, access_read_write, maxread, true);
  if (!objsize && warn_stringop_overflow)
    {
      /* If it hasn't been provided by __strncat_chk, try to determine
	 the size of the destination object into which the source is
	 being copied.  */
      objsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);
    }

  /* Add one for the terminating nul.  */
  tree srclen = (lendata.minlen
		 ? fold_build2 (PLUS_EXPR, size_type_node, lendata.minlen,
				size_one_node)
		 : NULL_TREE);

  /* The strncat function copies at most MAXREAD bytes and always appends
     the terminating nul so the specified upper bound should never be equal
     to (or greater than) the size of the destination.  */
  if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
      && tree_int_cst_equal (objsize, maxread))
    {
      location_t loc = EXPR_LOCATION (exp);
      warning_at (loc, OPT_Wstringop_overflow_,
		  "%qD specified bound %E equals destination size",
		  get_callee_fndecl (exp), maxread);

      return false;
    }

  if (!srclen
      || (maxread && tree_fits_uhwi_p (maxread)
	  && tree_fits_uhwi_p (srclen)
	  && tree_int_cst_lt (maxread, srclen)))
    srclen = maxread;

  /* The number of bytes to write is LEN but check_access will also
     check SRCLEN if LEN's value isn't known.  */
  return check_access (exp, /*dstwrite=*/NULL_TREE, maxread, srclen,
		       objsize, data.mode, &data);
}
/* Expand expression EXP, which is a call to the strncpy builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  */

static rtx
expand_builtin_strncpy (tree exp, rtx target)
{
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  /* The number of bytes to write (not the maximum).  */
  tree len = CALL_EXPR_ARG (exp, 2);

  /* The length of the source sequence.  */
  tree slen = c_strlen (src, 1);

  /* We must be passed a constant len and src parameter.  */
  if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
    return NULL_RTX;

  slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We're required to pad with trailing zeros if the requested
     len is greater than strlen(s2)+1.  In that case try to
     use store_by_pieces, if it fails, punt.  */
  if (tree_int_cst_lt (slen, len))
    {
      unsigned int dest_align = get_pointer_alignment (dest);
      const char *p = c_getstr (src);
      rtx dest_mem;

      if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
	  || !can_store_by_pieces (tree_to_uhwi (len),
				   builtin_strncpy_read_str,
				   CONST_CAST (char *, p),
				   dest_align, false))
	return NULL_RTX;

      dest_mem = get_memory_rtx (dest, len);
      store_by_pieces (dest_mem, tree_to_uhwi (len),
		       builtin_strncpy_read_str,
		       CONST_CAST (char *, p), dest_align, false,
		       RETURN_BEGIN);
      dest_mem = force_operand (XEXP (dest_mem, 0), target);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  return NULL_RTX;
}
/* Return the RTL of a register in MODE generated from PREV in the
   previous iteration.  */

static rtx
gen_memset_value_from_prev (by_pieces_prev *prev, fixed_size_mode mode)
{
  rtx target = nullptr;
  if (prev != nullptr && prev->data != nullptr)
    {
      /* Use the previous data in the same mode.  */
      if (prev->mode == mode)
	return prev->data;

      fixed_size_mode prev_mode = prev->mode;

      /* Don't use the previous data to write QImode if it is in a
	 vector mode.  */
      if (VECTOR_MODE_P (prev_mode) && mode == QImode)
	return target;

      rtx prev_rtx = prev->data;

      if (REG_P (prev_rtx)
	  && HARD_REGISTER_P (prev_rtx)
	  && lowpart_subreg_regno (REGNO (prev_rtx), prev_mode, mode) < 0)
	{
	  /* This case occurs when PREV_MODE is a vector and when
	     MODE is too small to store using vector operations.
	     After register allocation, the code will need to move the
	     lowpart of the vector register into a non-vector register.

	     Also, the target has chosen to use a hard register
	     instead of going with the default choice of using a
	     pseudo register.  We should respect that choice and try to
	     avoid creating a pseudo register with the same mode as the
	     current hard register.

	     In principle, we could just use a lowpart MODE subreg of
	     the vector register.  However, the vector register mode might
	     be too wide for non-vector registers, and we already know
	     that the non-vector mode is too small for vector registers.
	     It's therefore likely that we'd need to spill to memory in
	     the vector mode and reload the non-vector value from there.

	     Try to avoid that by reducing the vector register to the
	     smallest size that it can hold.  This should increase the
	     chances that non-vector registers can hold both the inner
	     and outer modes of the subreg that we generate later.  */
	  machine_mode m;
	  fixed_size_mode candidate;
	  FOR_EACH_MODE_IN_CLASS (m, GET_MODE_CLASS (mode))
	    if (is_a <fixed_size_mode> (m, &candidate))
	      {
		if (GET_MODE_SIZE (candidate)
		    >= GET_MODE_SIZE (prev_mode))
		  break;
		if (GET_MODE_SIZE (candidate) >= GET_MODE_SIZE (mode)
		    && lowpart_subreg_regno (REGNO (prev_rtx),
					     prev_mode, candidate) >= 0)
		  {
		    target = lowpart_subreg (candidate, prev_rtx,
					     prev_mode);
		    prev_rtx = target;
		    prev_mode = candidate;
		    break;
		  }
	      }
	}

      if (target == nullptr)
	prev_rtx = copy_to_reg (prev_rtx);

      target = lowpart_subreg (mode, prev_rtx, prev_mode);
    }
  return target;
}
3776 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3777 bytes from constant string DATA + OFFSET and return it as target
3778 constant. If PREV isn't nullptr, it has the RTL info from the
3779 previous iteration. */
3782 builtin_memset_read_str (void *data
, void *prev
,
3783 HOST_WIDE_INT offset ATTRIBUTE_UNUSED
,
3784 fixed_size_mode mode
)
3786 const char *c
= (const char *) data
;
3787 unsigned int size
= GET_MODE_SIZE (mode
);
3789 rtx target
= gen_memset_value_from_prev ((by_pieces_prev
*) prev
,
3791 if (target
!= nullptr)
3793 rtx src
= gen_int_mode (*c
, QImode
);
3795 if (VECTOR_MODE_P (mode
))
3797 gcc_assert (GET_MODE_INNER (mode
) == QImode
);
3799 rtx const_vec
= gen_const_vec_duplicate (mode
, src
);
3801 /* Return CONST_VECTOR when called by a query function. */
3804 /* Use the move expander with CONST_VECTOR. */
3805 target
= targetm
.gen_memset_scratch_rtx (mode
);
3806 emit_move_insn (target
, const_vec
);
3810 char *p
= XALLOCAVEC (char, size
);
3812 memset (p
, *c
, size
);
3814 /* Vector modes should be handled above. */
3815 return c_readstr (p
, as_a
<scalar_int_mode
> (mode
));
3818 /* Callback routine for store_by_pieces. Return the RTL of a register
3819 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3820 char value given in the RTL register data. For example, if mode is
3821 4 bytes wide, return the RTL for 0x01010101*data. If PREV isn't
3822 nullptr, it has the RTL info from the previous iteration. */
3825 builtin_memset_gen_str (void *data
, void *prev
,
3826 HOST_WIDE_INT offset ATTRIBUTE_UNUSED
,
3827 fixed_size_mode mode
)
3833 size
= GET_MODE_SIZE (mode
);
3837 target
= gen_memset_value_from_prev ((by_pieces_prev
*) prev
, mode
);
3838 if (target
!= nullptr)
3841 if (VECTOR_MODE_P (mode
))
3843 gcc_assert (GET_MODE_INNER (mode
) == QImode
);
3845 /* vec_duplicate_optab is a precondition to pick a vector mode for
3846 the memset expander. */
3847 insn_code icode
= optab_handler (vec_duplicate_optab
, mode
);
3849 target
= targetm
.gen_memset_scratch_rtx (mode
);
3850 class expand_operand ops
[2];
3851 create_output_operand (&ops
[0], target
, mode
);
3852 create_input_operand (&ops
[1], (rtx
) data
, QImode
);
3853 expand_insn (icode
, 2, ops
);
3854 if (!rtx_equal_p (target
, ops
[0].value
))
3855 emit_move_insn (target
, ops
[0].value
);
3860 p
= XALLOCAVEC (char, size
);
3861 memset (p
, 1, size
);
3862 /* Vector modes should be handled above. */
3863 coeff
= c_readstr (p
, as_a
<scalar_int_mode
> (mode
));
3865 target
= convert_to_mode (mode
, (rtx
) data
, 1);
3866 target
= expand_mult (mode
, target
, coeff
, NULL_RTX
, 1);
3867 return force_reg (mode
, target
);
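/* Editor's note: a minimal sketch (hypothetical helper, not part of GCC) of
   the scalar fallback above.  Multiplying the byte value by a constant whose
   bytes are all 1 - 0x01010101 for a 4-byte mode - replicates it into every
   byte lane, which is what the expand_mult of COEFF and DATA does in RTL.
   A 32-bit unsigned int is assumed for the illustration.  */
static inline unsigned int
example_splat_byte (unsigned char byte)
{
  return (unsigned int) byte * 0x01010101u;	/* e.g. 0xab -> 0xabababab */
}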
3870 /* Expand expression EXP, which is a call to the memset builtin. Return
3871 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3872 try to get the result in TARGET, if convenient (and in mode MODE if that's
3876 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
3878 if (!validate_arglist (exp,
3879 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3882 tree dest = CALL_EXPR_ARG (exp, 0);
3883 tree val = CALL_EXPR_ARG (exp, 1);
3884 tree len = CALL_EXPR_ARG (exp, 2);
3886 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3889 /* Try to store VAL (or, if NULL_RTX, VALC) in LEN bytes starting at TO.
3890 Return TRUE if successful, FALSE otherwise. TO is assumed to be
3891 aligned at an ALIGN-bits boundary. LEN must be a multiple of
3892 1<<CTZ_LEN between MIN_LEN and MAX_LEN.
3894 The strategy is to issue one store_by_pieces for each power of two,
3895 from most to least significant, guarded by a test on whether there
3896 are at least that many bytes left to copy in LEN.
3898 ??? Should we skip some powers of two in favor of loops? Maybe start
3899 at the max of TO/LEN/word alignment, at least when optimizing for
3900 size, instead of ensuring O(log len) dynamic compares? */
3903 try_store_by_multiple_pieces (rtx to
, rtx len
, unsigned int ctz_len
,
3904 unsigned HOST_WIDE_INT min_len
,
3905 unsigned HOST_WIDE_INT max_len
,
3906 rtx val
, char valc
, unsigned int align
)
3908 int max_bits
= floor_log2 (max_len
);
3909 int min_bits
= floor_log2 (min_len
);
3910 int sctz_len
= ctz_len
;
3912 gcc_checking_assert (sctz_len
>= 0);
3917 /* Bits more significant than TST_BITS are part of the shared prefix
3918 in the binary representation of both min_len and max_len. Since
3919 they're identical, we don't need to test them in the loop. */
3920 int tst_bits
= (max_bits
!= min_bits
? max_bits
3921 : floor_log2 (max_len
^ min_len
));
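  /* Editor's worked example (illustrative values): with MIN_LEN = 48
     (0b110000) and MAX_LEN = 56 (0b111000), max_bits == min_bits == 5 and
     tst_bits = floor_log2 (48 ^ 56) = floor_log2 (8) = 3, so only the
     8-byte and smaller blocks need a dynamic length test below; the larger
     blocks are decided statically from the shared prefix.  */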
3923 /* Check whether it's profitable to start by storing a fixed BLKSIZE
3924 bytes, to lower max_bits. In the unlikely case of a constant LEN
3925 (implied by identical MAX_LEN and MIN_LEN), we want to issue a
3926 single store_by_pieces, but otherwise, select the minimum multiple
3927 of the ALIGN (in bytes) and of the GCD of the possible LENs, that
3928 brings MAX_LEN below TST_BITS, if that's lower than min_len. */
3929 unsigned HOST_WIDE_INT blksize
;
3930 if (max_len
> min_len
)
3932 unsigned HOST_WIDE_INT alrng
= MAX (HOST_WIDE_INT_1U
<< ctz_len
,
3933 align
/ BITS_PER_UNIT
);
3934 blksize
= max_len
- (HOST_WIDE_INT_1U
<< tst_bits
) + alrng
;
3935 blksize
&= ~(alrng
- 1);
3937 else if (max_len
== min_len
)
3941 if (min_len
>= blksize
)
3944 min_bits
= floor_log2 (min_len
);
3946 max_bits
= floor_log2 (max_len
);
3948 tst_bits
= (max_bits
!= min_bits
? max_bits
3949 : floor_log2 (max_len
^ min_len
));
3954 /* Check that we can use store by pieces for the maximum store count
3955 we may issue (initial fixed-size block, plus conditional
3956 power-of-two-sized from max_bits to ctz_len). */
3957 unsigned HOST_WIDE_INT xlenest
= blksize
;
3959 xlenest
+= ((HOST_WIDE_INT_1U
<< max_bits
) * 2
3960 - (HOST_WIDE_INT_1U
<< ctz_len
));
3961 if (!can_store_by_pieces (xlenest
, builtin_memset_read_str
,
3962 &valc
, align
, true))
3965 by_pieces_constfn constfun
;
3969 constfun
= builtin_memset_gen_str
;
3970 constfundata
= val
= force_reg (TYPE_MODE (unsigned_char_type_node
),
3975 constfun
= builtin_memset_read_str
;
3976 constfundata
= &valc
;
3979 rtx ptr
= copy_addr_to_reg (convert_to_mode (ptr_mode
, XEXP (to
, 0), 0));
3980 rtx rem
= copy_to_mode_reg (ptr_mode
, convert_to_mode (ptr_mode
, len
, 0));
3981 to
= replace_equiv_address (to
, ptr
);
3982 set_mem_align (to
, align
);
3986 to
= store_by_pieces (to
, blksize
,
3987 constfun
, constfundata
,
3989 max_len
!= 0 ? RETURN_END
: RETURN_BEGIN
);
3993 /* Adjust PTR, TO and REM. Since TO's address is likely
3994 PTR+offset, we have to replace it. */
3995 emit_move_insn (ptr
, force_operand (XEXP (to
, 0), NULL_RTX
));
3996 to
= replace_equiv_address (to
, ptr
);
3997 rtx rem_minus_blksize
= plus_constant (ptr_mode
, rem
, -blksize
);
3998 emit_move_insn (rem
, force_operand (rem_minus_blksize
, NULL_RTX
));
4001 /* Iterate over power-of-two block sizes from the maximum length to
4002 the least significant bit possibly set in the length. */
4003 for (int i
= max_bits
; i
>= sctz_len
; i
--)
4005 rtx_code_label
*label
= NULL
;
4006 blksize
= HOST_WIDE_INT_1U
<< i
;
4008 /* If we're past the bits shared between min_ and max_len, expand
4009 a test on the dynamic length, comparing it with the
4013 label
= gen_label_rtx ();
4014 emit_cmp_and_jump_insns (rem
, GEN_INT (blksize
), LT
, NULL
,
4016 profile_probability::even ());
4018 /* If we are at a bit that is in the prefix shared by min_ and
4019 max_len, skip this BLKSIZE if the bit is clear. */
4020 else if ((max_len
& blksize
) == 0)
4023 /* Issue a store of BLKSIZE bytes. */
4024 to
= store_by_pieces (to
, blksize
,
4025 constfun
, constfundata
,
4027 i
!= sctz_len
? RETURN_END
: RETURN_BEGIN
);
4029 /* Adjust REM and PTR, unless this is the last iteration. */
4032 emit_move_insn (ptr
, force_operand (XEXP (to
, 0), NULL_RTX
));
4033 to
= replace_equiv_address (to
, ptr
);
4034 rtx rem_minus_blksize
= plus_constant (ptr_mode
, rem
, -blksize
);
4035 emit_move_insn (rem
, force_operand (rem_minus_blksize
, NULL_RTX
));
4042 /* Given conditional stores, the offset can no longer be
4043 known, so clear it. */
4044 clear_mem_offset (to
);
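/* Editor's note: a minimal, self-contained sketch of the control flow the
   function above emits for a dynamic length (the helper is hypothetical and
   not part of GCC).  Lengths are assumed to be multiples of 4 and at most 64
   bytes; each guarded block stands for one store_by_pieces call, and each
   guard for one emit_cmp_and_jump_insns test.  */
static void
example_store_by_multiple_pieces (unsigned char *to, unsigned int len,
				  unsigned char val)
{
  for (unsigned int blksize = 64; blksize >= 4; blksize >>= 1)
    if (len >= blksize)				/* dynamic length test */
      {
	for (unsigned int i = 0; i < blksize; i++)
	  to[i] = val;				/* one power-of-two block */
	to += blksize;
	len -= blksize;
      }
}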
4051 /* Helper function to do the actual work for expand_builtin_memset. The
4052 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4053 so that this can also be called without constructing an actual CALL_EXPR.
4054 The other arguments and return value are the same as for
4055 expand_builtin_memset. */
4058 expand_builtin_memset_args (tree dest
, tree val
, tree len
,
4059 rtx target
, machine_mode mode
, tree orig_exp
)
4062 enum built_in_function fcode
;
4063 machine_mode val_mode
;
4065 unsigned int dest_align
;
4066 rtx dest_mem
, dest_addr
, len_rtx
;
4067 HOST_WIDE_INT expected_size
= -1;
4068 unsigned int expected_align
= 0;
4069 unsigned HOST_WIDE_INT min_size
;
4070 unsigned HOST_WIDE_INT max_size
;
4071 unsigned HOST_WIDE_INT probable_max_size
;
4073 dest_align
= get_pointer_alignment (dest
);
4075 /* If DEST is not a pointer type, don't do this operation in-line. */
4076 if (dest_align
== 0)
4079 if (currently_expanding_gimple_stmt
)
4080 stringop_block_profile (currently_expanding_gimple_stmt
,
4081 &expected_align
, &expected_size
);
4083 if (expected_align
< dest_align
)
4084 expected_align
= dest_align
;
4086 /* If the LEN parameter is zero, return DEST. */
4087 if (integer_zerop (len
))
4089 /* Evaluate and ignore VAL in case it has side-effects. */
4090 expand_expr (val
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4091 return expand_expr (dest
, target
, mode
, EXPAND_NORMAL
);
4094 /* Stabilize the arguments in case we fail. */
4095 dest
= builtin_save_expr (dest
);
4096 val
= builtin_save_expr (val
);
4097 len
= builtin_save_expr (len
);
4099 len_rtx
= expand_normal (len
);
4100 determine_block_size (len
, len_rtx
, &min_size
, &max_size
,
4101 &probable_max_size
);
4102 dest_mem
= get_memory_rtx (dest
, len
);
4103 val_mode
= TYPE_MODE (unsigned_char_type_node
);
4105 if (TREE_CODE (val
) != INTEGER_CST
4106 || target_char_cast (val
, &c
))
4110 val_rtx
= expand_normal (val
);
4111 val_rtx
= convert_to_mode (val_mode
, val_rtx
, 0);
4113 /* Assume that we can memset by pieces if we can store
4114 * the coefficients by pieces (in the required modes).
4115 * We can't pass builtin_memset_gen_str as that emits RTL. */
4117 if (tree_fits_uhwi_p (len
)
4118 && can_store_by_pieces (tree_to_uhwi (len
),
4119 builtin_memset_read_str
, &c
, dest_align
,
4122 val_rtx
= force_reg (val_mode
, val_rtx
);
4123 store_by_pieces (dest_mem
, tree_to_uhwi (len
),
4124 builtin_memset_gen_str
, val_rtx
, dest_align
,
4125 true, RETURN_BEGIN
);
4127 else if (!set_storage_via_setmem (dest_mem
, len_rtx
, val_rtx
,
4128 dest_align
, expected_align
,
4129 expected_size
, min_size
, max_size
,
4131 && !try_store_by_multiple_pieces (dest_mem
, len_rtx
,
4138 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
4139 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
4145 if (tree_fits_uhwi_p (len
)
4146 && can_store_by_pieces (tree_to_uhwi (len
),
4147 builtin_memset_read_str
, &c
, dest_align
,
4149 store_by_pieces (dest_mem
, tree_to_uhwi (len
),
4150 builtin_memset_read_str
, &c
, dest_align
, true,
4152 else if (!set_storage_via_setmem (dest_mem
, len_rtx
,
4153 gen_int_mode (c
, val_mode
),
4154 dest_align
, expected_align
,
4155 expected_size
, min_size
, max_size
,
4157 && !try_store_by_multiple_pieces (dest_mem
, len_rtx
,
4164 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
4165 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
4169 set_mem_align (dest_mem
, dest_align
);
4170 dest_addr
= clear_storage_hints (dest_mem
, len_rtx
,
4171 CALL_EXPR_TAILCALL (orig_exp
)
4172 ? BLOCK_OP_TAILCALL
: BLOCK_OP_NORMAL
,
4173 expected_align
, expected_size
,
4175 probable_max_size
, tree_ctz (len
));
4179 dest_addr
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
4180 dest_addr
= convert_memory_address (ptr_mode
, dest_addr
);
4186 fndecl
= get_callee_fndecl (orig_exp
);
4187 fcode
= DECL_FUNCTION_CODE (fndecl
);
4188 if (fcode
== BUILT_IN_MEMSET
)
4189 fn
= build_call_nofold_loc (EXPR_LOCATION (orig_exp
), fndecl
, 3,
4191 else if (fcode
== BUILT_IN_BZERO
)
4192 fn
= build_call_nofold_loc (EXPR_LOCATION (orig_exp
), fndecl
, 2,
4196 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
4197 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (orig_exp
);
4198 return expand_call (fn
, target
, target
== const0_rtx
);
4201 /* Expand expression EXP, which is a call to the bzero builtin. Return
4202 NULL_RTX if we failed; the caller should emit a normal call. */
4205 expand_builtin_bzero (tree exp)
4207 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4210 tree dest = CALL_EXPR_ARG (exp, 0);
4211 tree size = CALL_EXPR_ARG (exp, 1);
4213 /* New argument list transforming bzero(ptr x, int y) to
4214 memset(ptr x, int 0, size_t y). This is done this way
4215 so that if it isn't expanded inline, we fall back to
4216 calling bzero instead of memset. */
4218 location_t loc = EXPR_LOCATION (exp);
4220 return expand_builtin_memset_args (dest, integer_zero_node,
4221 fold_convert_loc (loc,
4222 size_type_node, size),
4223 const0_rtx, VOIDmode, exp);
4226 /* Try to expand cmpstr operation ICODE with the given operands.
4227 Return the result rtx on success, otherwise return null. */
4230 expand_cmpstr (insn_code icode
, rtx target
, rtx arg1_rtx
, rtx arg2_rtx
,
4231 HOST_WIDE_INT align
)
4233 machine_mode insn_mode
= insn_data
[icode
].operand
[0].mode
;
4235 if (target
&& (!REG_P (target
) || HARD_REGISTER_P (target
)))
4238 class expand_operand ops
[4];
4239 create_output_operand (&ops
[0], target
, insn_mode
);
4240 create_fixed_operand (&ops
[1], arg1_rtx
);
4241 create_fixed_operand (&ops
[2], arg2_rtx
);
4242 create_integer_operand (&ops
[3], align
);
4243 if (maybe_expand_insn (icode
, 4, ops
))
4244 return ops
[0].value
;
4248 /* Expand expression EXP, which is a call to the memcmp built-in function.
4249 Return NULL_RTX if we failed and the caller should emit a normal call,
4250 otherwise try to get the result in TARGET, if convenient.
4251 RESULT_EQ is true if we can relax the returned value to be either zero
4252 or nonzero, without caring about the sign. */
4255 expand_builtin_memcmp (tree exp
, rtx target
, bool result_eq
)
4257 if (!validate_arglist (exp
,
4258 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
4261 tree arg1
= CALL_EXPR_ARG (exp
, 0);
4262 tree arg2
= CALL_EXPR_ARG (exp
, 1);
4263 tree len
= CALL_EXPR_ARG (exp
, 2);
4265 /* Diagnose calls where the specified length exceeds the size of either object. */
4267 if (!check_read_access (exp
, arg1
, len
, 0)
4268 || !check_read_access (exp
, arg2
, len
, 0))
4271 /* Due to the performance benefit, always inline the calls first
4272 when result_eq is false. */
4273 rtx result
= NULL_RTX
;
4274 enum built_in_function fcode
= DECL_FUNCTION_CODE (get_callee_fndecl (exp
));
4275 if (!result_eq
&& fcode
!= BUILT_IN_BCMP
)
4277 result
= inline_expand_builtin_bytecmp (exp
, target
);
4282 machine_mode mode
= TYPE_MODE (TREE_TYPE (exp
));
4283 location_t loc
= EXPR_LOCATION (exp
);
4285 unsigned int arg1_align
= get_pointer_alignment (arg1
) / BITS_PER_UNIT
;
4286 unsigned int arg2_align
= get_pointer_alignment (arg2
) / BITS_PER_UNIT
;
4288 /* If we don't have POINTER_TYPE, call the function. */
4289 if (arg1_align
== 0 || arg2_align
== 0)
4292 rtx arg1_rtx
= get_memory_rtx (arg1
, len
);
4293 rtx arg2_rtx
= get_memory_rtx (arg2
, len
);
4294 rtx len_rtx
= expand_normal (fold_convert_loc (loc
, sizetype
, len
));
4296 /* Set MEM_SIZE as appropriate. */
4297 if (CONST_INT_P (len_rtx
))
4299 set_mem_size (arg1_rtx
, INTVAL (len_rtx
));
4300 set_mem_size (arg2_rtx
, INTVAL (len_rtx
));
4303 by_pieces_constfn constfn
= NULL
;
4305 /* Try to get the byte representation of the constant ARG2 (or, only
4306 when the function's result is used for equality to zero, ARG1)
4307 points to, with its byte size in NBYTES. */
4308 unsigned HOST_WIDE_INT nbytes
;
4309 const char *rep
= getbyterep (arg2
, &nbytes
);
4310 if (result_eq
&& rep
== NULL
)
4312 /* For equality to zero the arguments are interchangeable. */
4313 rep
= getbyterep (arg1
, &nbytes
);
4315 std::swap (arg1_rtx
, arg2_rtx
);
4318 /* If the function's constant bound LEN_RTX is less than or equal
4319 to the byte size of the representation of the constant argument,
4320 and if block move would be done by pieces, we can avoid loading
4321 the bytes from memory and only store the computed constant result. */
4323 && CONST_INT_P (len_rtx
)
4324 && (unsigned HOST_WIDE_INT
) INTVAL (len_rtx
) <= nbytes
)
4325 constfn
= builtin_memcpy_read_str
;
4327 result
= emit_block_cmp_hints (arg1_rtx
, arg2_rtx
, len_rtx
,
4328 TREE_TYPE (len
), target
,
4330 CONST_CAST (char *, rep
));
4334 /* Return the value in the proper mode for this function. */
4335 if (GET_MODE (result
) == mode
)
4340 convert_move (target
, result
, 0);
4344 return convert_to_mode (mode
, result
, 0);
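/* Editor's note: a minimal sketch of the by-pieces shortcut above for the
   equality (RESULT_EQ) case; the helper is hypothetical and not part of GCC.
   When one argument is a known byte representation and the bound is a small
   constant, the compare can be open-coded against immediates instead of
   loading that argument from memory, e.g. memcmp (p, "abcd", 4) == 0:  */
static int
example_memcmp_const_eq (const unsigned char *p)
{
  return p[0] == 'a' && p[1] == 'b' && p[2] == 'c' && p[3] == 'd';
}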
4350 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4351 if we failed; the caller should emit a normal call, otherwise try to get
4352 the result in TARGET, if convenient. */
4355 expand_builtin_strcmp (tree exp
, ATTRIBUTE_UNUSED rtx target
)
4357 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
4360 tree arg1
= CALL_EXPR_ARG (exp
, 0);
4361 tree arg2
= CALL_EXPR_ARG (exp
, 1);
4363 /* Due to the performance benefit, always inline the calls first. */
4364 rtx result
= NULL_RTX
;
4365 result
= inline_expand_builtin_bytecmp (exp
, target
);
4369 insn_code cmpstr_icode
= direct_optab_handler (cmpstr_optab
, SImode
);
4370 insn_code cmpstrn_icode
= direct_optab_handler (cmpstrn_optab
, SImode
);
4371 if (cmpstr_icode
== CODE_FOR_nothing
&& cmpstrn_icode
== CODE_FOR_nothing
)
4374 unsigned int arg1_align
= get_pointer_alignment (arg1
) / BITS_PER_UNIT
;
4375 unsigned int arg2_align
= get_pointer_alignment (arg2
) / BITS_PER_UNIT
;
4377 /* If we don't have POINTER_TYPE, call the function. */
4378 if (arg1_align
== 0 || arg2_align
== 0)
4381 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4382 arg1
= builtin_save_expr (arg1
);
4383 arg2
= builtin_save_expr (arg2
);
4385 rtx arg1_rtx
= get_memory_rtx (arg1
, NULL
);
4386 rtx arg2_rtx
= get_memory_rtx (arg2
, NULL
);
4388 /* Try to call cmpstrsi. */
4389 if (cmpstr_icode
!= CODE_FOR_nothing
)
4390 result
= expand_cmpstr (cmpstr_icode
, target
, arg1_rtx
, arg2_rtx
,
4391 MIN (arg1_align
, arg2_align
));
4393 /* Try to determine at least one length and call cmpstrnsi. */
4394 if (!result
&& cmpstrn_icode
!= CODE_FOR_nothing
)
4399 tree len1
= c_strlen (arg1
, 1);
4400 tree len2
= c_strlen (arg2
, 1);
4403 len1
= size_binop (PLUS_EXPR
, ssize_int (1), len1
);
4405 len2
= size_binop (PLUS_EXPR
, ssize_int (1), len2
);
4407 /* If we don't have a constant length for the first, use the length
4408 of the second, if we know it. We don't require a constant for
4409 this case; some cost analysis could be done if both are available
4410 but neither is constant. For now, assume they're equally cheap,
4411 unless one has side effects. If both strings have constant lengths,
4418 else if (TREE_SIDE_EFFECTS (len1
))
4420 else if (TREE_SIDE_EFFECTS (len2
))
4422 else if (TREE_CODE (len1
) != INTEGER_CST
)
4424 else if (TREE_CODE (len2
) != INTEGER_CST
)
4426 else if (tree_int_cst_lt (len1
, len2
))
4431 /* If both arguments have side effects, we cannot optimize. */
4432 if (len
&& !TREE_SIDE_EFFECTS (len
))
4434 arg3_rtx
= expand_normal (len
);
4435 result
= expand_cmpstrn_or_cmpmem
4436 (cmpstrn_icode
, target
, arg1_rtx
, arg2_rtx
, TREE_TYPE (len
),
4437 arg3_rtx
, MIN (arg1_align
, arg2_align
));
4441 tree fndecl
= get_callee_fndecl (exp
);
4444 /* Return the value in the proper mode for this function. */
4445 machine_mode mode
= TYPE_MODE (TREE_TYPE (exp
));
4446 if (GET_MODE (result
) == mode
)
4449 return convert_to_mode (mode
, result
, 0);
4450 convert_move (target
, result
, 0);
4454 /* Expand the library call ourselves using a stabilized argument
4455 list to avoid re-evaluating the function's arguments twice. */
4456 tree fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fndecl
, 2, arg1
, arg2
);
4457 copy_warning (fn
, exp
);
4458 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
4459 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
4460 return expand_call (fn
, target
, target
== const0_rtx
);
4463 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4464 NULL_RTX if we failed; the caller should emit a normal call, otherwise
4465 try to get the result in TARGET, if convenient. */
4468 expand_builtin_strncmp (tree exp
, ATTRIBUTE_UNUSED rtx target
,
4469 ATTRIBUTE_UNUSED machine_mode mode
)
4471 if (!validate_arglist (exp
,
4472 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
4475 tree arg1
= CALL_EXPR_ARG (exp
, 0);
4476 tree arg2
= CALL_EXPR_ARG (exp
, 1);
4477 tree arg3
= CALL_EXPR_ARG (exp
, 2);
4479 location_t loc
= EXPR_LOCATION (exp
);
4480 tree len1
= c_strlen (arg1
, 1);
4481 tree len2
= c_strlen (arg2
, 1);
4483 /* Due to the performance benefit, always inline the calls first. */
4484 rtx result
= NULL_RTX
;
4485 result
= inline_expand_builtin_bytecmp (exp
, target
);
4489 /* If c_strlen can determine an expression for one of the string
4490 lengths, and it doesn't have side effects, then emit cmpstrnsi
4491 using length MIN(strlen(string)+1, arg3). */
4492 insn_code cmpstrn_icode
= direct_optab_handler (cmpstrn_optab
, SImode
);
4493 if (cmpstrn_icode
== CODE_FOR_nothing
)
4498 unsigned int arg1_align
= get_pointer_alignment (arg1
) / BITS_PER_UNIT
;
4499 unsigned int arg2_align
= get_pointer_alignment (arg2
) / BITS_PER_UNIT
;
4502 len1
= size_binop_loc (loc
, PLUS_EXPR
, ssize_int (1), len1
);
4504 len2
= size_binop_loc (loc
, PLUS_EXPR
, ssize_int (1), len2
);
4506 tree len3
= fold_convert_loc (loc
, sizetype
, arg3
);
4508 /* If we don't have a constant length for the first, use the length
4509 of the second, if we know it. If neither string is constant length,
4510 use the given length argument. We don't require a constant for
4511 this case; some cost analysis could be done if both are available
4512 but neither is constant. For now, assume they're equally cheap,
4513 unless one has side effects. If both strings have constant lengths,
4522 else if (TREE_SIDE_EFFECTS (len1
))
4524 else if (TREE_SIDE_EFFECTS (len2
))
4526 else if (TREE_CODE (len1
) != INTEGER_CST
)
4528 else if (TREE_CODE (len2
) != INTEGER_CST
)
4530 else if (tree_int_cst_lt (len1
, len2
))
4535 /* If we are not using the given length, we must incorporate it here.
4536 The actual new length parameter will be MIN(len,arg3) in this case. */
4539 len
= fold_convert_loc (loc
, sizetype
, len
);
4540 len
= fold_build2_loc (loc
, MIN_EXPR
, TREE_TYPE (len
), len
, len3
);
4542 rtx arg1_rtx
= get_memory_rtx (arg1
, len
);
4543 rtx arg2_rtx
= get_memory_rtx (arg2
, len
);
4544 rtx arg3_rtx
= expand_normal (len
);
4545 result
= expand_cmpstrn_or_cmpmem (cmpstrn_icode
, target
, arg1_rtx
,
4546 arg2_rtx
, TREE_TYPE (len
), arg3_rtx
,
4547 MIN (arg1_align
, arg2_align
));
4549 tree fndecl
= get_callee_fndecl (exp
);
4552 /* Return the value in the proper mode for this function. */
4553 mode
= TYPE_MODE (TREE_TYPE (exp
));
4554 if (GET_MODE (result
) == mode
)
4557 return convert_to_mode (mode
, result
, 0);
4558 convert_move (target
, result
, 0);
4562 /* Expand the library call ourselves using a stabilized argument
4563 list to avoid re-evaluating the function's arguments twice. */
4564 tree call
= build_call_nofold_loc (loc
, fndecl
, 3, arg1
, arg2
, len
);
4565 copy_warning (call
, exp
);
4566 gcc_assert (TREE_CODE (call
) == CALL_EXPR
);
4567 CALL_EXPR_TAILCALL (call
) = CALL_EXPR_TAILCALL (exp
);
4568 return expand_call (call
, target
, target
== const0_rtx
);
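/* Editor's note: a small illustration (hypothetical helper, not part of GCC)
   of the MIN (len, arg3) bound computed above.  Because the comparison can
   never continue past the NUL of a constant string, a call such as
   strncmp (s, "foo", 100) needs to inspect at most strlen ("foo") + 1 == 4
   bytes, so the two forms below are equivalent.  */
static int
example_strncmp_bound (const char *s)
{
  return __builtin_strncmp (s, "foo", 4);	/* same as strncmp (s, "foo", 100) */
}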
4571 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4572 if that's convenient. */
4575 expand_builtin_saveregs (void)
4580 /* Don't do __builtin_saveregs more than once in a function.
4581 Save the result of the first call and reuse it. */
4582 if (saveregs_value
!= 0)
4583 return saveregs_value
;
4585 /* When this function is called, it means that registers must be
4586 saved on entry to this function. So we migrate the call to the
4587 first insn of this function. */
4591 /* Do whatever the machine needs done in this case. */
4592 val
= targetm
.calls
.expand_builtin_saveregs ();
4597 saveregs_value
= val
;
4599 /* Put the insns after the NOTE that starts the function. If this
4600 is inside a start_sequence, make the outer-level insn chain current, so
4601 the code is placed at the start of the function. */
4602 push_topmost_sequence ();
4603 emit_insn_after (seq
, entry_of_function ());
4604 pop_topmost_sequence ();
4609 /* Expand a call to __builtin_next_arg. */
4612 expand_builtin_next_arg (void)
4614 /* Checking arguments is already done in fold_builtin_next_arg
4615 that must be called before this function. */
4616 return expand_binop (ptr_mode
, add_optab
,
4617 crtl
->args
.internal_arg_pointer
,
4618 crtl
->args
.arg_offset_rtx
,
4619 NULL_RTX
, 0, OPTAB_LIB_WIDEN
);
4622 /* Make it easier for the backends by protecting the valist argument
4623 from multiple evaluations. */
4626 stabilize_va_list_loc (location_t loc
, tree valist
, int needs_lvalue
)
4628 tree vatype
= targetm
.canonical_va_list_type (TREE_TYPE (valist
));
4630 /* The current way of determining the type of valist is completely
4631 bogus. We should have the information on the va builtin instead. */
4633 vatype
= targetm
.fn_abi_va_list (cfun
->decl
);
4635 if (TREE_CODE (vatype
) == ARRAY_TYPE
)
4637 if (TREE_SIDE_EFFECTS (valist
))
4638 valist
= save_expr (valist
);
4640 /* For this case, the backends will be expecting a pointer to
4641 vatype, but it's possible we've actually been given an array
4642 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4644 if (TREE_CODE (TREE_TYPE (valist
)) == ARRAY_TYPE
)
4646 tree p1
= build_pointer_type (TREE_TYPE (vatype
));
4647 valist
= build_fold_addr_expr_with_type_loc (loc
, valist
, p1
);
4652 tree pt
= build_pointer_type (vatype
);
4656 if (! TREE_SIDE_EFFECTS (valist
))
4659 valist
= fold_build1_loc (loc
, ADDR_EXPR
, pt
, valist
);
4660 TREE_SIDE_EFFECTS (valist
) = 1;
4663 if (TREE_SIDE_EFFECTS (valist
))
4664 valist
= save_expr (valist
);
4665 valist
= fold_build2_loc (loc
, MEM_REF
,
4666 vatype
, valist
, build_int_cst (pt
, 0));
4672 /* The "standard" definition of va_list is void*. */
4675 std_build_builtin_va_list (void)
4677 return ptr_type_node
;
4680 /* The "standard" abi va_list is va_list_type_node. */
4683 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED
)
4685 return va_list_type_node
;
4688 /* The "standard" type of va_list is va_list_type_node. */
4691 std_canonical_va_list_type (tree type
)
4695 wtype
= va_list_type_node
;
4698 if (TREE_CODE (wtype
) == ARRAY_TYPE
)
4700 /* If va_list is an array type, the argument may have decayed
4701 to a pointer type, e.g. by being passed to another function.
4702 In that case, unwrap both types so that we can compare the
4703 underlying records. */
4704 if (TREE_CODE (htype
) == ARRAY_TYPE
4705 || POINTER_TYPE_P (htype
))
4707 wtype
= TREE_TYPE (wtype
);
4708 htype
= TREE_TYPE (htype
);
4711 if (TYPE_MAIN_VARIANT (wtype
) == TYPE_MAIN_VARIANT (htype
))
4712 return va_list_type_node
;
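/* Editor's note (illustrative): on targets where the canonical va_list is an
   array type, for example the x86-64 psABI's
     typedef struct __va_list_tag va_list[1];
   an object of that type decays to `struct __va_list_tag *' when passed to
   another function, which is why the comparison above unwraps both the
   canonical and the actual type before checking the main variants.  */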
4717 /* The "standard" implementation of va_start: just assign `nextarg' to
4721 std_expand_builtin_va_start (tree valist
, rtx nextarg
)
4723 rtx va_r
= expand_expr (valist
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
4724 convert_move (va_r
, nextarg
, 0);
4727 /* Expand EXP, a call to __builtin_va_start. */
4730 expand_builtin_va_start (tree exp
)
4734 location_t loc
= EXPR_LOCATION (exp
);
4736 if (call_expr_nargs (exp
) < 2)
4738 error_at (loc
, "too few arguments to function %<va_start%>");
4742 if (fold_builtin_next_arg (exp
, true))
4745 nextarg
= expand_builtin_next_arg ();
4746 valist
= stabilize_va_list_loc (loc
, CALL_EXPR_ARG (exp
, 0), 1);
4748 if (targetm
.expand_builtin_va_start
)
4749 targetm
.expand_builtin_va_start (valist
, nextarg
);
4751 std_expand_builtin_va_start (valist
, nextarg
);
4756 /* Expand EXP, a call to __builtin_va_end. */
4759 expand_builtin_va_end (tree exp
)
4761 tree valist
= CALL_EXPR_ARG (exp
, 0);
4763 /* Evaluate for side effects, if needed. I hate macros that don't do that. */
4765 if (TREE_SIDE_EFFECTS (valist
))
4766 expand_expr (valist
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4771 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4772 builtin rather than just as an assignment in stdarg.h because of the
4773 nastiness of array-type va_list types. */
4776 expand_builtin_va_copy (tree exp
)
4779 location_t loc
= EXPR_LOCATION (exp
);
4781 dst
= CALL_EXPR_ARG (exp
, 0);
4782 src
= CALL_EXPR_ARG (exp
, 1);
4784 dst
= stabilize_va_list_loc (loc
, dst
, 1);
4785 src
= stabilize_va_list_loc (loc
, src
, 0);
4787 gcc_assert (cfun
!= NULL
&& cfun
->decl
!= NULL_TREE
);
4789 if (TREE_CODE (targetm
.fn_abi_va_list (cfun
->decl
)) != ARRAY_TYPE
)
4791 t
= build2 (MODIFY_EXPR
, targetm
.fn_abi_va_list (cfun
->decl
), dst
, src
);
4792 TREE_SIDE_EFFECTS (t
) = 1;
4793 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4797 rtx dstb
, srcb
, size
;
4799 /* Evaluate to pointers. */
4800 dstb
= expand_expr (dst
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
4801 srcb
= expand_expr (src
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
4802 size
= expand_expr (TYPE_SIZE_UNIT (targetm
.fn_abi_va_list (cfun
->decl
)),
4803 NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
4805 dstb
= convert_memory_address (Pmode
, dstb
);
4806 srcb
= convert_memory_address (Pmode
, srcb
);
4808 /* "Dereference" to BLKmode memories. */
4809 dstb
= gen_rtx_MEM (BLKmode
, dstb
);
4810 set_mem_alias_set (dstb
, get_alias_set (TREE_TYPE (TREE_TYPE (dst
))));
4811 set_mem_align (dstb
, TYPE_ALIGN (targetm
.fn_abi_va_list (cfun
->decl
)));
4812 srcb
= gen_rtx_MEM (BLKmode
, srcb
);
4813 set_mem_alias_set (srcb
, get_alias_set (TREE_TYPE (TREE_TYPE (src
))));
4814 set_mem_align (srcb
, TYPE_ALIGN (targetm
.fn_abi_va_list (cfun
->decl
)));
4817 emit_block_move (dstb
, srcb
, size
, BLOCK_OP_NORMAL
);
4823 /* Expand a call to one of the builtin functions __builtin_frame_address or
4824 __builtin_return_address. */
4827 expand_builtin_frame_address (tree fndecl
, tree exp
)
4829 /* The argument must be a nonnegative integer constant.
4830 It counts the number of frames to scan up the stack.
4831 The value is either the frame pointer value or the return
4832 address saved in that frame. */
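  /* Editor's note (illustrative): __builtin_return_address (0) yields the
     address the current function will return to, while
     __builtin_frame_address (1) asks for the caller's frame pointer - the
     nonzero-argument cases are exactly the ones warned about below.  */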
4833 if (call_expr_nargs (exp
) == 0)
4834 /* Warning about missing arg was already issued. */
4836 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp
, 0)))
4838 error ("invalid argument to %qD", fndecl
);
4843 /* Number of frames to scan up the stack. */
4844 unsigned HOST_WIDE_INT count
= tree_to_uhwi (CALL_EXPR_ARG (exp
, 0));
4846 rtx tem
= expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl
), count
);
4848 /* Some ports cannot access arbitrary stack frames. */
4851 warning (0, "unsupported argument to %qD", fndecl
);
4857 /* Warn since no effort is made to ensure that any frame
4858 beyond the current one exists or can be safely reached. */
4859 warning (OPT_Wframe_address
, "calling %qD with "
4860 "a nonzero argument is unsafe", fndecl
);
4863 /* For __builtin_frame_address, return what we've got. */
4864 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
4868 && ! CONSTANT_P (tem
))
4869 tem
= copy_addr_to_reg (tem
);
4874 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4875 failed and the caller should emit a normal call. */
4878 expand_builtin_alloca (tree exp
)
4883 tree fndecl
= get_callee_fndecl (exp
);
4884 HOST_WIDE_INT max_size
;
4885 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
4886 bool alloca_for_var
= CALL_ALLOCA_FOR_VAR_P (exp
);
4888 = (fcode
== BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
4889 ? validate_arglist (exp
, INTEGER_TYPE
, INTEGER_TYPE
, INTEGER_TYPE
,
4891 : fcode
== BUILT_IN_ALLOCA_WITH_ALIGN
4892 ? validate_arglist (exp
, INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
)
4893 : validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
));
4898 /* Compute the argument. */
4899 op0
= expand_normal (CALL_EXPR_ARG (exp
, 0));
4901 /* Compute the alignment. */
4902 align
= (fcode
== BUILT_IN_ALLOCA
4904 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp
, 1)));
4906 /* Compute the maximum size. */
4907 max_size
= (fcode
== BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
4908 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp
, 2))
4911 /* Allocate the desired space. If the allocation stems from the declaration
4912 of a variable-sized object, it cannot accumulate. */
4914 = allocate_dynamic_stack_space (op0
, 0, align
, max_size
, alloca_for_var
);
4915 result
= convert_memory_address (ptr_mode
, result
);
4917 /* Dynamic allocations for variables are recorded during gimplification. */
4918 if (!alloca_for_var
&& (flag_callgraph_info
& CALLGRAPH_INFO_DYNAMIC_ALLOC
))
4919 record_dynamic_alloc (exp
);
4924 /* Emit a call to __asan_allocas_unpoison call in EXP. Add to second argument
4925 of the call virtual_stack_dynamic_rtx - stack_pointer_rtx, which is the
4926 STACK_DYNAMIC_OFFSET value. See motivation for this in comment to
4927 handle_builtin_stack_restore function. */
4930 expand_asan_emit_allocas_unpoison (tree exp
)
4932 tree arg0
= CALL_EXPR_ARG (exp
, 0);
4933 tree arg1
= CALL_EXPR_ARG (exp
, 1);
4934 rtx top
= expand_expr (arg0
, NULL_RTX
, ptr_mode
, EXPAND_NORMAL
);
4935 rtx bot
= expand_expr (arg1
, NULL_RTX
, ptr_mode
, EXPAND_NORMAL
);
4936 rtx off
= expand_simple_binop (Pmode
, MINUS
, virtual_stack_dynamic_rtx
,
4937 stack_pointer_rtx
, NULL_RTX
, 0,
4939 off
= convert_modes (ptr_mode
, Pmode
, off
, 0);
4940 bot
= expand_simple_binop (ptr_mode
, PLUS
, bot
, off
, NULL_RTX
, 0,
4942 rtx ret
= init_one_libfunc ("__asan_allocas_unpoison");
4943 ret
= emit_library_call_value (ret
, NULL_RTX
, LCT_NORMAL
, ptr_mode
,
4944 top
, ptr_mode
, bot
, ptr_mode
);
4948 /* Expand a call to bswap builtin in EXP.
4949 Return NULL_RTX if a normal call should be emitted rather than expanding the
4950 function in-line. If convenient, the result should be placed in TARGET.
4951 SUBTARGET may be used as the target for computing one of EXP's operands. */
4954 expand_builtin_bswap (machine_mode target_mode
, tree exp
, rtx target
,
4960 if (!validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
))
4963 arg
= CALL_EXPR_ARG (exp
, 0);
4964 op0
= expand_expr (arg
,
4965 subtarget
&& GET_MODE (subtarget
) == target_mode
4966 ? subtarget
: NULL_RTX
,
4967 target_mode
, EXPAND_NORMAL
);
4968 if (GET_MODE (op0
) != target_mode
)
4969 op0
= convert_to_mode (target_mode
, op0
, 1);
4971 target
= expand_unop (target_mode
, bswap_optab
, op0
, target
, 1);
4973 gcc_assert (target
);
4975 return convert_to_mode (target_mode
, target
, 1);
4978 /* Expand a call to a unary builtin in EXP.
4979 Return NULL_RTX if a normal call should be emitted rather than expanding the
4980 function in-line. If convenient, the result should be placed in TARGET.
4981 SUBTARGET may be used as the target for computing one of EXP's operands. */
4984 expand_builtin_unop (machine_mode target_mode
, tree exp
, rtx target
,
4985 rtx subtarget
, optab op_optab
)
4989 if (!validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
))
4992 /* Compute the argument. */
4993 op0
= expand_expr (CALL_EXPR_ARG (exp
, 0),
4995 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 0)))
4996 == GET_MODE (subtarget
))) ? subtarget
: NULL_RTX
,
4997 VOIDmode
, EXPAND_NORMAL
);
4998 /* Compute op, into TARGET if possible.
4999 Set TARGET to wherever the result comes back. */
5000 target
= expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 0))),
5001 op_optab
, op0
, target
, op_optab
!= clrsb_optab
);
5002 gcc_assert (target
);
5004 return convert_to_mode (target_mode
, target
, 0);
5007 /* Expand a call to __builtin_expect. We just return our argument
5008 as the builtin_expect semantic should've been already executed by
5009 tree branch prediction pass. */
5012 expand_builtin_expect (tree exp
, rtx target
)
5016 if (call_expr_nargs (exp
) < 2)
5018 arg
= CALL_EXPR_ARG (exp
, 0);
5020 target
= expand_expr (arg
, target
, VOIDmode
, EXPAND_NORMAL
);
5021 /* When guessing was done, the hints should be already stripped away. */
5022 gcc_assert (!flag_guess_branch_prob
5023 || optimize
== 0 || seen_error ());
5027 /* Expand a call to __builtin_expect_with_probability. We just return our
5028 argument as the builtin_expect semantic should've been already executed by
5029 tree branch prediction pass. */
5032 expand_builtin_expect_with_probability (tree exp
, rtx target
)
5036 if (call_expr_nargs (exp
) < 3)
5038 arg
= CALL_EXPR_ARG (exp
, 0);
5040 target
= expand_expr (arg
, target
, VOIDmode
, EXPAND_NORMAL
);
5041 /* When guessing was done, the hints should be already stripped away. */
5042 gcc_assert (!flag_guess_branch_prob
5043 || optimize
== 0 || seen_error ());
5048 /* Expand a call to __builtin_assume_aligned. We just return our first
5049 argument as the builtin_assume_aligned semantic should've been already
5053 expand_builtin_assume_aligned (tree exp
, rtx target
)
5055 if (call_expr_nargs (exp
) < 2)
5057 target
= expand_expr (CALL_EXPR_ARG (exp
, 0), target
, VOIDmode
,
5059 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp
, 1))
5060 && (call_expr_nargs (exp
) < 3
5061 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp
, 2))));
5066 expand_builtin_trap (void)
5068 if (targetm
.have_trap ())
5070 rtx_insn
*insn
= emit_insn (targetm
.gen_trap ());
5071 /* For trap insns when not accumulating outgoing args force
5072 REG_ARGS_SIZE note to prevent crossjumping of calls with
5073 different args sizes. */
5074 if (!ACCUMULATE_OUTGOING_ARGS
)
5075 add_args_size_note (insn
, stack_pointer_delta
);
5079 tree fn
= builtin_decl_implicit (BUILT_IN_ABORT
);
5080 tree call_expr
= build_call_expr (fn
, 0);
5081 expand_call (call_expr
, NULL_RTX
, false);
5087 /* Expand a call to __builtin_unreachable. We do nothing except emit
5088 a barrier saying that control flow will not pass here.
5090 It is the responsibility of the program being compiled to ensure
5091 that control flow never reaches __builtin_unreachable. */
5093 expand_builtin_unreachable (void)
5098 /* Expand EXP, a call to fabs, fabsf or fabsl.
5099 Return NULL_RTX if a normal call should be emitted rather than expanding
5100 the function inline. If convenient, the result should be placed
5101 in TARGET. SUBTARGET may be used as the target for computing
5105 expand_builtin_fabs (tree exp
, rtx target
, rtx subtarget
)
5111 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
5114 arg
= CALL_EXPR_ARG (exp
, 0);
5115 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
5116 mode
= TYPE_MODE (TREE_TYPE (arg
));
5117 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
5118 return expand_abs (mode
, op0
, target
, 0, safe_from_p (target
, arg
, 1));
5121 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5122 Return NULL if a normal call should be emitted rather than expanding the
5123 function inline. If convenient, the result should be placed in TARGET.
5124 SUBTARGET may be used as the target for computing the operand. */
5127 expand_builtin_copysign (tree exp
, rtx target
, rtx subtarget
)
5132 if (!validate_arglist (exp
, REAL_TYPE
, REAL_TYPE
, VOID_TYPE
))
5135 arg
= CALL_EXPR_ARG (exp
, 0);
5136 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
5138 arg
= CALL_EXPR_ARG (exp
, 1);
5139 op1
= expand_normal (arg
);
5141 return expand_copysign (op0
, op1
, target
);
5144 /* Emit a call to __builtin___clear_cache. */
5147 default_emit_call_builtin___clear_cache (rtx begin
, rtx end
)
5149 rtx callee
= gen_rtx_SYMBOL_REF (Pmode
,
5150 BUILTIN_ASM_NAME_PTR
5151 (BUILT_IN_CLEAR_CACHE
));
5153 emit_library_call (callee
,
5154 LCT_NORMAL
, VOIDmode
,
5155 convert_memory_address (ptr_mode
, begin
), ptr_mode
,
5156 convert_memory_address (ptr_mode
, end
), ptr_mode
);
5159 /* Emit a call to __builtin___clear_cache, unless the target specifies
5160 it as do-nothing. This function can be used by trampoline
5161 finalizers to duplicate the effects of expanding a call to the
5162 clear_cache builtin. */
5165 maybe_emit_call_builtin___clear_cache (rtx begin
, rtx end
)
5167 if ((GET_MODE (begin
) != ptr_mode
&& GET_MODE (begin
) != Pmode
)
5168 || (GET_MODE (end
) != ptr_mode
&& GET_MODE (end
) != Pmode
))
5170 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5174 if (targetm
.have_clear_cache ())
5176 /* We have a "clear_cache" insn, and it will handle everything. */
5177 class expand_operand ops
[2];
5179 create_address_operand (&ops
[0], begin
);
5180 create_address_operand (&ops
[1], end
);
5182 if (maybe_expand_insn (targetm
.code_for_clear_cache
, 2, ops
))
5187 #ifndef CLEAR_INSN_CACHE
5188 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5189 does nothing. There is no need to call it. Do nothing. */
5191 #endif /* CLEAR_INSN_CACHE */
5194 targetm
.calls
.emit_call_builtin___clear_cache (begin
, end
);
5197 /* Expand a call to __builtin___clear_cache. */
5200 expand_builtin___clear_cache (tree exp
)
5203 rtx begin_rtx
, end_rtx
;
5205 /* We must not expand to a library call. If we did, any
5206 fallback library function in libgcc that might contain a call to
5207 __builtin___clear_cache() would recurse infinitely. */
5208 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
5210 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5214 begin
= CALL_EXPR_ARG (exp
, 0);
5215 begin_rtx
= expand_expr (begin
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
5217 end
= CALL_EXPR_ARG (exp
, 1);
5218 end_rtx
= expand_expr (end
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
5220 maybe_emit_call_builtin___clear_cache (begin_rtx
, end_rtx
);
5223 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5226 round_trampoline_addr (rtx tramp)
5228 rtx temp, addend, mask;
5230 /* If we don't need too much alignment, we'll have been guaranteed
5231 proper alignment by get_trampoline_type. */
5232 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5235 /* Round address up to desired boundary. */
5236 temp = gen_reg_rtx (Pmode);
5237 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
5238 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
5240 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5241 temp, 0, OPTAB_LIB_WIDEN);
5242 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5243 temp, 0, OPTAB_LIB_WIDEN);
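/* Editor's note: the PLUS/AND pair above is the standard round-up-to-a-
   power-of-two idiom.  A minimal sketch in plain C (hypothetical helper,
   not part of GCC; ALIGN is assumed to be a power of two):  */
static inline unsigned long
example_round_up (unsigned long addr, unsigned long align)
{
  return (addr + align - 1) & -align;	/* add ALIGN-1, then mask it off */
}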
5249 expand_builtin_init_trampoline (tree exp
, bool onstack
)
5251 tree t_tramp
, t_func
, t_chain
;
5252 rtx m_tramp
, r_tramp
, r_chain
, tmp
;
5254 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
,
5255 POINTER_TYPE
, VOID_TYPE
))
5258 t_tramp
= CALL_EXPR_ARG (exp
, 0);
5259 t_func
= CALL_EXPR_ARG (exp
, 1);
5260 t_chain
= CALL_EXPR_ARG (exp
, 2);
5262 r_tramp
= expand_normal (t_tramp
);
5263 m_tramp
= gen_rtx_MEM (BLKmode
, r_tramp
);
5264 MEM_NOTRAP_P (m_tramp
) = 1;
5266 /* If ONSTACK, the TRAMP argument should be the address of a field
5267 within the local function's FRAME decl. Either way, let's see if
5268 we can fill in the MEM_ATTRs for this memory. */
5269 if (TREE_CODE (t_tramp
) == ADDR_EXPR
)
5270 set_mem_attributes (m_tramp
, TREE_OPERAND (t_tramp
, 0), true);
5272 /* Creator of a heap trampoline is responsible for making sure the
5273 address is aligned to at least STACK_BOUNDARY. Normally malloc
5274 will ensure this anyhow. */
5275 tmp
= round_trampoline_addr (r_tramp
);
5278 m_tramp
= change_address (m_tramp
, BLKmode
, tmp
);
5279 set_mem_align (m_tramp
, TRAMPOLINE_ALIGNMENT
);
5280 set_mem_size (m_tramp
, TRAMPOLINE_SIZE
);
5283 /* The FUNC argument should be the address of the nested function.
5284 Extract the actual function decl to pass to the hook. */
5285 gcc_assert (TREE_CODE (t_func
) == ADDR_EXPR
);
5286 t_func
= TREE_OPERAND (t_func
, 0);
5287 gcc_assert (TREE_CODE (t_func
) == FUNCTION_DECL
);
5289 r_chain
= expand_normal (t_chain
);
5291 /* Generate insns to initialize the trampoline. */
5292 targetm
.calls
.trampoline_init (m_tramp
, t_func
, r_chain
);
5296 trampolines_created
= 1;
5298 if (targetm
.calls
.custom_function_descriptors
!= 0)
5299 warning_at (DECL_SOURCE_LOCATION (t_func
), OPT_Wtrampolines
,
5300 "trampoline generated for nested function %qD", t_func
);
5307 expand_builtin_adjust_trampoline (tree exp
)
5311 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
5314 tramp
= expand_normal (CALL_EXPR_ARG (exp
, 0));
5315 tramp
= round_trampoline_addr (tramp
);
5316 if (targetm
.calls
.trampoline_adjust_address
)
5317 tramp
= targetm
.calls
.trampoline_adjust_address (tramp
);
5322 /* Expand a call to the builtin descriptor initialization routine.
5323 A descriptor is made up of a couple of pointers to the static
5324 chain and the code entry in this order. */
5327 expand_builtin_init_descriptor (tree exp
)
5329 tree t_descr
, t_func
, t_chain
;
5330 rtx m_descr
, r_descr
, r_func
, r_chain
;
5332 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, POINTER_TYPE
,
5336 t_descr
= CALL_EXPR_ARG (exp
, 0);
5337 t_func
= CALL_EXPR_ARG (exp
, 1);
5338 t_chain
= CALL_EXPR_ARG (exp
, 2);
5340 r_descr
= expand_normal (t_descr
);
5341 m_descr
= gen_rtx_MEM (BLKmode
, r_descr
);
5342 MEM_NOTRAP_P (m_descr
) = 1;
5343 set_mem_align (m_descr
, GET_MODE_ALIGNMENT (ptr_mode
));
5345 r_func
= expand_normal (t_func
);
5346 r_chain
= expand_normal (t_chain
);
5348 /* Generate insns to initialize the descriptor. */
5349 emit_move_insn (adjust_address_nv (m_descr
, ptr_mode
, 0), r_chain
);
5350 emit_move_insn (adjust_address_nv (m_descr
, ptr_mode
,
5351 POINTER_SIZE
/ BITS_PER_UNIT
), r_func
);
5356 /* Expand a call to the builtin descriptor adjustment routine. */
5359 expand_builtin_adjust_descriptor (tree exp
)
5363 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
5366 tramp
= expand_normal (CALL_EXPR_ARG (exp
, 0));
5368 /* Unalign the descriptor to allow runtime identification. */
5369 tramp
= plus_constant (ptr_mode
, tramp
,
5370 targetm
.calls
.custom_function_descriptors
);
5372 return force_operand (tramp
, NULL_RTX
);
5375 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5376 function. The function first checks whether the back end provides
5377 an insn to implement signbit for the respective mode. If not, it
5378 checks whether the floating point format of the value is such that
5379 the sign bit can be extracted. If that is not the case, error out.
5380 EXP is the expression that is a call to the builtin function; if
5381 convenient, the result should be placed in TARGET. */
5383 expand_builtin_signbit (tree exp
, rtx target
)
5385 const struct real_format
*fmt
;
5386 scalar_float_mode fmode
;
5387 scalar_int_mode rmode
, imode
;
5390 enum insn_code icode
;
5392 location_t loc
= EXPR_LOCATION (exp
);
5394 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
5397 arg
= CALL_EXPR_ARG (exp
, 0);
5398 fmode
= SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg
));
5399 rmode
= SCALAR_INT_TYPE_MODE (TREE_TYPE (exp
));
5400 fmt
= REAL_MODE_FORMAT (fmode
);
5402 arg
= builtin_save_expr (arg
);
5404 /* Expand the argument yielding a RTX expression. */
5405 temp
= expand_normal (arg
);
5407 /* Check if the back end provides an insn that handles signbit for the
5409 icode
= optab_handler (signbit_optab
, fmode
);
5410 if (icode
!= CODE_FOR_nothing
)
5412 rtx_insn
*last
= get_last_insn ();
5413 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
5414 if (maybe_emit_unop_insn (icode
, target
, temp
, UNKNOWN
))
5416 delete_insns_since (last
);
5419 /* For floating point formats without a sign bit, implement signbit
5421 bitpos
= fmt
->signbit_ro
;
5424 /* But we can't do this if the format supports signed zero. */
5425 gcc_assert (!fmt
->has_signed_zero
|| !HONOR_SIGNED_ZEROS (fmode
));
5427 arg
= fold_build2_loc (loc
, LT_EXPR
, TREE_TYPE (exp
), arg
,
5428 build_real (TREE_TYPE (arg
), dconst0
));
5429 return expand_expr (arg
, target
, VOIDmode
, EXPAND_NORMAL
);
5432 if (GET_MODE_SIZE (fmode
) <= UNITS_PER_WORD
)
5434 imode
= int_mode_for_mode (fmode
).require ();
5435 temp
= gen_lowpart (imode
, temp
);
5440 /* Handle targets with different FP word orders. */
5441 if (FLOAT_WORDS_BIG_ENDIAN
)
5442 word
= (GET_MODE_BITSIZE (fmode
) - bitpos
) / BITS_PER_WORD
;
5444 word
= bitpos
/ BITS_PER_WORD
;
5445 temp
= operand_subword_force (temp
, word
, fmode
);
5446 bitpos
= bitpos
% BITS_PER_WORD
;
5449 /* Force the intermediate word_mode (or narrower) result into a
5450 register. This avoids attempting to create paradoxical SUBREGs
5451 of floating point modes below. */
5452 temp
= force_reg (imode
, temp
);
5454 /* If the bitpos is within the "result mode" lowpart, the operation
5455 can be implemented with a single bitwise AND. Otherwise, we need
5456 a right shift and an AND. */
5458 if (bitpos
< GET_MODE_BITSIZE (rmode
))
5460 wide_int mask
= wi::set_bit_in_zero (bitpos
, GET_MODE_PRECISION (rmode
));
5462 if (GET_MODE_SIZE (imode
) > GET_MODE_SIZE (rmode
))
5463 temp
= gen_lowpart (rmode
, temp
);
5464 temp
= expand_binop (rmode
, and_optab
, temp
,
5465 immed_wide_int_const (mask
, rmode
),
5466 NULL_RTX
, 1, OPTAB_LIB_WIDEN
);
5470 /* Perform a logical right shift to place the signbit in the least
5471 significant bit, then truncate the result to the desired mode
5472 and mask just this bit. */
5473 temp
= expand_shift (RSHIFT_EXPR
, imode
, temp
, bitpos
, NULL_RTX
, 1);
5474 temp
= gen_lowpart (rmode
, temp
);
5475 temp
= expand_binop (rmode
, and_optab
, temp
, const1_rtx
,
5476 NULL_RTX
, 1, OPTAB_LIB_WIDEN
);
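/* Editor's note: a minimal sketch of the shift-and-mask fallback above,
   assuming IEEE single precision with a 32-bit unsigned int image; the helper
   is hypothetical and not part of GCC.  The RTL version additionally handles
   word splitting and formats whose sign bit is not the top bit.  */
static inline int
example_signbit_float (float x)
{
  unsigned int image;
  __builtin_memcpy (&image, &x, sizeof image);	/* reinterpret the bits */
  return (image >> 31) & 1;			/* logical shift, then mask */
}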
5482 /* Expand fork or exec calls. TARGET is the desired target of the
5483 call. EXP is the call. FN is the
5484 identifier of the actual function. IGNORE is nonzero if the
5485 value is to be ignored. */
5488 expand_builtin_fork_or_exec (tree fn
, tree exp
, rtx target
, int ignore
)
5493 if (DECL_FUNCTION_CODE (fn
) != BUILT_IN_FORK
)
5495 tree path
= CALL_EXPR_ARG (exp
, 0);
5496 /* Detect unterminated path. */
5497 if (!check_read_access (exp
, path
))
5500 /* Also detect unterminated first argument. */
5501 switch (DECL_FUNCTION_CODE (fn
))
5503 case BUILT_IN_EXECL
:
5504 case BUILT_IN_EXECLE
:
5505 case BUILT_IN_EXECLP
:
5506 if (!check_read_access (exp
, path
))
5514 /* If we are not profiling, just call the function. */
5515 if (!profile_arc_flag
)
5518 /* Otherwise call the wrapper. This should be equivalent for the rest of the
5519 compiler, so the code does not diverge, and the wrapper may run the
5520 code necessary for keeping the profiling sane. */
5522 switch (DECL_FUNCTION_CODE (fn
))
5525 id
= get_identifier ("__gcov_fork");
5528 case BUILT_IN_EXECL
:
5529 id
= get_identifier ("__gcov_execl");
5532 case BUILT_IN_EXECV
:
5533 id
= get_identifier ("__gcov_execv");
5536 case BUILT_IN_EXECLP
:
5537 id
= get_identifier ("__gcov_execlp");
5540 case BUILT_IN_EXECLE
:
5541 id
= get_identifier ("__gcov_execle");
5544 case BUILT_IN_EXECVP
:
5545 id
= get_identifier ("__gcov_execvp");
5548 case BUILT_IN_EXECVE
:
5549 id
= get_identifier ("__gcov_execve");
5556 decl
= build_decl (DECL_SOURCE_LOCATION (fn
),
5557 FUNCTION_DECL
, id
, TREE_TYPE (fn
));
5558 DECL_EXTERNAL (decl
) = 1;
5559 TREE_PUBLIC (decl
) = 1;
5560 DECL_ARTIFICIAL (decl
) = 1;
5561 TREE_NOTHROW (decl
) = 1;
5562 DECL_VISIBILITY (decl
) = VISIBILITY_DEFAULT
;
5563 DECL_VISIBILITY_SPECIFIED (decl
) = 1;
5564 call
= rewrite_call_expr (EXPR_LOCATION (exp
), exp
, 0, decl
, 0);
5565 return expand_call (call
, target
, ignore
);
5570 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5571 the pointer in these functions is void*, the tree optimizers may remove
5572 casts. The mode computed in expand_builtin isn't reliable either, due
5573 to __sync_bool_compare_and_swap.
5575 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5576 group of builtins. This gives us log2 of the mode size. */
5578 static inline machine_mode
5579 get_builtin_sync_mode (int fcode_diff)
5581 /* The size is not negotiable, so ask not to get BLKmode in return
5582 if the target indicates that a smaller size would be better. */
5583 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
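/* Editor's worked example (illustrative): the _1/_2/_4/_8/_16 variants are
   declared consecutively, so for __sync_fetch_and_add_4 the caller passes
   fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1 == 2, and
   BITS_PER_UNIT << 2 == 32 requests the 4-byte integer mode.  */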
5586 /* Expand the memory expression LOC and return the appropriate memory operand
5587 for the builtin_sync operations. */
5590 get_builtin_sync_mem (tree loc
, machine_mode mode
)
5593 int addr_space
= TYPE_ADDR_SPACE (POINTER_TYPE_P (TREE_TYPE (loc
))
5594 ? TREE_TYPE (TREE_TYPE (loc
))
5596 scalar_int_mode addr_mode
= targetm
.addr_space
.address_mode (addr_space
);
5598 addr
= expand_expr (loc
, NULL_RTX
, addr_mode
, EXPAND_SUM
);
5599 addr
= convert_memory_address (addr_mode
, addr
);
5601 /* Note that we explicitly do not want any alias information for this
5602 memory, so that we kill all other live memories. Otherwise we don't
5603 satisfy the full barrier semantics of the intrinsic. */
5604 mem
= gen_rtx_MEM (mode
, addr
);
5606 set_mem_addr_space (mem
, addr_space
);
5608 mem
= validize_mem (mem
);
5610 /* The alignment needs to be at least according to that of the mode. */
5611 set_mem_align (mem
, MAX (GET_MODE_ALIGNMENT (mode
),
5612 get_pointer_alignment (loc
)));
5613 set_mem_alias_set (mem
, ALIAS_SET_MEMORY_BARRIER
);
5614 MEM_VOLATILE_P (mem
) = 1;
5619 /* Make sure an argument is in the right mode.
5620 EXP is the tree argument.
5621 MODE is the mode it should be in. */
5624 expand_expr_force_mode (tree exp
, machine_mode mode
)
5627 machine_mode old_mode
;
5629 if (TREE_CODE (exp
) == SSA_NAME
5630 && TYPE_MODE (TREE_TYPE (exp
)) != mode
)
5632 /* Undo argument promotion if possible, as combine might not
5633 be able to do it later due to MEM_VOLATILE_P uses in the
5635 gimple
*g
= get_gimple_for_ssa_name (exp
);
5636 if (g
&& gimple_assign_cast_p (g
))
5638 tree rhs
= gimple_assign_rhs1 (g
);
5639 tree_code code
= gimple_assign_rhs_code (g
);
5640 if (CONVERT_EXPR_CODE_P (code
)
5641 && TYPE_MODE (TREE_TYPE (rhs
)) == mode
5642 && INTEGRAL_TYPE_P (TREE_TYPE (exp
))
5643 && INTEGRAL_TYPE_P (TREE_TYPE (rhs
))
5644 && (TYPE_PRECISION (TREE_TYPE (exp
))
5645 > TYPE_PRECISION (TREE_TYPE (rhs
))))
5650 val
= expand_expr (exp
, NULL_RTX
, mode
, EXPAND_NORMAL
);
5651 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5652 of CONST_INTs, where we know the old_mode only from the call argument. */
5654 old_mode
= GET_MODE (val
);
5655 if (old_mode
== VOIDmode
)
5656 old_mode
= TYPE_MODE (TREE_TYPE (exp
));
5657 val
= convert_modes (mode
, old_mode
, val
, 1);

/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
   EXP is the CALL_EXPR.  CODE is the rtx code
   that corresponds to the arithmetic or logical operation from the name;
   an exception here is that NOT actually means NAND.  TARGET is an optional
   place for us to store the results; AFTER is true if this is the
   fetch_and_xxx form.  */

static rtx
expand_builtin_sync_operation (machine_mode mode, tree exp,
			       enum rtx_code code, bool after,
			       rtx target)
{
  rtx val, mem;
  location_t loc = EXPR_LOCATION (exp);

  if (code == NOT && warn_sync_nand)
    {
      tree fndecl = get_callee_fndecl (exp);
      enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

      static bool warned_f_a_n, warned_n_a_f;

      switch (fcode)
	{
	case BUILT_IN_SYNC_FETCH_AND_NAND_1:
	case BUILT_IN_SYNC_FETCH_AND_NAND_2:
	case BUILT_IN_SYNC_FETCH_AND_NAND_4:
	case BUILT_IN_SYNC_FETCH_AND_NAND_8:
	case BUILT_IN_SYNC_FETCH_AND_NAND_16:
	  if (warned_f_a_n)
	    break;

	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_f_a_n = true;
	  break;

	case BUILT_IN_SYNC_NAND_AND_FETCH_1:
	case BUILT_IN_SYNC_NAND_AND_FETCH_2:
	case BUILT_IN_SYNC_NAND_AND_FETCH_4:
	case BUILT_IN_SYNC_NAND_AND_FETCH_8:
	case BUILT_IN_SYNC_NAND_AND_FETCH_16:
	  if (warned_n_a_f)
	    break;

	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_n_a_f = true;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
				 after);
}

/* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
   intrinsics.  EXP is the CALL_EXPR.  IS_BOOL is
   true if this is the boolean form.  TARGET is a place for us to store the
   results; this is NOT optional if IS_BOOL is true.  */

static rtx
expand_builtin_compare_and_swap (machine_mode mode, tree exp,
				 bool is_bool, rtx target)
{
  rtx old_val, new_val, mem;
  rtx *pbool, *poval;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
  new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);

  pbool = poval = NULL;
  if (target != const0_rtx)
    {
      if (is_bool)
	pbool = &target;
      else
	poval = &target;
    }
  if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
				       false, MEMMODEL_SYNC_SEQ_CST,
				       MEMMODEL_SYNC_SEQ_CST))
    return NULL_RTX;

  return target;
}

/* Expand the __sync_lock_test_and_set intrinsic.  Note that the most
   general form is actually an atomic exchange, and some targets only
   support a reduced form with the second argument being a constant 1.
   EXP is the CALL_EXPR; TARGET is an optional place for us to store
   the results.  */

static rtx
expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
				       rtx target)
{
  rtx val, mem;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_sync_lock_test_and_set (target, mem, val);
}

/* Expand the __sync_lock_release intrinsic.  EXP is the CALL_EXPR.  */

static void
expand_builtin_sync_lock_release (machine_mode mode, tree exp)
{
  rtx mem;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
}

/* Given an integer representing an ``enum memmodel'', verify its
   correctness and return the memory model enum.  */

static enum memmodel
get_memmodel (tree exp)
{
  rtx op;
  unsigned HOST_WIDE_INT val;
  location_t loc
    = expansion_point_location_if_in_system_header (input_location);

  /* If the parameter is not a constant, it's a run time value so we'll just
     convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking.  */
  if (TREE_CODE (exp) != INTEGER_CST)
    return MEMMODEL_SEQ_CST;

  op = expand_normal (exp);

  val = INTVAL (op);
  if (targetm.memmodel_check)
    val = targetm.memmodel_check (val);
  else if (val & ~MEMMODEL_MASK)
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "unknown architecture specifier in memory model to builtin");
      return MEMMODEL_SEQ_CST;
    }

  /* Should never see a user explicit SYNC memmodel, so >= LAST works.  */
  if (memmodel_base (val) >= MEMMODEL_LAST)
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "invalid memory model argument to builtin");
      return MEMMODEL_SEQ_CST;
    }

  /* Workaround for Bugzilla 59448.  GCC doesn't track consume properly, so
     be conservative and promote consume to acquire.  */
  if (val == MEMMODEL_CONSUME)
    val = MEMMODEL_ACQUIRE;

  return (enum memmodel) val;
}
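
/* Illustrative note added in editing (not part of the original sources):
   at the source level the memory-model argument is one of the __ATOMIC_*
   constants, e.g.

     v = __atomic_load_n (&x, __ATOMIC_ACQUIRE);

   As implemented above, a non-constant or out-of-range model argument is
   conservatively treated as __ATOMIC_SEQ_CST, and __ATOMIC_CONSUME is
   promoted to __ATOMIC_ACQUIRE.  */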

/* Expand the __atomic_exchange intrinsic:
	TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
{
  rtx val, mem;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_exchange (target, mem, val, model);
}
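
/* Illustrative note added in editing (not part of the original sources):
   a typical call handled by the expander above is

     long old = __atomic_exchange_n (&lock, 1L, __ATOMIC_ACQ_REL);

   MODE is derived from the _N suffix of the builtin; when the expansion
   returns NULL_RTX (for instance under -fno-inline-atomics), the caller
   falls back to emitting an ordinary library call for the builtin.  */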

/* Expand the __atomic_compare_exchange intrinsic:
	bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
					TYPE desired, BOOL weak,
					enum memmodel success,
					enum memmodel failure)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
					rtx target)
{
  rtx expect, desired, mem, oldval;
  rtx_code_label *label;
  enum memmodel success, failure;
  tree weak;
  bool is_weak;
  location_t loc
    = expansion_point_location_if_in_system_header (input_location);

  success = get_memmodel (CALL_EXPR_ARG (exp, 4));
  failure = get_memmodel (CALL_EXPR_ARG (exp, 5));

  if (failure > success)
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "failure memory model cannot be stronger than success "
		  "memory model for %<__atomic_compare_exchange%>");
      success = MEMMODEL_SEQ_CST;
    }

  if (is_mm_release (failure) || is_mm_acq_rel (failure))
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "invalid failure memory model for "
		  "%<__atomic_compare_exchange%>");
      failure = MEMMODEL_SEQ_CST;
      success = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  expect = expand_normal (CALL_EXPR_ARG (exp, 1));
  expect = convert_memory_address (Pmode, expect);
  expect = gen_rtx_MEM (mode, expect);
  desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);

  weak = CALL_EXPR_ARG (exp, 3);
  is_weak = false;
  if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
    is_weak = true;

  if (target == const0_rtx)
    target = NULL;

  /* Lest the rtl backend create a race condition with an improper store
     to memory, always create a new pseudo for OLDVAL.  */
  oldval = NULL;

  if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
				       is_weak, success, failure))
    return NULL_RTX;

  /* Conditionally store back to EXPECT, lest we create a race condition
     with an improper store to memory.  */
  /* ??? With a rearrangement of atomics at the gimple level, we can handle
     the normal case where EXPECT is totally private, i.e. a register.  At
     which point the store can be unconditional.  */
  label = gen_label_rtx ();
  emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
			   GET_MODE (target), 1, label);
  emit_move_insn (expect, oldval);
  emit_label (label);

  return target;
}
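
/* Illustrative note added in editing (not part of the original sources):
   the builtin expanded above corresponds to source such as

     int expected = 0;
     bool ok = __atomic_compare_exchange_n (&val, &expected, 1, false,
					    __ATOMIC_SEQ_CST,
					    __ATOMIC_RELAXED);

   A failure model that is stronger than the success model, or that is a
   release/acquire-release model, is diagnosed and demoted to seq_cst as
   shown above.  */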

/* Helper function for expand_ifn_atomic_compare_exchange - expand
   internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
   call.  The weak parameter must be dropped to match the expected parameter
   list and the expected argument changed from value to pointer to memory
   location.  */

static void
expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
{
  unsigned int z;
  vec<tree, va_gc> *vec;

  vec_alloc (vec, 5);
  vec->quick_push (gimple_call_arg (call, 0));
  tree expected = gimple_call_arg (call, 1);
  rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
				      TREE_TYPE (expected));
  rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
  if (expd != x)
    emit_move_insn (x, expd);
  tree v = make_tree (TREE_TYPE (expected), x);
  vec->quick_push (build1 (ADDR_EXPR,
			   build_pointer_type (TREE_TYPE (expected)), v));
  vec->quick_push (gimple_call_arg (call, 2));
  /* Skip the boolean weak parameter.  */
  for (z = 4; z < 6; z++)
    vec->quick_push (gimple_call_arg (call, z));
  /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}.  */
  unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
  gcc_assert (bytes_log2 < 5);
  built_in_function fncode
    = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
			   + bytes_log2);
  tree fndecl = builtin_decl_explicit (fncode);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
		    fndecl);
  tree exp = build_call_vec (boolean_type_node, fn, vec);
  tree lhs = gimple_call_lhs (call);
  rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
  if (lhs)
    {
      rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
      if (GET_MODE (boolret) != mode)
	boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
      x = force_reg (mode, x);
      write_complex_part (target, boolret, true);
      write_complex_part (target, x, false);
    }
}

/* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function.  */

void
expand_ifn_atomic_compare_exchange (gcall *call)
{
  int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
  gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
  machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
  rtx expect, desired, mem, oldval, boolret;
  enum memmodel success, failure;
  tree lhs;
  bool is_weak;
  location_t loc
    = expansion_point_location_if_in_system_header (gimple_location (call));

  success = get_memmodel (gimple_call_arg (call, 4));
  failure = get_memmodel (gimple_call_arg (call, 5));

  if (failure > success)
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "failure memory model cannot be stronger than success "
		  "memory model for %<__atomic_compare_exchange%>");
      success = MEMMODEL_SEQ_CST;
    }

  if (is_mm_release (failure) || is_mm_acq_rel (failure))
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "invalid failure memory model for "
		  "%<__atomic_compare_exchange%>");
      failure = MEMMODEL_SEQ_CST;
      success = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    {
      expand_ifn_atomic_compare_exchange_into_call (call, mode);
      return;
    }

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);

  expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
  desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);

  is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;

  boolret = NULL;
  oldval = NULL;

  if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
				       is_weak, success, failure))
    {
      expand_ifn_atomic_compare_exchange_into_call (call, mode);
      return;
    }

  lhs = gimple_call_lhs (call);
  if (lhs)
    {
      rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
      if (GET_MODE (boolret) != mode)
	boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
      write_complex_part (target, boolret, true);
      write_complex_part (target, oldval, false);
    }
}

/* Expand the __atomic_load intrinsic:
	TYPE __atomic_load (TYPE *object, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
{
  rtx mem;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 1));
  if (is_mm_release (model) || is_mm_acq_rel (model))
    {
      location_t loc
	= expansion_point_location_if_in_system_header (input_location);
      warning_at (loc, OPT_Winvalid_memory_model,
		  "invalid memory model for %<__atomic_load%>");
      model = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operand.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  return expand_atomic_load (target, mem, model);
}

/* Expand the __atomic_store intrinsic:
	void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_store (machine_mode mode, tree exp)
{
  rtx mem, val;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));
  if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
	|| is_mm_release (model)))
    {
      location_t loc
	= expansion_point_location_if_in_system_header (input_location);
      warning_at (loc, OPT_Winvalid_memory_model,
		  "invalid memory model for %<__atomic_store%>");
      model = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_store (mem, val, model, false);
}

/* Expand the __atomic_fetch_XXX intrinsic:
	TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.
   CODE is the operation, PLUS, MINUS, AND, XOR, or IOR.
   FETCH_AFTER is true if returning the result of the operation.
   FETCH_AFTER is false if returning the value before the operation.
   IGNORE is true if the result is not used.
   EXT_CALL is the correct builtin for an external call if this cannot be
   resolved to an instruction sequence.  */

static rtx
expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
				enum rtx_code code, bool fetch_after,
				bool ignore, enum built_in_function ext_call)
{
  rtx val, mem, ret;
  enum memmodel model;
  tree fndecl;
  tree addr;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  /* Only try generating instructions if inlining is turned on.  */
  if (flag_inline_atomics)
    {
      ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
      if (ret)
	return ret;
    }

  /* Return if a different routine isn't needed for the library call.  */
  if (ext_call == BUILT_IN_NONE)
    return NULL_RTX;

  /* Change the call to the specified function.  */
  fndecl = get_callee_fndecl (exp);
  addr = CALL_EXPR_FN (exp);
  STRIP_NOPS (addr);

  gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
  TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);

  /* If we will emit code after the call, the call cannot be a tail call.
     If it is emitted as a tail call, a barrier is emitted after it, and
     then all trailing code is removed.  */
  if (!ignore)
    CALL_EXPR_TAILCALL (exp) = 0;

  /* Expand the call here so we can emit trailing code.  */
  ret = expand_call (exp, target, ignore);

  /* Replace the original function just in case it matters.  */
  TREE_OPERAND (addr, 0) = fndecl;

  /* Then issue the arithmetic correction to return the right result.  */
  if (!ignore)
    {
      if (code == NOT)
	{
	  ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
				     OPTAB_LIB_WIDEN);
	  ret = expand_simple_unop (mode, NOT, ret, target, true);
	}
      else
	ret = expand_simple_binop (mode, code, ret, val, target, true,
				   OPTAB_LIB_WIDEN);
    }
  return ret;
}

/* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function.  */

void
expand_ifn_atomic_bit_test_and (gcall *call)
{
  tree ptr = gimple_call_arg (call, 0);
  tree bit = gimple_call_arg (call, 1);
  tree flag = gimple_call_arg (call, 2);
  tree lhs = gimple_call_lhs (call);
  enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
  machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
  enum rtx_code code;
  optab optab;
  class expand_operand ops[5];

  gcc_assert (flag_inline_atomics);

  if (gimple_call_num_args (call) == 4)
    model = get_memmodel (gimple_call_arg (call, 3));

  rtx mem = get_builtin_sync_mem (ptr, mode);
  rtx val = expand_expr_force_mode (bit, mode);

  switch (gimple_call_internal_fn (call))
    {
    case IFN_ATOMIC_BIT_TEST_AND_SET:
      code = IOR;
      optab = atomic_bit_test_and_set_optab;
      break;
    case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
      code = XOR;
      optab = atomic_bit_test_and_complement_optab;
      break;
    case IFN_ATOMIC_BIT_TEST_AND_RESET:
      code = AND;
      optab = atomic_bit_test_and_reset_optab;
      break;
    default:
      gcc_unreachable ();
    }

  if (lhs == NULL_TREE)
    {
      val = expand_simple_binop (mode, ASHIFT, const1_rtx,
				 val, NULL_RTX, true, OPTAB_DIRECT);
      if (code == AND)
	val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
      expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
      return;
    }

  rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  enum insn_code icode = direct_optab_handler (optab, mode);
  gcc_assert (icode != CODE_FOR_nothing);
  create_output_operand (&ops[0], target, mode);
  create_fixed_operand (&ops[1], mem);
  create_convert_operand_to (&ops[2], val, mode, true);
  create_integer_operand (&ops[3], model);
  create_integer_operand (&ops[4], integer_onep (flag));
  if (maybe_expand_insn (icode, 5, ops))
    return;

  rtx bitval = val;
  val = expand_simple_binop (mode, ASHIFT, const1_rtx,
			     val, NULL_RTX, true, OPTAB_DIRECT);
  rtx maskval = val;
  if (code == AND)
    val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
  rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
				       code, model, false);
  if (integer_onep (flag))
    {
      result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
				    NULL_RTX, true, OPTAB_DIRECT);
      result = expand_simple_binop (mode, AND, result, const1_rtx, target,
				    true, OPTAB_DIRECT);
    }
  else
    result = expand_simple_binop (mode, AND, result, maskval, target, true,
				  OPTAB_DIRECT);
  if (result != target)
    emit_move_insn (target, result);
}
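
/* Illustrative note added in editing (not part of the original sources):
   these internal functions are created by the GIMPLE optimizers from
   idioms such as

     bool was_set
       = (__atomic_fetch_or (&word, 1u << bit, __ATOMIC_SEQ_CST) >> bit) & 1;

   so that targets providing the atomic_bit_test_and_* patterns (for
   example x86's lock bts/btr/btc) can compute just the tested bit instead
   of materialising the whole fetched value; the tail above is the generic
   fallback when no such pattern matches.  */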

/* Expand an atomic clear operation.
	void _atomic_clear (BOOL *obj, enum memmodel)
   EXP is the call expression.  */

static rtx
expand_builtin_atomic_clear (tree exp)
{
  machine_mode mode;
  rtx mem, ret;
  enum memmodel model;

  mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  model = get_memmodel (CALL_EXPR_ARG (exp, 1));

  if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
    {
      location_t loc
	= expansion_point_location_if_in_system_header (input_location);
      warning_at (loc, OPT_Winvalid_memory_model,
		  "invalid memory model for %<__atomic_store%>");
      model = MEMMODEL_SEQ_CST;
    }

  /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
     Failing that, a store is issued by __atomic_store.  The only way this can
     fail is if the bool type is larger than a word size.  Unlikely, but
     handle it anyway for completeness.  Assume a single threaded model since
     there is no atomic support in this case, and no barriers are required.  */
  ret = expand_atomic_store (mem, const0_rtx, model, true);
  if (!ret)
    emit_move_insn (mem, const0_rtx);
  return const0_rtx;
}

/* Expand an atomic test_and_set operation.
	bool _atomic_test_and_set (BOOL *obj, enum memmodel)
   EXP is the call expression.  */

static rtx
expand_builtin_atomic_test_and_set (tree exp, rtx target)
{
  rtx mem;
  enum memmodel model;
  machine_mode mode;

  mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  model = get_memmodel (CALL_EXPR_ARG (exp, 1));

  return expand_atomic_test_and_set (target, mem, model);
}

/* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
   this architecture.  If ARG1 is NULL, use typical alignment for size ARG0.  */

static tree
fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
{
  int size;
  machine_mode mode;
  unsigned int mode_align, type_align;

  if (TREE_CODE (arg0) != INTEGER_CST)
    return NULL_TREE;

  /* We need a corresponding integer mode for the access to be lock-free.  */
  size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
  if (!int_mode_for_size (size, 0).exists (&mode))
    return boolean_false_node;

  mode_align = GET_MODE_ALIGNMENT (mode);

  if (TREE_CODE (arg1) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));

      /* Either this argument is null, or it's a fake pointer encoding
	 the alignment of the object.  */
      val = least_bit_hwi (val);
      val *= BITS_PER_UNIT;

      if (val == 0 || mode_align < val)
	type_align = mode_align;
      else
	type_align = val;
    }
  else
    {
      tree ttype = TREE_TYPE (arg1);

      /* This function is usually invoked and folded immediately by the front
	 end before anything else has a chance to look at it.  The pointer
	 parameter at this point is usually cast to a void *, so check for that
	 and look past the cast.  */
      if (CONVERT_EXPR_P (arg1)
	  && POINTER_TYPE_P (ttype)
	  && VOID_TYPE_P (TREE_TYPE (ttype))
	  && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
	arg1 = TREE_OPERAND (arg1, 0);

      ttype = TREE_TYPE (arg1);
      gcc_assert (POINTER_TYPE_P (ttype));

      /* Get the underlying type of the object.  */
      ttype = TREE_TYPE (ttype);
      type_align = TYPE_ALIGN (ttype);
    }

  /* If the object has smaller alignment, the lock free routines cannot
     be used.  */
  if (type_align < mode_align)
    return boolean_false_node;

  /* Check if a compare_and_swap pattern exists for the mode which represents
     the required size.  The pattern is not allowed to fail, so the existence
     of the pattern indicates support is present.  Also require that an
     atomic load exists for the required size.  */
  if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
    return boolean_true_node;
  else
    return boolean_false_node;
}
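
/* Illustrative note added in editing (not part of the original sources):
   this folding is what allows uses such as

     _Static_assert (__atomic_always_lock_free (sizeof (int), 0),
		     "int must be lock-free");

   to be evaluated at compile time: a literal 0 for the second argument
   means "assume the typical alignment for this size", and the result is
   true only when the target has both a compare-and-swap pattern and an
   atomic load for the corresponding integer mode.  */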

/* Return true if the parameters to call EXP represent an object which will
   always generate lock free instructions.  The first argument represents the
   size of the object, and the second parameter is a pointer to the object
   itself.  If NULL is passed for the object, then the result is based on
   typical alignment for an object of the specified size.  Otherwise return
   false.  */

static rtx
expand_builtin_atomic_always_lock_free (tree exp)
{
  tree size;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);

  if (TREE_CODE (arg0) != INTEGER_CST)
    {
      error ("non-constant argument 1 to %qs", "__atomic_always_lock_free");
      return const0_rtx;
    }

  size = fold_builtin_atomic_always_lock_free (arg0, arg1);
  if (size == boolean_true_node)
    return const1_rtx;
  return const0_rtx;
}

/* Return a one or zero if it can be determined that object ARG1 of size ARG
   is lock free on this architecture.  */

static tree
fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
{
  if (!flag_inline_atomics)
    return NULL_TREE;

  /* If it isn't always lock free, don't generate a result.  */
  if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
    return boolean_true_node;

  return NULL_TREE;
}

/* Return true if the parameters to call EXP represent an object which will
   always generate lock free instructions.  The first argument represents the
   size of the object, and the second parameter is a pointer to the object
   itself.  If NULL is passed for the object, then the result is based on
   typical alignment for an object of the specified size.  Otherwise return
   NULL.  */

static rtx
expand_builtin_atomic_is_lock_free (tree exp)
{
  tree size;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);

  if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
    {
      error ("non-integer argument 1 to %qs", "__atomic_is_lock_free");
      return NULL_RTX;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* If the value is known at compile time, return the RTX for it.  */
  size = fold_builtin_atomic_is_lock_free (arg0, arg1);
  if (size == boolean_true_node)
    return const1_rtx;

  return NULL_RTX;
}

/* Expand the __atomic_thread_fence intrinsic:
	void __atomic_thread_fence (enum memmodel)
   EXP is the CALL_EXPR.  */

static void
expand_builtin_atomic_thread_fence (tree exp)
{
  enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
  expand_mem_thread_fence (model);
}

/* Expand the __atomic_signal_fence intrinsic:
	void __atomic_signal_fence (enum memmodel)
   EXP is the CALL_EXPR.  */

static void
expand_builtin_atomic_signal_fence (tree exp)
{
  enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
  expand_mem_signal_fence (model);
}

/* Expand the __sync_synchronize intrinsic.  */

static void
expand_builtin_sync_synchronize (void)
{
  expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
}

static rtx
expand_builtin_thread_pointer (tree exp, rtx target)
{
  enum insn_code icode;
  if (!validate_arglist (exp, VOID_TYPE))
    return const0_rtx;
  icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
  if (icode != CODE_FOR_nothing)
    {
      class expand_operand op;
      /* If the target is not suitable then create a new target.  */
      if (target == NULL_RTX
	  || !REG_P (target)
	  || GET_MODE (target) != Pmode)
	target = gen_reg_rtx (Pmode);
      create_output_operand (&op, target, Pmode);
      expand_insn (icode, 1, &op);
      return target;
    }
  error ("%<__builtin_thread_pointer%> is not supported on this target");
  return const0_rtx;
}

static void
expand_builtin_set_thread_pointer (tree exp)
{
  enum insn_code icode;
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return;
  icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
  if (icode != CODE_FOR_nothing)
    {
      class expand_operand op;
      rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
			     Pmode, EXPAND_NORMAL);
      create_input_operand (&op, val, Pmode);
      expand_insn (icode, 1, &op);
      return;
    }
  error ("%<__builtin_set_thread_pointer%> is not supported on this target");
}

/* Emit code to restore the current value of stack.  */

static void
expand_stack_restore (tree var)
{
  rtx_insn *prev;
  rtx sa = expand_normal (var);

  sa = convert_memory_address (Pmode, sa);

  prev = get_last_insn ();
  emit_stack_restore (SAVE_BLOCK, sa);

  record_new_stack_level ();

  fixup_args_size_notes (prev, get_last_insn (), 0);
}

/* Emit code to save the current value of stack.  */

static rtx
expand_stack_save (void)
{
  rtx ret = NULL_RTX;

  emit_stack_save (SAVE_BLOCK, &ret);
  return ret;
}

/* Emit code to get the openacc gang, worker or vector id or size.  */

static rtx
expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
{
  const char *name;
  rtx fallback_retval;
  rtx_insn *(*gen_fn) (rtx, rtx);
  switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
    {
    case BUILT_IN_GOACC_PARLEVEL_ID:
      name = "__builtin_goacc_parlevel_id";
      fallback_retval = const0_rtx;
      gen_fn = targetm.gen_oacc_dim_pos;
      break;
    case BUILT_IN_GOACC_PARLEVEL_SIZE:
      name = "__builtin_goacc_parlevel_size";
      fallback_retval = const1_rtx;
      gen_fn = targetm.gen_oacc_dim_size;
      break;
    default:
      gcc_unreachable ();
    }

  if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
    {
      error ("%qs only supported in OpenACC code", name);
      return const0_rtx;
    }

  tree arg = CALL_EXPR_ARG (exp, 0);
  if (TREE_CODE (arg) != INTEGER_CST)
    {
      error ("non-constant argument 0 to %qs", name);
      return const0_rtx;
    }

  int dim = TREE_INT_CST_LOW (arg);
  switch (dim)
    {
    case GOMP_DIM_GANG:
    case GOMP_DIM_WORKER:
    case GOMP_DIM_VECTOR:
      break;
    default:
      error ("illegal argument 0 to %qs", name);
      return const0_rtx;
    }

  if (ignore)
    return target;

  if (target == NULL_RTX)
    target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

  if (!targetm.have_oacc_dim_size ())
    {
      emit_move_insn (target, fallback_retval);
      return target;
    }

  rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
  emit_insn (gen_fn (reg, GEN_INT (dim)));
  if (reg != target)
    emit_move_insn (target, reg);

  return target;
}

/* Expand a string compare operation using a sequence of char comparisons
   to get rid of the calling overhead, with result going to TARGET if
   that's convenient.

   VAR_STR is the variable string source;
   CONST_STR is the constant string source;
   LENGTH is the number of chars to compare;
   CONST_STR_N indicates which source string is the constant string;
   IS_MEMCMP indicates whether it's a memcmp or strcmp.

   The expansion looks like this (assume const_str_n is 2, i.e., arg2 is a
   constant string):

   target = (int) (unsigned char) var_str[0]
	    - (int) (unsigned char) const_str[0];
   if (target != 0)
     goto ne_label;
     ...
   target = (int) (unsigned char) var_str[length - 2]
	    - (int) (unsigned char) const_str[length - 2];
   if (target != 0)
     goto ne_label;
   target = (int) (unsigned char) var_str[length - 1]
	    - (int) (unsigned char) const_str[length - 1];
   ne_label:
  */

static rtx
inline_string_cmp (rtx target, tree var_str, const char *const_str,
		   unsigned HOST_WIDE_INT length,
		   int const_str_n, machine_mode mode)
{
  HOST_WIDE_INT offset = 0;
  rtx var_rtx_array
    = get_memory_rtx (var_str, build_int_cst (unsigned_type_node, length));
  rtx var_rtx = NULL_RTX;
  rtx const_rtx = NULL_RTX;
  rtx result = target ? target : gen_reg_rtx (mode);
  rtx_code_label *ne_label = gen_label_rtx ();
  tree unit_type_node = unsigned_char_type_node;
  scalar_int_mode unit_mode
    = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);

  start_sequence ();

  for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
    {
      var_rtx
	= adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
      const_rtx = c_readstr (const_str + offset, unit_mode);
      rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
      rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;

      op0 = convert_modes (mode, unit_mode, op0, 1);
      op1 = convert_modes (mode, unit_mode, op1, 1);
      result = expand_simple_binop (mode, MINUS, op0, op1,
				    result, 1, OPTAB_WIDEN);
      if (i < length - 1)
	emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
				 mode, true, ne_label);
      offset += GET_MODE_SIZE (unit_mode);
    }

  emit_label (ne_label);
  rtx_insn *insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return result;
}

/* Inline expansion of a call to str(n)cmp and memcmp, with result going
   to TARGET if that's convenient.
   If the call was not inlined, return NULL_RTX.  */

static rtx
inline_expand_builtin_bytecmp (tree exp, rtx target)
{
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);

  /* Do NOT apply this inlining expansion when optimizing for size or
     optimization level below 2.  */
  if (optimize < 2 || optimize_insn_for_size_p ())
    return NULL_RTX;

  gcc_checking_assert (fcode == BUILT_IN_STRCMP
		       || fcode == BUILT_IN_STRNCMP
		       || fcode == BUILT_IN_MEMCMP);

  /* On a target where the type of the call (int) has same or narrower
     precision than unsigned char, give up the inlining expansion.  */
  if (TYPE_PRECISION (unsigned_char_type_node)
      >= TYPE_PRECISION (TREE_TYPE (exp)))
    return NULL_RTX;

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree arg2 = CALL_EXPR_ARG (exp, 1);
  tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;

  unsigned HOST_WIDE_INT len1 = 0;
  unsigned HOST_WIDE_INT len2 = 0;
  unsigned HOST_WIDE_INT len3 = 0;

  /* Get the object representation of the initializers of ARG1 and ARG2
     as strings, provided they refer to constant objects, with their byte
     sizes in LEN1 and LEN2, respectively.  */
  const char *bytes1 = getbyterep (arg1, &len1);
  const char *bytes2 = getbyterep (arg2, &len2);

  /* Fail if neither argument refers to an initialized constant.  */
  if (!bytes1 && !bytes2)
    return NULL_RTX;

  if (is_ncmp)
    {
      /* Fail if the memcmp/strncmp bound is not a constant.  */
      if (!tree_fits_uhwi_p (len3_tree))
	return NULL_RTX;

      len3 = tree_to_uhwi (len3_tree);

      if (fcode == BUILT_IN_MEMCMP)
	{
	  /* Fail if the memcmp bound is greater than the size of either
	     of the two constant objects.  */
	  if ((bytes1 && len1 < len3)
	      || (bytes2 && len2 < len3))
	    return NULL_RTX;
	}
    }

  if (fcode != BUILT_IN_MEMCMP)
    {
      /* For string functions (i.e., strcmp and strncmp) reduce LEN1
	 and LEN2 to the length of the nul-terminated string stored
	 in each.  */
      if (bytes1 != NULL)
	len1 = strnlen (bytes1, len1) + 1;
      if (bytes2 != NULL)
	len2 = strnlen (bytes2, len2) + 1;
    }

  /* See inline_string_cmp.  */
  int const_str_n;
  if (!len1)
    const_str_n = 2;
  else if (!len2)
    const_str_n = 1;
  else if (len2 > len1)
    const_str_n = 1;
  else
    const_str_n = 2;

  /* For strncmp only, compute the new bound as the smallest of
     the lengths of the two strings (plus 1) and the bound provided
     to the function.  */
  unsigned HOST_WIDE_INT bound = (const_str_n == 1) ? len1 : len2;
  if (is_ncmp && len3 < bound)
    bound = len3;

  /* If the bound of the comparison is larger than the threshold,
     do nothing.  */
  if (bound > (unsigned HOST_WIDE_INT) param_builtin_string_cmp_inline_length)
    return NULL_RTX;

  machine_mode mode = TYPE_MODE (TREE_TYPE (exp));

  /* Now, start inline expansion of the call.  */
  return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
			    (const_str_n == 1) ? bytes1 : bytes2, bound,
			    const_str_n, mode);
}
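
/* Illustrative note added in editing (not part of the original sources):
   with -O2 and a short constant operand, a call such as

     if (strcmp (name, "hi") == 0)
       ...

   is rewritten by the function above into the byte-difference sequence
   documented before inline_string_cmp instead of a library call; the
   length cut-off is controlled by --param builtin-string-cmp-inline-length.  */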

/* Expand a call to __builtin_speculation_safe_value_<N>.  MODE
   represents the size of the first argument to that call, or VOIDmode
   if the argument is a pointer.  IGNORE will be true if the result
   isn't used.  */
static rtx
expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
			       bool ignore)
{
  rtx val, failsafe;
  unsigned nargs = call_expr_nargs (exp);

  tree arg0 = CALL_EXPR_ARG (exp, 0);

  if (mode == VOIDmode)
    {
      mode = TYPE_MODE (TREE_TYPE (arg0));
      gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
    }

  val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);

  /* An optional second argument can be used as a failsafe value on
     some machines.  If it isn't present, then the failsafe value is
     assumed to be 0.  */
  if (nargs > 1)
    {
      tree arg1 = CALL_EXPR_ARG (exp, 1);
      failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
    }
  else
    failsafe = const0_rtx;

  /* If the result isn't used, the behavior is undefined.  It would be
     nice to emit a warning here, but path splitting means this might
     happen with legitimate code.  So simply drop the builtin
     expansion in that case; we've handled any side-effects above.  */
  if (ignore)
    return const0_rtx;

  /* If we don't have a suitable target, create one to hold the result.  */
  if (target == NULL || GET_MODE (target) != mode)
    target = gen_reg_rtx (mode);

  if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
    val = convert_modes (mode, VOIDmode, val, false);

  return targetm.speculation_safe_value (mode, target, val, failsafe);
}
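
/* Illustrative note added in editing (not part of the original sources):
   the builtin is intended to be used after a bounds check to block value
   speculation (Spectre variant 1 style), e.g.

     if (i < len)
       x = array[__builtin_speculation_safe_value (i, 0)];

   The optional second argument is the failsafe value; the actual barrier,
   if any, comes from the target hook invoked above.  */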

/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

rtx
expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
		int ignore)
{
  tree fndecl = get_callee_fndecl (exp);
  machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
  int flags;

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return targetm.expand_builtin (exp, target, subtarget, mode, ignore);

  /* When ASan is enabled, we don't want to expand some memory/string
     builtins and rely on libsanitizer's hooks.  This allows us to avoid
     redundant checks and be sure that possible overflow will be detected
     by ASan.  */

  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
    return expand_call (exp, target, ignore);

  /* When not optimizing, generate calls to library functions for a certain
     set of builtins.  */
  if (!optimize
      && !called_as_built_in (fndecl)
      && fcode != BUILT_IN_FORK
      && fcode != BUILT_IN_EXECL
      && fcode != BUILT_IN_EXECV
      && fcode != BUILT_IN_EXECLP
      && fcode != BUILT_IN_EXECLE
      && fcode != BUILT_IN_EXECVP
      && fcode != BUILT_IN_EXECVE
      && fcode != BUILT_IN_CLEAR_CACHE
      && !ALLOCA_FUNCTION_CODE_P (fcode)
      && fcode != BUILT_IN_FREE)
    return expand_call (exp, target, ignore);

  /* The built-in function expanders test for target == const0_rtx
     to determine whether the function's result will be ignored.  */
  if (ignore)
    target = const0_rtx;

  /* If the result of a pure or const built-in function is ignored, and
     none of its arguments are volatile, we can avoid expanding the
     built-in call and just evaluate the arguments for side-effects.  */
  if (target == const0_rtx
      && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
      && !(flags & ECF_LOOPING_CONST_OR_PURE))
    {
      bool volatilep = false;
      tree arg;
      call_expr_arg_iterator iter;

      FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
	if (TREE_THIS_VOLATILE (arg))
	  {
	    volatilep = true;
	    break;
	  }

      if (!volatilep)
	{
	  FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
	    expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return const0_rtx;
	}
    }

  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_FABS):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
    case BUILT_IN_FABSD32:
    case BUILT_IN_FABSD64:
    case BUILT_IN_FABSD128:
      target = expand_builtin_fabs (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_COPYSIGN):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
      target = expand_builtin_copysign (exp, target, subtarget);
      if (target)
	return target;
      break;

      /* Just do a normal library call if we were unable to fold
	 the values.  */
    CASE_FLT_FN (BUILT_IN_CABS):
      break;

    CASE_FLT_FN (BUILT_IN_FMA):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
      target = expand_builtin_mathfn_ternary (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_ILOGB):
      if (! flag_unsafe_math_optimizations)
	break;
      gcc_fallthrough ();
    CASE_FLT_FN (BUILT_IN_ISINF):
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_ISFINITE:
    case BUILT_IN_ISNORMAL:
      target = expand_builtin_interclass_mathfn (exp, target);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      target = expand_builtin_int_roundingfn (exp, target);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_IRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_IROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      target = expand_builtin_int_roundingfn_2 (exp, target);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_POWI):
      target = expand_builtin_powi (exp, target);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_CEXPI):
      target = expand_builtin_cexpi (exp, target);
      gcc_assert (target);
      return target;

    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      if (! flag_unsafe_math_optimizations)
	break;
      target = expand_builtin_mathfn_3 (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_SINCOS):
      if (! flag_unsafe_math_optimizations)
	break;
      target = expand_builtin_sincos (exp);
      if (target)
	return target;
      break;

    case BUILT_IN_APPLY_ARGS:
      return expand_builtin_apply_args ();

      /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
	 FUNCTION with a copy of the parameters described by
	 ARGUMENTS, and ARGSIZE.  It returns a block of memory
	 allocated on the stack into which is stored all the registers
	 that might possibly be used for returning the result of a
	 function.  ARGUMENTS is the value returned by
	 __builtin_apply_args.  ARGSIZE is the number of bytes of
	 arguments that must be copied.  ??? How should this value be
	 computed?  We'll also need a safe worst case value for varargs
	 functions.  */
    case BUILT_IN_APPLY:
      if (!validate_arglist (exp, POINTER_TYPE,
			     POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
	  && !validate_arglist (exp, REFERENCE_TYPE,
				POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
	return const0_rtx;
      else
	{
	  rtx ops[3];

	  ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
	  ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
	  ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));

	  return expand_builtin_apply (ops[0], ops[1], ops[2]);
	}

      /* __builtin_return (RESULT) causes the function to return the
	 value described by RESULT.  RESULT is address of the block of
	 memory returned by __builtin_apply.  */
    case BUILT_IN_RETURN:
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
	expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
      return const0_rtx;

    case BUILT_IN_SAVEREGS:
      return expand_builtin_saveregs ();

    case BUILT_IN_VA_ARG_PACK:
      /* All valid uses of __builtin_va_arg_pack () are removed during
	 inlining.  */
      error ("invalid use of %<__builtin_va_arg_pack ()%>");
      return const0_rtx;

    case BUILT_IN_VA_ARG_PACK_LEN:
      /* All valid uses of __builtin_va_arg_pack_len () are removed during
	 inlining.  */
      error ("invalid use of %<__builtin_va_arg_pack_len ()%>");
      return const0_rtx;

      /* Return the address of the first anonymous stack arg.  */
    case BUILT_IN_NEXT_ARG:
      if (fold_builtin_next_arg (exp, false))
	return const0_rtx;
      return expand_builtin_next_arg ();

    case BUILT_IN_CLEAR_CACHE:
      expand_builtin___clear_cache (exp);
      return const0_rtx;

    case BUILT_IN_CLASSIFY_TYPE:
      return expand_builtin_classify_type (exp);

    case BUILT_IN_CONSTANT_P:
      return const0_rtx;

    case BUILT_IN_FRAME_ADDRESS:
    case BUILT_IN_RETURN_ADDRESS:
      return expand_builtin_frame_address (fndecl, exp);

      /* Returns the address of the area where the structure is returned.
	 0 otherwise.  */
    case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      if (call_expr_nargs (exp) != 0
	  || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
	  || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
	return const0_rtx;
      else
	return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);

    CASE_BUILT_IN_ALLOCA:
      target = expand_builtin_alloca (exp);
      if (target)
	return target;
      break;

    case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
      return expand_asan_emit_allocas_unpoison (exp);

    case BUILT_IN_STACK_SAVE:
      return expand_stack_save ();

    case BUILT_IN_STACK_RESTORE:
      expand_stack_restore (CALL_EXPR_ARG (exp, 0));
      return const0_rtx;

    case BUILT_IN_BSWAP16:
    case BUILT_IN_BSWAP32:
    case BUILT_IN_BSWAP64:
    case BUILT_IN_BSWAP128:
      target = expand_builtin_bswap (target_mode, exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_FFS):
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, ffs_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_CLZ):
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, clz_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_CTZ):
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, ctz_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_CLRSB):
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, clrsb_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_POPCOUNT):
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, popcount_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_PARITY):
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, parity_optab);
      if (target)
	return target;
      break;

    case BUILT_IN_STRLEN:
      target = expand_builtin_strlen (exp, target, target_mode);
      if (target)
	return target;
      break;

    case BUILT_IN_STRNLEN:
      target = expand_builtin_strnlen (exp, target, target_mode);
      if (target)
	return target;
      break;

    case BUILT_IN_STRCPY:
      target = expand_builtin_strcpy (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_STRNCPY:
      target = expand_builtin_strncpy (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_STPCPY:
      target = expand_builtin_stpcpy (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMCPY:
      target = expand_builtin_memcpy (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMMOVE:
      target = expand_builtin_memmove (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMPCPY:
      target = expand_builtin_mempcpy (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMSET:
      target = expand_builtin_memset (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_BZERO:
      target = expand_builtin_bzero (exp);
      if (target)
	return target;
      break;

    /* Expand it as BUILT_IN_MEMCMP_EQ first.  If not successful, change it
       back to a BUILT_IN_STRCMP.  Remember to delete the 3rd parameter
       when changing it to a strcmp call.  */
    case BUILT_IN_STRCMP_EQ:
      target = expand_builtin_memcmp (exp, target, true);
      if (target)
	return target;

      /* Change this call back to a BUILT_IN_STRCMP.  */
      TREE_OPERAND (exp, 1)
	= build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));

      /* Delete the last parameter.  */
      unsigned int i;
      vec<tree, va_gc> *arg_vec;
      vec_alloc (arg_vec, 2);
      for (i = 0; i < 2; i++)
	arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
      exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
      /* FALLTHROUGH */

    case BUILT_IN_STRCMP:
      target = expand_builtin_strcmp (exp, target);
      if (target)
	return target;
      break;

    /* Expand it as BUILT_IN_MEMCMP_EQ first.  If not successful, change it
       back to a BUILT_IN_STRNCMP.  */
    case BUILT_IN_STRNCMP_EQ:
      target = expand_builtin_memcmp (exp, target, true);
      if (target)
	return target;

      /* Change it back to a BUILT_IN_STRNCMP.  */
      TREE_OPERAND (exp, 1)
	= build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
      /* FALLTHROUGH */

    case BUILT_IN_STRNCMP:
      target = expand_builtin_strncmp (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMCMP:
    case BUILT_IN_MEMCMP_EQ:
      target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
      if (target)
	return target;
      if (fcode == BUILT_IN_MEMCMP_EQ)
	{
	  tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
	  TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
	}
      break;

    case BUILT_IN_SETJMP:
      /* This should have been lowered to the builtins below.  */
      gcc_unreachable ();

    case BUILT_IN_SETJMP_SETUP:
      /* __builtin_setjmp_setup is passed a pointer to an array of five words
	 and the receiver label.  */
      if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
	{
	  rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
				      VOIDmode, EXPAND_NORMAL);
	  tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
	  rtx_insn *label_r = label_rtx (label);

	  /* This is copied from the handling of non-local gotos.  */
	  expand_builtin_setjmp_setup (buf_addr, label_r);
	  nonlocal_goto_handler_labels
	    = gen_rtx_INSN_LIST (VOIDmode, label_r,
				 nonlocal_goto_handler_labels);
	  /* ??? Do not let expand_label treat us as such since we would
	     not want to be both on the list of non-local labels and on
	     the list of forced labels.  */
	  FORCED_LABEL (label) = 0;
	  return const0_rtx;
	}
      break;

    case BUILT_IN_SETJMP_RECEIVER:
      /* __builtin_setjmp_receiver is passed the receiver label.  */
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
	{
	  tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
	  rtx_insn *label_r = label_rtx (label);

	  expand_builtin_setjmp_receiver (label_r);
	  return const0_rtx;
	}
      break;

      /* __builtin_longjmp is passed a pointer to an array of five words.
	 It's similar to the C library longjmp function but works with
	 __builtin_setjmp above.  */
    case BUILT_IN_LONGJMP:
      if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
	{
	  rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
				      VOIDmode, EXPAND_NORMAL);
	  rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));

	  if (value != const1_rtx)
	    {
	      error ("%<__builtin_longjmp%> second argument must be 1");
	      return const0_rtx;
	    }

	  expand_builtin_longjmp (buf_addr, value);
	  return const0_rtx;
	}
      break;

    case BUILT_IN_NONLOCAL_GOTO:
      target = expand_builtin_nonlocal_goto (exp);
      if (target)
	return target;
      break;

      /* This updates the setjmp buffer that is its argument with the value
	 of the current stack pointer.  */
    case BUILT_IN_UPDATE_SETJMP_BUF:
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
	{
	  rtx buf_addr
	    = expand_normal (CALL_EXPR_ARG (exp, 0));

	  expand_builtin_update_setjmp_buf (buf_addr);
	  return const0_rtx;
	}
      break;

    case BUILT_IN_TRAP:
      expand_builtin_trap ();
      return const0_rtx;

    case BUILT_IN_UNREACHABLE:
      expand_builtin_unreachable ();
      return const0_rtx;

    CASE_FLT_FN (BUILT_IN_SIGNBIT):
    case BUILT_IN_SIGNBITD32:
    case BUILT_IN_SIGNBITD64:
    case BUILT_IN_SIGNBITD128:
      target = expand_builtin_signbit (exp, target);
      if (target)
	return target;
      break;

      /* Various hooks for the DWARF 2 __throw routine.  */
    case BUILT_IN_UNWIND_INIT:
      expand_builtin_unwind_init ();
      return const0_rtx;
    case BUILT_IN_DWARF_CFA:
      return virtual_cfa_rtx;
#ifdef DWARF2_UNWIND_INFO
    case BUILT_IN_DWARF_SP_COLUMN:
      return expand_builtin_dwarf_sp_column ();
    case BUILT_IN_INIT_DWARF_REG_SIZES:
      expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
      return const0_rtx;
#endif
    case BUILT_IN_FROB_RETURN_ADDR:
      return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
    case BUILT_IN_EXTRACT_RETURN_ADDR:
      return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
    case BUILT_IN_EH_RETURN:
      expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
				CALL_EXPR_ARG (exp, 1));
      return const0_rtx;
    case BUILT_IN_EH_RETURN_DATA_REGNO:
      return expand_builtin_eh_return_data_regno (exp);
    case BUILT_IN_EXTEND_POINTER:
      return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
    case BUILT_IN_EH_POINTER:
      return expand_builtin_eh_pointer (exp);
    case BUILT_IN_EH_FILTER:
      return expand_builtin_eh_filter (exp);
    case BUILT_IN_EH_COPY_VALUES:
      return expand_builtin_eh_copy_values (exp);

    case BUILT_IN_VA_START:
      return expand_builtin_va_start (exp);
    case BUILT_IN_VA_END:
      return expand_builtin_va_end (exp);
    case BUILT_IN_VA_COPY:
      return expand_builtin_va_copy (exp);
    case BUILT_IN_EXPECT:
      return expand_builtin_expect (exp, target);
    case BUILT_IN_EXPECT_WITH_PROBABILITY:
      return expand_builtin_expect_with_probability (exp, target);
    case BUILT_IN_ASSUME_ALIGNED:
      return expand_builtin_assume_aligned (exp, target);
    case BUILT_IN_PREFETCH:
      expand_builtin_prefetch (exp);
      return const0_rtx;

    case BUILT_IN_INIT_TRAMPOLINE:
      return expand_builtin_init_trampoline (exp, true);
    case BUILT_IN_INIT_HEAP_TRAMPOLINE:
      return expand_builtin_init_trampoline (exp, false);
    case BUILT_IN_ADJUST_TRAMPOLINE:
      return expand_builtin_adjust_trampoline (exp);

    case BUILT_IN_INIT_DESCRIPTOR:
      return expand_builtin_init_descriptor (exp);
    case BUILT_IN_ADJUST_DESCRIPTOR:
      return expand_builtin_adjust_descriptor (exp);

    case BUILT_IN_FORK:
    case BUILT_IN_EXECL:
    case BUILT_IN_EXECV:
    case BUILT_IN_EXECLP:
    case BUILT_IN_EXECLE:
    case BUILT_IN_EXECVP:
    case BUILT_IN_EXECVE:
      target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_ADD_1:
    case BUILT_IN_SYNC_FETCH_AND_ADD_2:
    case BUILT_IN_SYNC_FETCH_AND_ADD_4:
    case BUILT_IN_SYNC_FETCH_AND_ADD_8:
    case BUILT_IN_SYNC_FETCH_AND_ADD_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
      target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_SUB_1:
    case BUILT_IN_SYNC_FETCH_AND_SUB_2:
    case BUILT_IN_SYNC_FETCH_AND_SUB_4:
    case BUILT_IN_SYNC_FETCH_AND_SUB_8:
    case BUILT_IN_SYNC_FETCH_AND_SUB_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
      target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_OR_1:
    case BUILT_IN_SYNC_FETCH_AND_OR_2:
    case BUILT_IN_SYNC_FETCH_AND_OR_4:
    case BUILT_IN_SYNC_FETCH_AND_OR_8:
    case BUILT_IN_SYNC_FETCH_AND_OR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
      target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_AND_1:
    case BUILT_IN_SYNC_FETCH_AND_AND_2:
    case BUILT_IN_SYNC_FETCH_AND_AND_4:
    case BUILT_IN_SYNC_FETCH_AND_AND_8:
    case BUILT_IN_SYNC_FETCH_AND_AND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
      target = expand_builtin_sync_operation (mode, exp, AND, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_XOR_1:
    case BUILT_IN_SYNC_FETCH_AND_XOR_2:
    case BUILT_IN_SYNC_FETCH_AND_XOR_4:
    case BUILT_IN_SYNC_FETCH_AND_XOR_8:
    case BUILT_IN_SYNC_FETCH_AND_XOR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
      target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_NAND_1:
    case BUILT_IN_SYNC_FETCH_AND_NAND_2:
    case BUILT_IN_SYNC_FETCH_AND_NAND_4:
    case BUILT_IN_SYNC_FETCH_AND_NAND_8:
    case BUILT_IN_SYNC_FETCH_AND_NAND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
      target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_ADD_AND_FETCH_1:
    case BUILT_IN_SYNC_ADD_AND_FETCH_2:
    case BUILT_IN_SYNC_ADD_AND_FETCH_4:
    case BUILT_IN_SYNC_ADD_AND_FETCH_8:
    case BUILT_IN_SYNC_ADD_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_SUB_AND_FETCH_1:
    case BUILT_IN_SYNC_SUB_AND_FETCH_2:
    case BUILT_IN_SYNC_SUB_AND_FETCH_4:
    case BUILT_IN_SYNC_SUB_AND_FETCH_8:
    case BUILT_IN_SYNC_SUB_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_OR_AND_FETCH_1:
    case BUILT_IN_SYNC_OR_AND_FETCH_2:
    case BUILT_IN_SYNC_OR_AND_FETCH_4:
    case BUILT_IN_SYNC_OR_AND_FETCH_8:
    case BUILT_IN_SYNC_OR_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_AND_AND_FETCH_1:
    case BUILT_IN_SYNC_AND_AND_FETCH_2:
    case BUILT_IN_SYNC_AND_AND_FETCH_4:
    case BUILT_IN_SYNC_AND_AND_FETCH_8:
    case BUILT_IN_SYNC_AND_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, AND, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_XOR_AND_FETCH_1:
    case BUILT_IN_SYNC_XOR_AND_FETCH_2:
    case BUILT_IN_SYNC_XOR_AND_FETCH_4:
    case BUILT_IN_SYNC_XOR_AND_FETCH_8:
    case BUILT_IN_SYNC_XOR_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_NAND_AND_FETCH_1:
    case BUILT_IN_SYNC_NAND_AND_FETCH_2:
    case BUILT_IN_SYNC_NAND_AND_FETCH_4:
    case BUILT_IN_SYNC_NAND_AND_FETCH_8:
    case BUILT_IN_SYNC_NAND_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
      if (mode == VOIDmode)
	mode = TYPE_MODE (boolean_type_node);
      if (!target || !register_operand (target, mode))
	target = gen_reg_rtx (mode);

      mode = get_builtin_sync_mode
	       (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
      target = expand_builtin_compare_and_swap (mode, exp, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
      mode = get_builtin_sync_mode
	       (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
      target = expand_builtin_compare_and_swap (mode, exp, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
      target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_LOCK_RELEASE_1:
    case BUILT_IN_SYNC_LOCK_RELEASE_2:
    case BUILT_IN_SYNC_LOCK_RELEASE_4:
    case BUILT_IN_SYNC_LOCK_RELEASE_8:
    case BUILT_IN_SYNC_LOCK_RELEASE_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
      expand_builtin_sync_lock_release (mode, exp);
      return const0_rtx;

    case BUILT_IN_SYNC_SYNCHRONIZE:
      expand_builtin_sync_synchronize ();
      return const0_rtx;

    case BUILT_IN_ATOMIC_EXCHANGE_1:
    case BUILT_IN_ATOMIC_EXCHANGE_2:
    case BUILT_IN_ATOMIC_EXCHANGE_4:
    case BUILT_IN_ATOMIC_EXCHANGE_8:
    case BUILT_IN_ATOMIC_EXCHANGE_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
      target = expand_builtin_atomic_exchange (mode, exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
      {
	unsigned int nargs, z;
	vec<tree, va_gc> *vec;

	mode
	  = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
	target = expand_builtin_atomic_compare_exchange (mode, exp, target);
	if (target)
	  return target;

	/* If this is turned into an external library call, the weak parameter
	   must be dropped to match the expected parameter list.  */
	nargs = call_expr_nargs (exp);
	vec_alloc (vec, nargs - 1);
	for (z = 0; z < 3; z++)
	  vec->quick_push (CALL_EXPR_ARG (exp, z));
	/* Skip the boolean weak parameter.  */
	for (z = 4; z < 6; z++)
	  vec->quick_push (CALL_EXPR_ARG (exp, z));
	exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
	break;
      }

    case BUILT_IN_ATOMIC_LOAD_1:
    case BUILT_IN_ATOMIC_LOAD_2:
    case BUILT_IN_ATOMIC_LOAD_4:
    case BUILT_IN_ATOMIC_LOAD_8:
    case BUILT_IN_ATOMIC_LOAD_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
      target = expand_builtin_atomic_load (mode, exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_STORE_1:
    case BUILT_IN_ATOMIC_STORE_2:
    case BUILT_IN_ATOMIC_STORE_4:
    case BUILT_IN_ATOMIC_STORE_8:
    case BUILT_IN_ATOMIC_STORE_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
      target = expand_builtin_atomic_store (mode, exp);
      if (target)
	return const0_rtx;
      break;
    case BUILT_IN_ATOMIC_ADD_FETCH_1:
    case BUILT_IN_ATOMIC_ADD_FETCH_2:
    case BUILT_IN_ATOMIC_ADD_FETCH_4:
    case BUILT_IN_ATOMIC_ADD_FETCH_8:
    case BUILT_IN_ATOMIC_ADD_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
				       (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }

    case BUILT_IN_ATOMIC_SUB_FETCH_1:
    case BUILT_IN_ATOMIC_SUB_FETCH_2:
    case BUILT_IN_ATOMIC_SUB_FETCH_4:
    case BUILT_IN_ATOMIC_SUB_FETCH_8:
    case BUILT_IN_ATOMIC_SUB_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
				       (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }

    case BUILT_IN_ATOMIC_AND_FETCH_1:
    case BUILT_IN_ATOMIC_AND_FETCH_2:
    case BUILT_IN_ATOMIC_AND_FETCH_4:
    case BUILT_IN_ATOMIC_AND_FETCH_8:
    case BUILT_IN_ATOMIC_AND_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
				       (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }

    case BUILT_IN_ATOMIC_NAND_FETCH_1:
    case BUILT_IN_ATOMIC_NAND_FETCH_2:
    case BUILT_IN_ATOMIC_NAND_FETCH_4:
    case BUILT_IN_ATOMIC_NAND_FETCH_8:
    case BUILT_IN_ATOMIC_NAND_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
				       (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }

    case BUILT_IN_ATOMIC_XOR_FETCH_1:
    case BUILT_IN_ATOMIC_XOR_FETCH_2:
    case BUILT_IN_ATOMIC_XOR_FETCH_4:
    case BUILT_IN_ATOMIC_XOR_FETCH_8:
    case BUILT_IN_ATOMIC_XOR_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
				       (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }

    case BUILT_IN_ATOMIC_OR_FETCH_1:
    case BUILT_IN_ATOMIC_OR_FETCH_2:
    case BUILT_IN_ATOMIC_OR_FETCH_4:
    case BUILT_IN_ATOMIC_OR_FETCH_8:
    case BUILT_IN_ATOMIC_OR_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
				       (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }
    case BUILT_IN_ATOMIC_FETCH_ADD_1:
    case BUILT_IN_ATOMIC_FETCH_ADD_2:
    case BUILT_IN_ATOMIC_FETCH_ADD_4:
    case BUILT_IN_ATOMIC_FETCH_ADD_8:
    case BUILT_IN_ATOMIC_FETCH_ADD_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_SUB_1:
    case BUILT_IN_ATOMIC_FETCH_SUB_2:
    case BUILT_IN_ATOMIC_FETCH_SUB_4:
    case BUILT_IN_ATOMIC_FETCH_SUB_8:
    case BUILT_IN_ATOMIC_FETCH_SUB_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_AND_1:
    case BUILT_IN_ATOMIC_FETCH_AND_2:
    case BUILT_IN_ATOMIC_FETCH_AND_4:
    case BUILT_IN_ATOMIC_FETCH_AND_8:
    case BUILT_IN_ATOMIC_FETCH_AND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_NAND_1:
    case BUILT_IN_ATOMIC_FETCH_NAND_2:
    case BUILT_IN_ATOMIC_FETCH_NAND_4:
    case BUILT_IN_ATOMIC_FETCH_NAND_8:
    case BUILT_IN_ATOMIC_FETCH_NAND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_XOR_1:
    case BUILT_IN_ATOMIC_FETCH_XOR_2:
    case BUILT_IN_ATOMIC_FETCH_XOR_4:
    case BUILT_IN_ATOMIC_FETCH_XOR_8:
    case BUILT_IN_ATOMIC_FETCH_XOR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_OR_1:
    case BUILT_IN_ATOMIC_FETCH_OR_2:
    case BUILT_IN_ATOMIC_FETCH_OR_4:
    case BUILT_IN_ATOMIC_FETCH_OR_8:
    case BUILT_IN_ATOMIC_FETCH_OR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;
    case BUILT_IN_ATOMIC_TEST_AND_SET:
      return expand_builtin_atomic_test_and_set (exp, target);

    case BUILT_IN_ATOMIC_CLEAR:
      return expand_builtin_atomic_clear (exp);

    case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
      return expand_builtin_atomic_always_lock_free (exp);

    case BUILT_IN_ATOMIC_IS_LOCK_FREE:
      target = expand_builtin_atomic_is_lock_free (exp);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_THREAD_FENCE:
      expand_builtin_atomic_thread_fence (exp);
      return const0_rtx;

    case BUILT_IN_ATOMIC_SIGNAL_FENCE:
      expand_builtin_atomic_signal_fence (exp);
      return const0_rtx;

    case BUILT_IN_OBJECT_SIZE:
      return expand_builtin_object_size (exp);

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      target = expand_builtin_memory_chk (exp, target, mode, fcode);
      if (target)
	return target;
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
    case BUILT_IN_STRCAT_CHK:
    case BUILT_IN_STRNCAT_CHK:
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      maybe_emit_chk_warning (exp, fcode);
      break;

    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      maybe_emit_sprintf_chk_warning (exp, fcode);
      break;

    case BUILT_IN_THREAD_POINTER:
      return expand_builtin_thread_pointer (exp, target);

    case BUILT_IN_SET_THREAD_POINTER:
      expand_builtin_set_thread_pointer (exp);
      return const0_rtx;

    case BUILT_IN_ACC_ON_DEVICE:
      /* Do library call, if we failed to expand the builtin when
	 we should have.  */
      break;

    case BUILT_IN_GOACC_PARLEVEL_ID:
    case BUILT_IN_GOACC_PARLEVEL_SIZE:
      return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);

    case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
      return expand_speculation_safe_value (VOIDmode, exp, target, ignore);

    case BUILT_IN_SPECULATION_SAFE_VALUE_1:
    case BUILT_IN_SPECULATION_SAFE_VALUE_2:
    case BUILT_IN_SPECULATION_SAFE_VALUE_4:
    case BUILT_IN_SPECULATION_SAFE_VALUE_8:
    case BUILT_IN_SPECULATION_SAFE_VALUE_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
      return expand_speculation_safe_value (mode, exp, target, ignore);

    default:	/* just do library call, if unknown builtin */
      break;
    }

  /* The switch statement above can drop through to cause the function
     to be called normally.  */
  return expand_call (exp, target, ignore);
}
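
/* Illustrative sketch (compiled out, not part of GCC itself): a user-level
   view of the size-suffixed __sync/__atomic builtins handled above.  Each
   _1/_2/_4/_8/_16 variant goes through the same expander; only the machine
   mode, derived from "fcode - BUILT_IN_..._1", differs.  */
#if 0
#include <stdint.h>

static uint32_t
example_fetch_add_u32 (uint32_t *p, uint32_t v)
{
  /* Maps to BUILT_IN_ATOMIC_FETCH_ADD_4 and is expanded via
     expand_builtin_atomic_fetch_op with PLUS and after == false.  */
  return __atomic_fetch_add (p, v, __ATOMIC_SEQ_CST);
}

static uint64_t
example_add_fetch_u64 (uint64_t *p, uint64_t v)
{
  /* Maps to BUILT_IN_ATOMIC_ADD_FETCH_8; when no instruction is available
     it may fall back to the corresponding __atomic_fetch_add library call.  */
  return __atomic_add_fetch (p, v, __ATOMIC_SEQ_CST);
}
#endif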
/* Determine whether a tree node represents a call to a built-in
   function.  If the tree T is a call to a built-in function with
   the right number of arguments of the appropriate types, return
   the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
   Otherwise the return value is END_BUILTINS.  */

enum built_in_function
builtin_mathfn_code (const_tree t)
{
  const_tree fndecl, arg, parmlist;
  const_tree argtype, parmtype;
  const_call_expr_arg_iterator iter;

  if (TREE_CODE (t) != CALL_EXPR)
    return END_BUILTINS;

  fndecl = get_callee_fndecl (t);
  if (fndecl == NULL_TREE || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
    return END_BUILTINS;

  parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  init_const_call_expr_arg_iterator (t, &iter);
  for (; parmlist; parmlist = TREE_CHAIN (parmlist))
    {
      /* If a function doesn't take a variable number of arguments,
	 the last element in the list will have type `void'.  */
      parmtype = TREE_VALUE (parmlist);
      if (VOID_TYPE_P (parmtype))
	{
	  if (more_const_call_expr_args_p (&iter))
	    return END_BUILTINS;
	  return DECL_FUNCTION_CODE (fndecl);
	}

      if (! more_const_call_expr_args_p (&iter))
	return END_BUILTINS;

      arg = next_const_call_expr_arg (&iter);
      argtype = TREE_TYPE (arg);

      if (SCALAR_FLOAT_TYPE_P (parmtype))
	{
	  if (! SCALAR_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (COMPLEX_FLOAT_TYPE_P (parmtype))
	{
	  if (! COMPLEX_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (POINTER_TYPE_P (parmtype))
	{
	  if (! POINTER_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (INTEGRAL_TYPE_P (parmtype))
	{
	  if (! INTEGRAL_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else
	return END_BUILTINS;
    }

  /* Variable-length argument list.  */
  return DECL_FUNCTION_CODE (fndecl);
}
/* Fold a call to __builtin_constant_p, if we know its argument ARG will
   evaluate to a constant.  */

static tree
fold_builtin_constant_p (tree arg)
{
  /* We return 1 for a numeric type that's known to be a constant
     value at compile-time or for an aggregate type that's a
     literal constant.  */

  /* If we know this is a constant, emit the constant of one.  */
  if (CONSTANT_CLASS_P (arg)
      || (TREE_CODE (arg) == CONSTRUCTOR
	  && TREE_CONSTANT (arg)))
    return integer_one_node;
  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (arg, 0);
      if (TREE_CODE (op) == STRING_CST
	  || (TREE_CODE (op) == ARRAY_REF
	      && integer_zerop (TREE_OPERAND (op, 1))
	      && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
	return integer_one_node;
    }

  /* If this expression has side effects, show we don't know it to be a
     constant.  Likewise if it's a pointer or aggregate type since in
     those case we only want literals, since those are only optimized
     when generating RTL, not later.
     And finally, if we are compiling an initializer, not code, we
     need to return a definite result now; there's not going to be any
     more optimization done.  */
  if (TREE_SIDE_EFFECTS (arg)
      || AGGREGATE_TYPE_P (TREE_TYPE (arg))
      || POINTER_TYPE_P (TREE_TYPE (arg))
      || folding_initializer
      || force_folding_builtin_constant_p)
    return integer_zero_node;

  return NULL_TREE;
}
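
/* Illustrative sketch (compiled out, not part of GCC itself): what the
   folding above means at the source level.  Constants and literal string
   addresses fold to 1; expressions with side effects, pointers and
   aggregates fold to 0 (immediately so inside initializers or when
   force_folding_builtin_constant_p is set).  */
#if 0
extern int n;

static int
example_constant_p (void)
{
  int a = __builtin_constant_p (3 * 4 + 1);	/* folds to 1 */
  int b = __builtin_constant_p ("abc");		/* folds to 1 */
  int c = __builtin_constant_p (n++);		/* side effects: folds to 0 */
  return a + b + c;
}
#endif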
/* Create builtin_expect or builtin_expect_with_probability
   with PRED and EXPECTED as its arguments and return it as a truthvalue.
   Fortran FE can also produce builtin_expect with PREDICTOR as third argument.
   builtin_expect_with_probability instead uses third argument as PROBABILITY
   value.  */

static tree
build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
				tree predictor, tree probability)
{
  tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;

  fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
			      : BUILT_IN_EXPECT_WITH_PROBABILITY);
  arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
  ret_type = TREE_TYPE (TREE_TYPE (fn));
  pred_type = TREE_VALUE (arg_types);
  expected_type = TREE_VALUE (TREE_CHAIN (arg_types));

  pred = fold_convert_loc (loc, pred_type, pred);
  expected = fold_convert_loc (loc, expected_type, expected);

  if (probability)
    call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
  else
    call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
				     predictor);

  return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
		 build_int_cst (ret_type, 0));
}
/* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3.  Return
   NULL_TREE if no simplification is possible.  */

static tree
fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
		     tree arg3)
{
  tree inner, fndecl, inner_arg0;
  enum tree_code code;

  /* Distribute the expected value over short-circuiting operators.
     See through the cast from truthvalue_type_node to long.  */
  inner_arg0 = arg0;
  while (CONVERT_EXPR_P (inner_arg0)
	 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
    inner_arg0 = TREE_OPERAND (inner_arg0, 0);

  /* If this is a builtin_expect within a builtin_expect keep the
     inner one.  See through a comparison against a constant.  It
     might have been added to create a thruthvalue.  */
  inner = inner_arg0;

  if (COMPARISON_CLASS_P (inner)
      && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
    inner = TREE_OPERAND (inner, 0);

  if (TREE_CODE (inner) == CALL_EXPR
      && (fndecl = get_callee_fndecl (inner))
      && (fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
	  || fndecl_built_in_p (fndecl, BUILT_IN_EXPECT_WITH_PROBABILITY)))
    return arg0;

  inner = inner_arg0;
  code = TREE_CODE (inner);
  if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
    {
      tree op0 = TREE_OPERAND (inner, 0);
      tree op1 = TREE_OPERAND (inner, 1);
      arg1 = save_expr (arg1);

      op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
      op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
      inner = build2 (code, TREE_TYPE (inner), op0, op1);

      return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
    }

  /* If the argument isn't invariant then there's nothing else we can do.  */
  if (!TREE_CONSTANT (inner_arg0))
    return NULL_TREE;

  /* If we expect that a comparison against the argument will fold to
     a constant return the constant.  In practice, this means a true
     constant or the address of a non-weak symbol.  */
  inner = inner_arg0;
  if (TREE_CODE (inner) == ADDR_EXPR)
    {
      do
	inner = TREE_OPERAND (inner, 0);
      while (TREE_CODE (inner) == COMPONENT_REF
	     || TREE_CODE (inner) == ARRAY_REF);
      if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
	return NULL_TREE;
    }

  /* Otherwise, ARG0 already has the proper type for the return value.  */
  return arg0;
}
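
/* Illustrative sketch (compiled out, not part of GCC itself): the
   short-circuit distribution performed by fold_builtin_expect.  An
   expectation on a TRUTH_ANDIF/ORIF expression is pushed down onto both
   operands, roughly as if the user had written the second form.  */
#if 0
extern int f (void), g (void);

static int
example_expect (void)
{
  if (__builtin_expect (f () && g (), 1))
    return 1;

  /* ... is folded approximately into ...  */
  if (__builtin_expect (f (), 1) && __builtin_expect (g (), 1))
    return 1;

  return 0;
}
#endif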
/* Fold a call to __builtin_classify_type with argument ARG.  */

static tree
fold_builtin_classify_type (tree arg)
{
  if (arg == 0)
    return build_int_cst (integer_type_node, no_type_class);

  return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
}
/* Fold a call EXPR (which may be null) to __builtin_strlen with argument
   ARG.  */

static tree
fold_builtin_strlen (location_t loc, tree expr, tree type, tree arg)
{
  if (!validate_arg (arg, POINTER_TYPE))
    return NULL_TREE;

  c_strlen_data lendata = { };
  tree len = c_strlen (arg, 0, &lendata);

  if (len)
    return fold_convert_loc (loc, type, len);

  /* TODO: Move this to gimple-ssa-warn-access once the pass runs
     also early enough to detect invalid reads in multimensional
     arrays and struct members.  */
  if (!lendata.decl)
    c_strlen (arg, 1, &lendata);

  if (lendata.decl)
    {
      if (EXPR_HAS_LOCATION (arg))
	loc = EXPR_LOCATION (arg);
      else if (loc == UNKNOWN_LOCATION)
	loc = input_location;
      warn_string_no_nul (loc, expr, "strlen", arg, lendata.decl);
    }

  return NULL_TREE;
}
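
/* Illustrative sketch (compiled out, not part of GCC itself): when the
   argument is a known NUL-terminated string, c_strlen yields a constant and
   the call folds away; the warning path above only triggers for arrays that
   are provably not NUL-terminated.  */
#if 0
static unsigned long
example_strlen (void)
{
  /* Folds to the constant 5 via c_strlen.  */
  return __builtin_strlen ("hello");
}
#endif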
/* Fold a call to __builtin_inf or __builtin_huge_val.  */

static tree
fold_builtin_inf (location_t loc, tree type, int warn)
{
  REAL_VALUE_TYPE real;

  /* __builtin_inff is intended to be usable to define INFINITY on all
     targets.  If an infinity is not available, INFINITY expands "to a
     positive constant of type float that overflows at translation
     time", footnote "In this case, using INFINITY will violate the
     constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
     Thus we pedwarn to ensure this constraint violation is
     diagnosed.  */
  if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
    pedwarn (loc, 0, "target format does not support infinity");

  real_inf (&real);
  return build_real (type, real);
}
/* Fold function call to builtin sincos, sincosf, or sincosl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_sincos (location_t loc,
		     tree arg0, tree arg1, tree arg2)
{
  tree type;
  tree fndecl, call = NULL_TREE;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  type = TREE_TYPE (arg0);

  /* Calculate the result when the argument is a constant.  */
  built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
  if (fn == END_BUILTINS)
    return NULL_TREE;

  /* Canonicalize sincos to cexpi.  */
  if (TREE_CODE (arg0) == REAL_CST)
    {
      tree complex_type = build_complex_type (type);
      call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
    }
  if (!call)
    {
      if (!targetm.libc_has_function (function_c99_math_complex, type)
	  || !builtin_decl_implicit_p (fn))
	return NULL_TREE;
      fndecl = builtin_decl_explicit (fn);
      call = build_call_expr_loc (loc, fndecl, 1, arg0);
      call = builtin_save_expr (call);
    }

  tree ptype = build_pointer_type (type);
  arg1 = fold_convert (ptype, arg1);
  arg2 = fold_convert (ptype, arg2);
  return build2 (COMPOUND_EXPR, void_type_node,
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg1),
			 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg2),
			 fold_build1_loc (loc, REALPART_EXPR, type, call)));
}
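
/* Illustrative sketch (compiled out, not part of GCC itself): sincos(x, &s, &c)
   is rewritten above into a cexpi-style computation whose imaginary and real
   parts are stored through the two pointers, so a user-level call such as this
   one becomes a single complex-exponential evaluation where the C99 complex
   libc functions are available.  */
#if 0
extern void sincos (double, double *, double *);

static void
example_sincos (double x, double *s, double *c)
{
  sincos (x, s, c);	/* canonicalized to cexpi (x), then split again */
}
#endif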
/* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
{
  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
				  arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg1)));
      tree ind2
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
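
/* Illustrative sketch (compiled out, not part of GCC itself): the len == 1
   case above reduces a memcmp call to a single byte subtraction.  */
#if 0
#include <string.h>

static int
example_memcmp1 (const void *a, const void *b)
{
  /* Folded to roughly:
       (int) *(const unsigned char *) a - (int) *(const unsigned char *) b  */
  return memcmp (a, b, 1);
}
#endif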
/* Fold a call to builtin isascii with argument ARG.  */

static tree
fold_builtin_isascii (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Transform isascii(c) -> ((c & ~0x7f) == 0).  */
  arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
		     build_int_cst (integer_type_node,
				    ~ (unsigned HOST_WIDE_INT) 0x7f));
  return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
			  arg, integer_zero_node);
}
/* Fold a call to builtin toascii with argument ARG.  */

static tree
fold_builtin_toascii (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Transform toascii(c) -> (c & 0x7f).  */
  return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
			  build_int_cst (integer_type_node, 0x7f));
}
/* Fold a call to builtin isdigit with argument ARG.  */

static tree
fold_builtin_isdigit (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9.  */
  /* According to the C standard, isdigit is unaffected by locale.
     However, it definitely is affected by the target character set.  */
  unsigned HOST_WIDE_INT target_digit0
    = lang_hooks.to_target_charset ('0');

  if (target_digit0 == 0)
    return NULL_TREE;

  arg = fold_convert_loc (loc, unsigned_type_node, arg);
  arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
		     build_int_cst (unsigned_type_node, target_digit0));
  return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
			  build_int_cst (unsigned_type_node, 9));
}
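
/* Illustrative sketch (compiled out, not part of GCC itself): the three
   ctype-style folds above written out as plain C, assuming the target
   character set places '0' at a non-zero code point.  */
#if 0
static int example_isascii (int c) { return (c & ~0x7f) == 0; }
static int example_toascii (int c) { return c & 0x7f; }
static int example_isdigit (int c) { return (unsigned) c - '0' <= 9; }
#endif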
/* Fold a call to fabs, fabsf or fabsl with argument ARG.  */

static tree
fold_builtin_fabs (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  arg = fold_convert_loc (loc, type, arg);
  return fold_build1_loc (loc, ABS_EXPR, type, arg);
}
/* Fold a call to abs, labs, llabs or imaxabs with argument ARG.  */

static tree
fold_builtin_abs (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  arg = fold_convert_loc (loc, type, arg);
  return fold_build1_loc (loc, ABS_EXPR, type, arg);
}
/* Fold a call to builtin carg(a+bi) -> atan2(b,a).  */

static tree
fold_builtin_carg (location_t loc, tree arg, tree type)
{
  if (validate_arg (arg, COMPLEX_TYPE)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
    {
      tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);

      if (atan2_fn)
	{
	  tree new_arg = builtin_save_expr (arg);
	  tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
	  tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
	  return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
	}
    }

  return NULL_TREE;
}
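
/* Illustrative sketch (compiled out, not part of GCC itself): carg(z) is
   folded above into atan2 of the imaginary and real parts.  */
#if 0
#include <complex.h>
#include <math.h>

static double
example_carg (double _Complex z)
{
  /* carg (z) becomes atan2 (cimag (z), creal (z)).  */
  return atan2 (cimag (z), creal (z));
}
#endif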
/* Fold a call to builtin frexp, we can assume the base is 2.  */

static tree
fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      tree frac, exp;

      switch (value->cl)
	{
	case rvc_zero:
	  /* For +-0, return (*exp = 0, +-0).  */
	  exp = integer_zero_node;
	  frac = arg0;
	  break;
	case rvc_nan:
	case rvc_inf:
	  /* For +-NaN or +-Inf, *exp is unspecified, return arg0.  */
	  return omit_one_operand_loc (loc, rettype, arg0, arg1);
	case rvc_normal:
	  {
	    /* Since the frexp function always expects base 2, and in
	       GCC normalized significands are already in the range
	       [0.5, 1.0), we have exactly what frexp wants.  */
	    REAL_VALUE_TYPE frac_rvt = *value;
	    SET_REAL_EXP (&frac_rvt, 0);
	    frac = build_real (rettype, frac_rvt);
	    exp = build_int_cst (integer_type_node, REAL_EXP (value));
	  }
	  break;
	default:
	  gcc_unreachable ();
	}

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
    }

  return NULL_TREE;
}
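
/* Illustrative sketch (compiled out, not part of GCC itself): with a constant
   argument the frexp fold above produces the compound expression
   (*exp = e, frac) directly; e.g. 6.0 = 0.75 * 2^3.  */
#if 0
#include <math.h>

static double
example_frexp (int *e)
{
  /* Folds to roughly (*e = 3, 0.75).  */
  return frexp (6.0, e);
}
#endif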
/* Fold a call to builtin modf.  */

static tree
fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      REAL_VALUE_TYPE trunc, frac;

      switch (value->cl)
	{
	case rvc_nan:
	case rvc_zero:
	  /* For +-NaN or +-0, return (*arg1 = arg0, arg0).  */
	  trunc = frac = *value;
	  break;
	case rvc_inf:
	  /* For +-Inf, return (*arg1 = arg0, +-0).  */
	  frac = dconst0;
	  frac.sign = value->sign;
	  trunc = *value;
	  break;
	default:
	  /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)).  */
	  real_trunc (&trunc, VOIDmode, value);
	  real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
	  /* If the original number was negative and already
	     integral, then the fractional part is -0.0.  */
	  if (value->sign && frac.cl == rvc_zero)
	    frac.sign = value->sign;
	  break;
	}

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
			      build_real (rettype, trunc));
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
			      build_real (rettype, frac));
    }

  return NULL_TREE;
}
/* Given a location LOC, an interclass builtin function decl FNDECL
   and its single argument ARG, return an folded expression computing
   the same, or NULL_TREE if we either couldn't or didn't want to fold
   (the latter happen if there's an RTL instruction available).  */

static tree
fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
{
  machine_mode mode;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
    return NULL_TREE;

  mode = TYPE_MODE (TREE_TYPE (arg));

  bool is_ibm_extended = MODE_COMPOSITE_P (mode);

  /* If there is no optab, try generic code.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
      tree result;

    CASE_FLT_FN (BUILT_IN_ISINF):
      {
	/* isinf(x) -> isgreater(fabs(x),DBL_MAX).  */
	tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
	tree type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	if (is_ibm_extended)
	  {
	    /* NaN and Inf are encoded in the high-order double value
	       only.  The low-order value is not significant.  */
	    type = double_type_node;
	    mode = DFmode;
	    arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
	  }
	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
	real_from_string (&r, buf);
	result = build_call_expr (isgr_fn, 2,
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	return result;
      }
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_ISFINITE:
      {
	/* isfinite(x) -> islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
	tree type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	if (is_ibm_extended)
	  {
	    /* NaN and Inf are encoded in the high-order double value
	       only.  The low-order value is not significant.  */
	    type = double_type_node;
	    mode = DFmode;
	    arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
	  }
	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
	real_from_string (&r, buf);
	result = build_call_expr (isle_fn, 2,
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	/*result = fold_build2_loc (loc, UNGT_EXPR,
				    TREE_TYPE (TREE_TYPE (fndecl)),
				    fold_build1_loc (loc, ABS_EXPR, type, arg),
				    build_real (type, r));
	  result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
				    TREE_TYPE (TREE_TYPE (fndecl)),
				    result);*/
	return result;
      }
    case BUILT_IN_ISNORMAL:
      {
	/* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
	   islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
	tree type = TREE_TYPE (arg);
	tree orig_arg, max_exp, min_exp;
	machine_mode orig_mode = mode;
	REAL_VALUE_TYPE rmax, rmin;
	char buf[128];

	orig_arg = arg = builtin_save_expr (arg);
	if (is_ibm_extended)
	  {
	    /* Use double to test the normal range of IBM extended
	       precision.  Emin for IBM extended precision is
	       different to emin for IEEE double, being 53 higher
	       since the low double exponent is at least 53 lower
	       than the high double exponent.  */
	    type = double_type_node;
	    mode = DFmode;
	    arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
	  }
	arg = fold_build1_loc (loc, ABS_EXPR, type, arg);

	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
	real_from_string (&rmax, buf);
	sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
	real_from_string (&rmin, buf);
	max_exp = build_real (type, rmax);
	min_exp = build_real (type, rmin);

	max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
	if (is_ibm_extended)
	  {
	    /* Testing the high end of the range is done just using
	       the high double, using the same test as isfinite().
	       For the subnormal end of the range we first test the
	       high double, then if its magnitude is equal to the
	       limit of 0x1p-969, we test whether the low double is
	       non-zero and opposite sign to the high double.  */
	    tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
	    tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
	    tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
	    tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
				       arg, min_exp);
	    tree as_complex = build1 (VIEW_CONVERT_EXPR,
				      complex_double_type_node, orig_arg);
	    tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
	    tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
	    tree zero = build_real (type, dconst0);
	    tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
	    tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
	    tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
	    tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
				      fold_build3 (COND_EXPR,
						   integer_type_node,
						   hilt, logt, lolt));
	    eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
				  eq_min, ok_lo);
	    min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
				   gt_min, eq_min);
	  }
	else
	  {
	    tree const isge_fn
	      = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
	    min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
	  }
	result = fold_build2 (BIT_AND_EXPR, integer_type_node,
			      max_exp, min_exp);
	return result;
      }
    default:
      break;
    }

  return NULL_TREE;
}
/* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
   ARG is the argument for the call.  */

static tree
fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  switch (builtin_index)
    {
    case BUILT_IN_ISINF:
      if (tree_expr_infinite_p (arg))
	return omit_one_operand_loc (loc, type, integer_one_node, arg);
      if (!tree_expr_maybe_infinite_p (arg))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);
      return NULL_TREE;

    case BUILT_IN_ISINF_SIGN:
      {
	/* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
	/* In a boolean context, GCC will fold the inner COND_EXPR to
	   1.  So e.g. "if (isinf_sign(x))" would be folded to just
	   "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
	tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
	tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
	tree tmp = NULL_TREE;

	arg = builtin_save_expr (arg);

	if (signbit_fn && isinf_fn)
	  {
	    tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
	    tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);

	    signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
					    signbit_call, integer_zero_node);
	    isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
					  isinf_call, integer_zero_node);

	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
				   integer_minus_one_node, integer_one_node);
	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
				   isinf_call, tmp,
				   integer_zero_node);
	  }

	return tmp;
      }

    case BUILT_IN_ISFINITE:
      if (tree_expr_finite_p (arg))
	return omit_one_operand_loc (loc, type, integer_one_node, arg);
      if (tree_expr_nan_p (arg) || tree_expr_infinite_p (arg))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);
      return NULL_TREE;

    case BUILT_IN_ISNAN:
      if (tree_expr_nan_p (arg))
	return omit_one_operand_loc (loc, type, integer_one_node, arg);
      if (!tree_expr_maybe_nan_p (arg))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      {
	bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
	if (is_ibm_extended)
	  {
	    /* NaN and Inf are encoded in the high-order double value
	       only.  The low-order value is not significant.  */
	    arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
	  }
      }
      arg = builtin_save_expr (arg);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);

    default:
      gcc_unreachable ();
    }
}
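
/* Illustrative sketch (compiled out, not part of GCC itself): the
   isinf_sign expansion above written as plain C.  */
#if 0
#include <math.h>

static int
example_isinf_sign (double x)
{
  /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0  */
  return isinf (x) ? (signbit (x) ? -1 : 1) : 0;
}
#endif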
/* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
   This builtin will generate code to return the appropriate floating
   point classification depending on the value of the floating point
   number passed in.  The possible return values must be supplied as
   int arguments to the call in the following order: FP_NAN, FP_INFINITE,
   FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipses is for exactly
   one floating point argument which is "type generic".  */

static tree
fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
{
  tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
    arg, type, res, tmp;
  machine_mode mode;
  REAL_VALUE_TYPE r;
  char buf[128];

  /* Verify the required arguments in the original call.  */
  if (nargs != 6
      || !validate_arg (args[0], INTEGER_TYPE)
      || !validate_arg (args[1], INTEGER_TYPE)
      || !validate_arg (args[2], INTEGER_TYPE)
      || !validate_arg (args[3], INTEGER_TYPE)
      || !validate_arg (args[4], INTEGER_TYPE)
      || !validate_arg (args[5], REAL_TYPE))
    return NULL_TREE;

  fp_nan = args[0];
  fp_infinite = args[1];
  fp_normal = args[2];
  fp_subnormal = args[3];
  fp_zero = args[4];
  arg = args[5];
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (type);
  arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));

  /* fpclassify(x) ->
       isnan(x) ? FP_NAN :
	 (fabs(x) == Inf ? FP_INFINITE :
	   (fabs(x) >= DBL_MIN ? FP_NORMAL :
	     (x == 0 ? FP_ZERO : FP_SUBNORMAL))).  */

  tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
			 build_real (type, dconst0));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
			 tmp, fp_zero, fp_subnormal);

  sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
  real_from_string (&r, buf);
  tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
			 arg, build_real (type, r));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);

  if (tree_expr_maybe_infinite_p (arg))
    {
      real_inf (&r);
      tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
			     build_real (type, r));
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
			     fp_infinite, res);
    }

  if (tree_expr_maybe_nan_p (arg))
    {
      tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
    }

  return res;
}
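
/* Illustrative sketch (compiled out, not part of GCC itself): the nested
   conditional built above, written as plain C for an IEEE double, with the
   FP_* values passed in the documented argument order.  */
#if 0
#include <float.h>
#include <math.h>

static int
example_fpclassify (double x)
{
  double ax = fabs (x);
  return isnan (x) ? FP_NAN
	 : ax == (double) INFINITY ? FP_INFINITE
	 : ax >= DBL_MIN ? FP_NORMAL
	 : x == 0.0 ? FP_ZERO
	 : FP_SUBNORMAL;
}
#endif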
/* Fold a call to an unordered comparison function such as
   __builtin_isgreater().  FNDECL is the FUNCTION_DECL for the function
   being called and ARG0 and ARG1 are the arguments for the call.
   UNORDERED_CODE and ORDERED_CODE are comparison codes that give
   the opposite of the desired result.  UNORDERED_CODE is used
   for modes that can hold NaNs and ORDERED_CODE is used for
   the rest.  */

static tree
fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
			    enum tree_code unordered_code,
			    enum tree_code ordered_code)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum tree_code code;
  tree type0, type1;
  enum tree_code code0, code1;
  tree cmp_type = NULL_TREE;

  type0 = TREE_TYPE (arg0);
  type1 = TREE_TYPE (arg1);

  code0 = TREE_CODE (type0);
  code1 = TREE_CODE (type1);

  if (code0 == REAL_TYPE && code1 == REAL_TYPE)
    /* Choose the wider of two real types.  */
    cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
      ? type0 : type1;
  else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
    cmp_type = type0;
  else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
    cmp_type = type1;

  arg0 = fold_convert_loc (loc, cmp_type, arg0);
  arg1 = fold_convert_loc (loc, cmp_type, arg1);

  if (unordered_code == UNORDERED_EXPR)
    {
      if (tree_expr_nan_p (arg0) || tree_expr_nan_p (arg1))
	return omit_two_operands_loc (loc, type, integer_one_node, arg0, arg1);
      if (!tree_expr_maybe_nan_p (arg0) && !tree_expr_maybe_nan_p (arg1))
	return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
    }

  code = (tree_expr_maybe_nan_p (arg0) || tree_expr_maybe_nan_p (arg1))
	 ? unordered_code : ordered_code;
  return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
			  fold_build2_loc (loc, code, type, arg0, arg1));
}
/* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
   arithmetics if it can never overflow, or into internal functions that
   return both result of arithmetics and overflowed boolean flag in
   a complex integer result, or some other check for overflow.
   Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
   checking part of that.  */

static tree
fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
			     tree arg0, tree arg1, tree arg2)
{
  enum internal_fn ifn = IFN_LAST;
  /* The code of the expression corresponding to the built-in.  */
  enum tree_code opcode = ERROR_MARK;
  bool ovf_only = false;

  switch (fcode)
    {
    case BUILT_IN_ADD_OVERFLOW_P:
      ovf_only = true;
      /* FALLTHRU */
    case BUILT_IN_ADD_OVERFLOW:
    case BUILT_IN_SADD_OVERFLOW:
    case BUILT_IN_SADDL_OVERFLOW:
    case BUILT_IN_SADDLL_OVERFLOW:
    case BUILT_IN_UADD_OVERFLOW:
    case BUILT_IN_UADDL_OVERFLOW:
    case BUILT_IN_UADDLL_OVERFLOW:
      opcode = PLUS_EXPR;
      ifn = IFN_ADD_OVERFLOW;
      break;
    case BUILT_IN_SUB_OVERFLOW_P:
      ovf_only = true;
      /* FALLTHRU */
    case BUILT_IN_SUB_OVERFLOW:
    case BUILT_IN_SSUB_OVERFLOW:
    case BUILT_IN_SSUBL_OVERFLOW:
    case BUILT_IN_SSUBLL_OVERFLOW:
    case BUILT_IN_USUB_OVERFLOW:
    case BUILT_IN_USUBL_OVERFLOW:
    case BUILT_IN_USUBLL_OVERFLOW:
      opcode = MINUS_EXPR;
      ifn = IFN_SUB_OVERFLOW;
      break;
    case BUILT_IN_MUL_OVERFLOW_P:
      ovf_only = true;
      /* FALLTHRU */
    case BUILT_IN_MUL_OVERFLOW:
    case BUILT_IN_SMUL_OVERFLOW:
    case BUILT_IN_SMULL_OVERFLOW:
    case BUILT_IN_SMULLL_OVERFLOW:
    case BUILT_IN_UMUL_OVERFLOW:
    case BUILT_IN_UMULL_OVERFLOW:
    case BUILT_IN_UMULLL_OVERFLOW:
      opcode = MULT_EXPR;
      ifn = IFN_MUL_OVERFLOW;
      break;
    default:
      gcc_unreachable ();
    }

  /* For the "generic" overloads, the first two arguments can have different
     types and the last argument determines the target type to use to check
     for overflow.  The arguments of the other overloads all have the same
     type.  */
  tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));

  /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
     arguments are constant, attempt to fold the built-in call into a constant
     expression indicating whether or not it detected an overflow.  */
  if (ovf_only
      && TREE_CODE (arg0) == INTEGER_CST
      && TREE_CODE (arg1) == INTEGER_CST)
    /* Perform the computation in the target type and check for overflow.  */
    return omit_one_operand_loc (loc, boolean_type_node,
				 arith_overflowed_p (opcode, type, arg0, arg1)
				 ? boolean_true_node : boolean_false_node,
				 arg2);

  tree intres, ovfres;
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      intres = fold_binary_loc (loc, opcode, type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type, arg1));
      if (TREE_OVERFLOW (intres))
	intres = drop_tree_overflow (intres);
      ovfres = (arith_overflowed_p (opcode, type, arg0, arg1)
		? boolean_true_node : boolean_false_node);
    }
  else
    {
      tree ctype = build_complex_type (type);
      tree call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
						arg0, arg1);
      tree tgt = save_expr (call);
      intres = build1_loc (loc, REALPART_EXPR, type, tgt);
      ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
      ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
    }

  if (ovf_only)
    return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);

  tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
  tree store
    = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
  return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
}
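
/* Illustrative sketch (compiled out, not part of GCC itself): the two
   shapes produced by the folding above.  With constant operands the
   _overflow_p form becomes a compile-time boolean; otherwise the call is
   lowered to an internal function returning a complex value whose real
   part is the result and whose imaginary part is the overflow flag.  */
#if 0
#include <limits.h>

static int
example_overflow (int a, int b, int *res)
{
  int ovf_const = __builtin_add_overflow_p (INT_MAX, 1, (int) 0); /* folds to 1 */
  int ovf_run = __builtin_add_overflow (a, b, res);	/* IFN_ADD_OVERFLOW */
  return ovf_const + ovf_run;
}
#endif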
/* Fold a call to __builtin_FILE to a constant string.  */

static inline tree
fold_builtin_FILE (location_t loc)
{
  if (const char *fname = LOCATION_FILE (loc))
    {
      /* The documentation says this builtin is equivalent to the preprocessor
	 __FILE__ macro so it appears appropriate to use the same file prefix
	 mappings.  */
      fname = remap_macro_filename (fname);
      return build_string_literal (strlen (fname) + 1, fname);
    }

  return build_string_literal (1, "");
}

/* Fold a call to __builtin_FUNCTION to a constant string.  */

static inline tree
fold_builtin_FUNCTION ()
{
  const char *name = "";

  if (current_function_decl)
    name = lang_hooks.decl_printable_name (current_function_decl, 0);

  return build_string_literal (strlen (name) + 1, name);
}

/* Fold a call to __builtin_LINE to an integer constant.  */

static inline tree
fold_builtin_LINE (location_t loc, tree type)
{
  return build_int_cst (type, LOCATION_LINE (loc));
}
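
/* Illustrative sketch (compiled out, not part of GCC itself): the three
   source-location builtins folded above behave like the corresponding
   preprocessor macros, but are evaluated at the call site's location.  */
#if 0
static const char *
example_where (int *line)
{
  *line = __builtin_LINE ();	/* folds to an integer constant */
  return __builtin_FILE ();	/* folds to a string literal */
}
#endif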
/* Fold a call to built-in function FNDECL with 0 arguments.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_0 (location_t loc, tree fndecl)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  switch (fcode)
    {
    case BUILT_IN_FILE:
      return fold_builtin_FILE (loc);

    case BUILT_IN_FUNCTION:
      return fold_builtin_FUNCTION ();

    case BUILT_IN_LINE:
      return fold_builtin_LINE (loc, type);

    CASE_FLT_FN (BUILT_IN_INF):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
    case BUILT_IN_INFD32:
    case BUILT_IN_INFD64:
    case BUILT_IN_INFD128:
      return fold_builtin_inf (loc, type, true);

    CASE_FLT_FN (BUILT_IN_HUGE_VAL):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
      return fold_builtin_inf (loc, type, false);

    case BUILT_IN_CLASSIFY_TYPE:
      return fold_builtin_classify_type (NULL_TREE);

    default:
      break;
    }
  return NULL_TREE;
}
/* Fold a call to built-in function FNDECL with 1 argument, ARG0.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_1 (location_t loc, tree expr, tree fndecl, tree arg0)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (TREE_CODE (arg0) == ERROR_MARK)
    return NULL_TREE;

  if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
    return ret;

  switch (fcode)
    {
    case BUILT_IN_CONSTANT_P:
      {
	tree val = fold_builtin_constant_p (arg0);

	/* Gimplification will pull the CALL_EXPR for the builtin out of
	   an if condition.  When not optimizing, we'll not CSE it back.
	   To avoid link error types of regressions, return false now.  */
	if (!val && !optimize)
	  val = integer_zero_node;

	return val;
      }

    case BUILT_IN_CLASSIFY_TYPE:
      return fold_builtin_classify_type (arg0);

    case BUILT_IN_STRLEN:
      return fold_builtin_strlen (loc, expr, type, arg0);

    CASE_FLT_FN (BUILT_IN_FABS):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
    case BUILT_IN_FABSD32:
    case BUILT_IN_FABSD64:
    case BUILT_IN_FABSD128:
      return fold_builtin_fabs (loc, arg0, type);

    case BUILT_IN_ABS:
    case BUILT_IN_LABS:
    case BUILT_IN_LLABS:
    case BUILT_IN_IMAXABS:
      return fold_builtin_abs (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_CONJ):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
      break;

    CASE_FLT_FN (BUILT_IN_CREAL):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
      break;

    CASE_FLT_FN (BUILT_IN_CIMAG):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
      break;

    CASE_FLT_FN (BUILT_IN_CARG):
      return fold_builtin_carg (loc, arg0, type);

    case BUILT_IN_ISASCII:
      return fold_builtin_isascii (loc, arg0);

    case BUILT_IN_TOASCII:
      return fold_builtin_toascii (loc, arg0);

    case BUILT_IN_ISDIGIT:
      return fold_builtin_isdigit (loc, arg0);

    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISFINITE:
      {
	tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
	if (ret)
	  return ret;
	return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
      }

    CASE_FLT_FN (BUILT_IN_ISINF):
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      {
	tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
	if (ret)
	  return ret;
	return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
      }

    case BUILT_IN_ISNORMAL:
      return fold_builtin_interclass_mathfn (loc, fndecl, arg0);

    case BUILT_IN_ISINF_SIGN:
      return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);

    CASE_FLT_FN (BUILT_IN_ISNAN):
    case BUILT_IN_ISNAND32:
    case BUILT_IN_ISNAND64:
    case BUILT_IN_ISNAND128:
      return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);

    case BUILT_IN_FREE:
      if (integer_zerop (arg0))
	return build_empty_stmt (loc);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Folds a call EXPR (which may be null) to built-in function FNDECL
   with 2 arguments, ARG0 and ARG1.  This function returns NULL_TREE
   if no simplification was possible.  */

static tree
fold_builtin_2 (location_t loc, tree expr, tree fndecl, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (TREE_CODE (arg0) == ERROR_MARK
      || TREE_CODE (arg1) == ERROR_MARK)
    return NULL_TREE;

  if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
    return ret;

  switch (fcode)
    {
    CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
    CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, POINTER_TYPE))
	return do_mpfr_lgamma_r (arg0, arg1, type);
      break;

    CASE_FLT_FN (BUILT_IN_FREXP):
      return fold_builtin_frexp (loc, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_MODF):
      return fold_builtin_modf (loc, arg0, arg1, type);

    case BUILT_IN_STRSPN:
      return fold_builtin_strspn (loc, expr, arg0, arg1);

    case BUILT_IN_STRCSPN:
      return fold_builtin_strcspn (loc, expr, arg0, arg1);

    case BUILT_IN_STRPBRK:
      return fold_builtin_strpbrk (loc, expr, arg0, arg1, type);

    case BUILT_IN_EXPECT:
      return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);

    case BUILT_IN_ISGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNLE_EXPR, LE_EXPR);
    case BUILT_IN_ISGREATEREQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNLT_EXPR, LT_EXPR);
    case BUILT_IN_ISLESS:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNGE_EXPR, GE_EXPR);
    case BUILT_IN_ISLESSEQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNGT_EXPR, GT_EXPR);
    case BUILT_IN_ISLESSGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
    case BUILT_IN_ISUNORDERED:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNORDERED_EXPR,
					 NOP_EXPR);

      /* We do the folding for va_start in the expander.  */
    case BUILT_IN_VA_START:
      break;

    case BUILT_IN_OBJECT_SIZE:
      return fold_builtin_object_size (arg0, arg1);

    case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
      return fold_builtin_atomic_always_lock_free (arg0, arg1);

    case BUILT_IN_ATOMIC_IS_LOCK_FREE:
      return fold_builtin_atomic_is_lock_free (arg0, arg1);

    default:
      break;
    }
  return NULL_TREE;
}
/* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
   and ARG2.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_3 (location_t loc, tree fndecl,
		tree arg0, tree arg1, tree arg2)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (TREE_CODE (arg0) == ERROR_MARK
      || TREE_CODE (arg1) == ERROR_MARK
      || TREE_CODE (arg2) == ERROR_MARK)
    return NULL_TREE;

  if (tree ret = fold_const_call (as_combined_fn (fcode), type,
				  arg0, arg1, arg2))
    return ret;

  switch (fcode)
    {

    CASE_FLT_FN (BUILT_IN_SINCOS):
      return fold_builtin_sincos (loc, arg0, arg1, arg2);

    CASE_FLT_FN (BUILT_IN_REMQUO):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, REAL_TYPE)
	  && validate_arg (arg2, POINTER_TYPE))
	return do_mpfr_remquo (arg0, arg1, arg2);
      break;

    case BUILT_IN_MEMCMP:
      return fold_builtin_memcmp (loc, arg0, arg1, arg2);

    case BUILT_IN_EXPECT:
      return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);

    case BUILT_IN_EXPECT_WITH_PROBABILITY:
      return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg2);

    case BUILT_IN_ADD_OVERFLOW:
    case BUILT_IN_SUB_OVERFLOW:
    case BUILT_IN_MUL_OVERFLOW:
    case BUILT_IN_ADD_OVERFLOW_P:
    case BUILT_IN_SUB_OVERFLOW_P:
    case BUILT_IN_MUL_OVERFLOW_P:
    case BUILT_IN_SADD_OVERFLOW:
    case BUILT_IN_SADDL_OVERFLOW:
    case BUILT_IN_SADDLL_OVERFLOW:
    case BUILT_IN_SSUB_OVERFLOW:
    case BUILT_IN_SSUBL_OVERFLOW:
    case BUILT_IN_SSUBLL_OVERFLOW:
    case BUILT_IN_SMUL_OVERFLOW:
    case BUILT_IN_SMULL_OVERFLOW:
    case BUILT_IN_SMULLL_OVERFLOW:
    case BUILT_IN_UADD_OVERFLOW:
    case BUILT_IN_UADDL_OVERFLOW:
    case BUILT_IN_UADDLL_OVERFLOW:
    case BUILT_IN_USUB_OVERFLOW:
    case BUILT_IN_USUBL_OVERFLOW:
    case BUILT_IN_USUBLL_OVERFLOW:
    case BUILT_IN_UMUL_OVERFLOW:
    case BUILT_IN_UMULL_OVERFLOW:
    case BUILT_IN_UMULLL_OVERFLOW:
      return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);

    default:
      break;
    }
  return NULL_TREE;
}
/* Folds a call EXPR (which may be null) to built-in function FNDECL.
   ARGS is an array of NARGS arguments.  IGNORE is true if the result
   of the function call is ignored.  This function returns NULL_TREE
   if no simplification was possible.  */

static tree
fold_builtin_n (location_t loc, tree expr, tree fndecl, tree *args,
		int nargs, bool)
{
  tree ret = NULL_TREE;

  switch (nargs)
    {
    case 0:
      ret = fold_builtin_0 (loc, fndecl);
      break;
    case 1:
      ret = fold_builtin_1 (loc, expr, fndecl, args[0]);
      break;
    case 2:
      ret = fold_builtin_2 (loc, expr, fndecl, args[0], args[1]);
      break;
    case 3:
      ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
      break;
    default:
      ret = fold_builtin_varargs (loc, fndecl, args, nargs);
      break;
    }
  if (ret)
    {
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      return ret;
    }
  return NULL_TREE;
}
/* Construct a new CALL_EXPR to FNDECL using the tail of the argument
   list ARGS along with N new arguments in NEWARGS.  SKIP is the number
   of arguments in ARGS to be omitted.  OLDNARGS is the number of
   elements in ARGS.  */

static tree
rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
			  int skip, tree fndecl, int n, va_list newargs)
{
  int nargs = oldnargs - skip + n;
  tree *buffer;

  if (n > 0)
    {
      int i, j;

      buffer = XALLOCAVEC (tree, nargs);
      for (i = 0; i < n; i++)
	buffer[i] = va_arg (newargs, tree);
      for (j = skip; j < oldnargs; j++, i++)
	buffer[i] = args[j];
    }
  else
    buffer = args + skip;

  return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
}
/* Return true if FNDECL shouldn't be folded right now.
   If a built-in function has an inline attribute always_inline
   wrapper, defer folding it after always_inline functions have
   been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
   might not be performed.  */

bool
avoid_folding_inline_builtin (tree fndecl)
{
  return (DECL_DECLARED_INLINE_P (fndecl)
	  && DECL_DISREGARD_INLINE_LIMITS (fndecl)
	  && cfun
	  && !cfun->always_inline_functions_inlined
	  && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
}
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_expr (location_t loc, tree exp, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = get_callee_fndecl (exp);
  if (fndecl && fndecl_built_in_p (fndecl)
      /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
	 yet.  Defer folding until we see all the arguments
	 (after inlining).  */
      && !CALL_EXPR_VA_ARG_PACK (exp))
    {
      int nargs = call_expr_nargs (exp);

      /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
	 instead last argument is __builtin_va_arg_pack ().  Defer folding
	 even in that case, until arguments are finalized.  */
      if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
	{
	  tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
	  if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
	    return NULL_TREE;
	}

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;

      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
				     CALL_EXPR_ARGP (exp), ignore);
      else
	{
	  tree *args = CALL_EXPR_ARGP (exp);
	  ret = fold_builtin_n (loc, exp, fndecl, args, nargs, ignore);
	  if (ret)
	    return ret;
	}
    }
  return NULL_TREE;
}
/* Fold a CALL_EXPR with type TYPE with FN as the function expression.
   N arguments are passed in the array ARGARRAY.  Return a folded
   expression or NULL_TREE if no simplification was possible.  */

tree
fold_builtin_call_array (location_t loc, tree,
			 tree fn,
			 int n,
			 tree *argarray)
{
  if (TREE_CODE (fn) != ADDR_EXPR)
    return NULL_TREE;

  tree fndecl = TREE_OPERAND (fn, 0);
  if (TREE_CODE (fndecl) == FUNCTION_DECL
      && fndecl_built_in_p (fndecl))
    {
      /* If last argument is __builtin_va_arg_pack (), arguments to this
	 function are not finalized yet.  Defer folding until they are.  */
      if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
	{
	  tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
	  if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
	    return NULL_TREE;
	}
      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	return targetm.fold_builtin (fndecl, n, argarray, false);
      else
	return fold_builtin_n (loc, NULL_TREE, fndecl, argarray, n, false);
    }

  return NULL_TREE;
}
/* Construct a new CALL_EXPR using the tail of the argument list of EXP
   along with N new arguments specified as the "..." parameters.  SKIP
   is the number of arguments in EXP to be omitted.  This function is used
   to do varargs-to-varargs transformations.  */

static tree
rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
{
  va_list ap;
  tree t;

  va_start (ap, n);
  t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
				CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
  va_end (ap);

  return t;
}
/* Validate a single argument ARG against a tree code CODE representing
   a type.  Return true when argument is valid.  */

static bool
validate_arg (const_tree arg, enum tree_code code)
{
  if (!arg)
    return false;
  else if (code == POINTER_TYPE)
    return POINTER_TYPE_P (TREE_TYPE (arg));
  else if (code == INTEGER_TYPE)
    return INTEGRAL_TYPE_P (TREE_TYPE (arg));
  return code == TREE_CODE (TREE_TYPE (arg));
}
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipses, otherwise the last specifier must be a
   VOID_TYPE.

   This is the GIMPLE version of validate_arglist.  Eventually we want to
   completely convert builtins.c to work from GIMPLEs and the tree based
   validate_arglist will then be removed.  */

bool
validate_gimple_arglist (const gcall *call, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_tree arg;
  size_t i;

  va_start (ap, call);
  i = 0;

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
	{
	case 0:
	  /* This signifies an ellipses, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = (i == gimple_call_num_args (call));
	  goto end;
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = gimple_call_arg (call, i++);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}
/* Default target-specific builtin expander that does nothing.  */

rtx
default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
			rtx target ATTRIBUTE_UNUSED,
			rtx subtarget ATTRIBUTE_UNUSED,
			machine_mode mode ATTRIBUTE_UNUSED,
			int ignore ATTRIBUTE_UNUSED)
{
  return NULL_RTX;
}
/* Returns true if EXP represents data that would potentially reside
   in a readonly section.  */

static bool
readonly_data_expr (tree exp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) != ADDR_EXPR)
    return false;

  exp = get_base_address (TREE_OPERAND (exp, 0));
  if (!exp)
    return false;

  /* Make sure we call decl_readonly_section only for trees it
     can handle (since it returns true for everything it doesn't
     know).  */
  if (TREE_CODE (exp) == STRING_CST
      || TREE_CODE (exp) == CONSTRUCTOR
      || (VAR_P (exp) && TREE_STATIC (exp)))
    return decl_readonly_section (exp, 0);
  else
    return false;
}
/* Simplify a call to the strpbrk builtin.  S1 and S2 are the arguments
   to the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strpbrk (location_t loc, tree, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;

  tree fn;
  const char *p1, *p2;

  p2 = c_getstr (s2);
  if (p2 == NULL)
    return NULL_TREE;

  p1 = c_getstr (s1);
  if (p1 != NULL)
    {
      const char *r = strpbrk (p1, p2);
      tree tem;

      if (r == NULL)
	return build_int_cst (TREE_TYPE (s1), 0);

      /* Return an offset into the constant string argument.  */
      tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
      return fold_convert_loc (loc, type, tem);
    }

  if (p2[0] == '\0')
    /* strpbrk(x, "") == NULL.
       Evaluate and ignore s1 in case it had side-effects.  */
    return omit_one_operand_loc (loc, type, integer_zero_node, s1);

  if (p2[1] != '\0')
    return NULL_TREE;  /* Really call strpbrk.  */

  fn = builtin_decl_implicit (BUILT_IN_STRCHR);
  if (!fn)
    return NULL_TREE;

  /* New argument list transforming strpbrk(s1, s2) to
     strchr(s1, s2[0]).  */
  return build_call_expr_loc (loc, fn, 2, s1,
			      build_int_cst (integer_type_node, p2[0]));
}
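/* Illustrative sketch of the strpbrk simplifications above, on
   hypothetical user code:

     strpbrk (s, "")       -> (char *) 0        (s still evaluated)
     strpbrk ("abcd", "c") -> "abcd" + 2        (both arguments constant)
     strpbrk (s, "c")      -> strchr (s, 'c')   (single-character set)

   Any other form is left to the library implementation.  */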
/* Simplify a call to the strspn builtin.  S1 and S2 are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strspn (location_t loc, tree expr, tree s1, tree s2)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;

  if (!check_nul_terminated_array (expr, s1)
      || !check_nul_terminated_array (expr, s2))
    return NULL_TREE;

  const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);

  /* If either argument is "", return NULL_TREE.  */
  if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
    /* Evaluate and ignore both arguments in case either one has
       side-effects.  */
    return omit_two_operands_loc (loc, size_type_node, size_zero_node,
				  s1, s2);
  return NULL_TREE;
}
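/* Illustrative sketch: when either string is known to be empty,

     strspn (s, "")  -> (size_t) 0
     strspn ("", s)  -> (size_t) 0

   with both arguments kept in a COMPOUND_EXPR chain so their side effects
   are still evaluated.  */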
/* Simplify a call to the strcspn builtin.  S1 and S2 are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strcspn (location_t loc, tree expr, tree s1, tree s2)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;

  if (!check_nul_terminated_array (expr, s1)
      || !check_nul_terminated_array (expr, s2))
    return NULL_TREE;

  /* If the first argument is "", return NULL_TREE.  */
  const char *p1 = c_getstr (s1);
  if (p1 && *p1 == '\0')
    {
      /* Evaluate and ignore argument s2 in case it has
	 side-effects.  */
      return omit_one_operand_loc (loc, size_type_node,
				   size_zero_node, s2);
    }

  /* If the second argument is "", return __builtin_strlen(s1).  */
  const char *p2 = c_getstr (s2);
  if (p2 && *p2 == '\0')
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);

      /* If the replacement _DECL isn't initialized, don't do the
	 transformation.  */
      if (!fn)
	return NULL_TREE;

      return build_call_expr_loc (loc, fn, 1, s1);
    }
  return NULL_TREE;
}
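/* Illustrative sketch of the strcspn simplifications above:

     strcspn ("", s)  -> (size_t) 0      (s still evaluated)
     strcspn (s, "")  -> strlen (s)      (when strlen is available)

   Everything else is left to the library.  */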
/* Fold the next_arg or va_start call EXP.  Returns true if there was an error
   produced.  False otherwise.  This is done so that we don't output the error
   or warning twice or three times.  */

bool
fold_builtin_next_arg (tree exp, bool va_start_p)
{
  tree fntype = TREE_TYPE (current_function_decl);
  int nargs = call_expr_nargs (exp);
  tree arg;
  /* There is good chance the current input_location points inside the
     definition of the va_start macro (perhaps on the token for
     builtin) in a system header, so warnings will not be emitted.
     Use the location in real source code.  */
  location_t current_location =
    linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
					      NULL);

  if (!stdarg_p (fntype))
    {
      error ("%<va_start%> used in function with fixed arguments");
      return true;
    }

  if (va_start_p)
    {
      if (va_start_p && (nargs != 2))
	{
	  error ("wrong number of arguments to function %<va_start%>");
	  return true;
	}
      arg = CALL_EXPR_ARG (exp, 1);
    }
  /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
     when we checked the arguments and if needed issued a warning.  */
  else
    {
      if (nargs == 0)
	{
	  /* Evidently an out of date version of <stdarg.h>; can't validate
	     va_start's second argument, but can still work as intended.  */
	  warning_at (current_location,
		      OPT_Wvarargs,
		      "%<__builtin_next_arg%> called without an argument");
	  return true;
	}
      else if (nargs > 1)
	{
	  error ("wrong number of arguments to function %<__builtin_next_arg%>");
	  return true;
	}
      arg = CALL_EXPR_ARG (exp, 0);
    }

  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_VAR (arg))
    arg = SSA_NAME_VAR (arg);

  /* We destructively modify the call to be __builtin_va_start (ap, 0)
     or __builtin_next_arg (0) the first time we see it, after checking
     the arguments and if needed issuing a warning.  */
  if (!integer_zerop (arg))
    {
      tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));

      /* Strip off all nops for the sake of the comparison.  This
	 is not quite the same as STRIP_NOPS.  It does more.
	 We must also strip off INDIRECT_EXPR for C++ reference
	 parameters.  */
      while (CONVERT_EXPR_P (arg)
	     || TREE_CODE (arg) == INDIRECT_REF)
	arg = TREE_OPERAND (arg, 0);
      if (arg != last_parm)
	{
	  /* FIXME: Sometimes with the tree optimizers we can get the
	     not the last argument even though the user used the last
	     argument.  We just warn and set the arg to be the last
	     argument so that we will get wrong-code because of
	     it.  */
	  warning_at (current_location,
		      OPT_Wvarargs,
		      "second parameter of %<va_start%> not last named argument");
	}

      /* Undefined by C99 7.15.1.4p4 (va_start):
	 "If the parameter parmN is declared with the register storage
	 class, with a function or array type, or with a type that is
	 not compatible with the type that results after application of
	 the default argument promotions, the behavior is undefined."  */
      else if (DECL_REGISTER (arg))
	{
	  warning_at (current_location,
		      OPT_Wvarargs,
		      "undefined behavior when second parameter of "
		      "%<va_start%> is declared with %<register%> storage");
	}

      /* We want to verify the second parameter just once before the tree
	 optimizers are run and then avoid keeping it in the tree,
	 as otherwise we could warn even for correct code like:
	 void foo (int i, ...)
	 { va_list ap; i++; va_start (ap, i); va_end (ap); }  */
      if (va_start_p)
	CALL_EXPR_ARG (exp, 1) = integer_zero_node;
      else
	CALL_EXPR_ARG (exp, 0) = integer_zero_node;
    }
  return false;
}
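/* Illustrative sketch of the checks above, on hypothetical user code:

     void f (int a, int b, ...)
     {
       va_list ap;
       va_start (ap, a);    /+ warning: second parameter of va_start
			       not last named argument +/
       va_end (ap);
     }

   After the check, the second argument is rewritten to 0 so later passes
   do not diagnose the same call again.  */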
/* Expand a call EXP to __builtin_object_size.  */

static rtx
expand_builtin_object_size (tree exp)
{
  tree ost;
  int object_size_type;
  tree fndecl = get_callee_fndecl (exp);

  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      error ("first argument of %qD must be a pointer, second integer constant",
	     fndecl);
      expand_builtin_trap ();
      return const0_rtx;
    }

  ost = CALL_EXPR_ARG (exp, 1);
  STRIP_NOPS (ost);

  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    {
      error ("last argument of %qD is not integer constant between 0 and 3",
	     fndecl);
      expand_builtin_trap ();
      return const0_rtx;
    }

  object_size_type = tree_to_shwi (ost);

  return object_size_type < 2 ? constm1_rtx : const0_rtx;
}
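/* Sketch of the fallback values produced above when the object size is
   still unknown at expansion time:

     __builtin_object_size (p, 0)  ->  (size_t) -1
     __builtin_object_size (p, 1)  ->  (size_t) -1
     __builtin_object_size (p, 2)  ->  (size_t) 0
     __builtin_object_size (p, 3)  ->  (size_t) 0

   i.e. "unknown" is the maximum for types 0 and 1 and the minimum for
   types 2 and 3.  */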
/* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   FCODE is the BUILT_IN_* to use.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
			   enum built_in_function fcode)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE,
			 fcode == BUILT_IN_MEMSET_CHK
			 ? INTEGER_TYPE : POINTER_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);
  tree size = CALL_EXPR_ARG (exp, 3);

  /* FIXME: Set access mode to write only for memset et al.  */
  bool sizes_ok = check_access (exp, len, /*maxread=*/NULL_TREE,
				/*srcstr=*/NULL_TREE, size, access_read_write);

  if (!tree_fits_uhwi_p (size))
    return NULL_RTX;

  if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
    {
      /* Avoid transforming the checking call to an ordinary one when
	 an overflow has been detected or when the call couldn't be
	 validated because the size is not constant.  */
      if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
	return NULL_RTX;

      tree fn = NULL_TREE;
      /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
	 mem{cpy,pcpy,move,set} is available.  */
      switch (fcode)
	{
	case BUILT_IN_MEMCPY_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
	  break;
	case BUILT_IN_MEMPCPY_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
	  break;
	case BUILT_IN_MEMMOVE_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
	  break;
	case BUILT_IN_MEMSET_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMSET);
	  break;
	default:
	  break;
	}

      if (!fn)
	return NULL_RTX;

      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_expr (fn, target, mode, EXPAND_NORMAL);
    }
  else if (fcode == BUILT_IN_MEMSET_CHK)
    return NULL_RTX;
  else
    {
      unsigned int dest_align = get_pointer_alignment (dest);

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If SRC and DEST are the same (and not volatile), do nothing.  */
      if (operand_equal_p (src, dest, 0))
	{
	  if (fcode != BUILT_IN_MEMPCPY_CHK)
	    {
	      /* Evaluate and ignore LEN in case it has side-effects.  */
	      expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
	      return expand_expr (dest, target, mode, EXPAND_NORMAL);
	    }

	  tree expr = fold_build_pointer_plus (dest, len);
	  return expand_expr (expr, target, mode, EXPAND_NORMAL);
	}

      /* __memmove_chk special case.  */
      if (fcode == BUILT_IN_MEMMOVE_CHK)
	{
	  unsigned int src_align = get_pointer_alignment (src);

	  if (src_align == 0)
	    return NULL_RTX;

	  /* If src is categorized for a readonly section we can use
	     normal __memcpy_chk.  */
	  if (readonly_data_expr (src))
	    {
	      tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return NULL_RTX;
	      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
					  dest, src, len, size);
	      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
	      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
	      return expand_expr (fn, target, mode, EXPAND_NORMAL);
	    }
	}
      return NULL_RTX;
    }
}
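/* Illustrative sketch of the transformation above, on hypothetical user
   code compiled with fortification enabled:

     char buf[8];
     __builtin___memcpy_chk (buf, src, 4, __builtin_object_size (buf, 0));
       -> memcpy (buf, src, 4)        (4 <= 8, the check is statically ok)

     __builtin___memcpy_chk (buf, src, 16, __builtin_object_size (buf, 0));
       -> kept as the checking call   (overflow detected; the runtime
					check can then abort)  */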
/* Emit warning if a buffer overflow is detected at compile time.  */

static void
maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
{
  /* The source string.  */
  tree srcstr = NULL_TREE;
  /* The size of the destination object returned by __builtin_object_size.  */
  tree objsize = NULL_TREE;
  /* The string that is being concatenated with (as in __strcat_chk)
     or null if it isn't.  */
  tree catstr = NULL_TREE;
  /* The maximum length of the source sequence in a bounded operation
     (such as __strncat_chk) or null if the operation isn't bounded
     (such as __strcat_chk).  */
  tree maxread = NULL_TREE;
  /* The exact size of the access (such as in __strncpy_chk).  */
  tree size = NULL_TREE;
  /* The access by the function that's checked.  Except for snprintf
     both writing and reading is checked.  */
  access_mode mode = access_read_write;

  switch (fcode)
    {
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      srcstr = CALL_EXPR_ARG (exp, 1);
      objsize = CALL_EXPR_ARG (exp, 2);
      break;

    case BUILT_IN_STRCAT_CHK:
      /* For __strcat_chk the warning will be emitted only if overflowing
	 by at least strlen (dest) + 1 bytes.  */
      catstr = CALL_EXPR_ARG (exp, 0);
      srcstr = CALL_EXPR_ARG (exp, 1);
      objsize = CALL_EXPR_ARG (exp, 2);
      break;

    case BUILT_IN_STRNCAT_CHK:
      catstr = CALL_EXPR_ARG (exp, 0);
      srcstr = CALL_EXPR_ARG (exp, 1);
      maxread = CALL_EXPR_ARG (exp, 2);
      objsize = CALL_EXPR_ARG (exp, 3);
      break;

    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      srcstr = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 2);
      objsize = CALL_EXPR_ARG (exp, 3);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      maxread = CALL_EXPR_ARG (exp, 1);
      objsize = CALL_EXPR_ARG (exp, 3);
      /* The only checked access is the write to the destination.  */
      mode = access_write_only;
      break;

    default:
      gcc_unreachable ();
    }

  if (catstr && maxread)
    {
      /* Check __strncat_chk.  There is no way to determine the length
	 of the string to which the source string is being appended so
	 just warn when the length of the source string is not known.  */
      check_strncat_sizes (exp, objsize);
      return;
    }

  check_access (exp, size, maxread, srcstr, objsize, mode);
}
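/* Illustrative sketch: for hypothetical user code like

     char d[4];
     __builtin___strcpy_chk (d, "overflow", __builtin_object_size (d, 1));

   SRCSTR is the string literal and OBJSIZE is 4, so check_access can
   report the buffer overflow at compile time.  */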
/* Emit warning if a buffer overflow is detected at compile time
   in __sprintf_chk/__vsprintf_chk calls.  */

static void
maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
{
  tree size, len, fmt;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return;
  size = CALL_EXPR_ARG (exp, 2);
  fmt = CALL_EXPR_ARG (exp, 3);

  if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
    return;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return;

  if (!init_target_chars ())
    return;

  /* If the format doesn't contain % args or %%, we know its size.  */
  if (strchr (fmt_str, target_percent) == 0)
    len = build_int_cstu (size_type_node, strlen (fmt_str));
  /* If the format is "%s" and first ... argument is a string literal,
     we know it too.  */
  else if (fcode == BUILT_IN_SPRINTF_CHK
	   && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree arg;

      if (nargs < 5)
	return;
      arg = CALL_EXPR_ARG (exp, 4);
      if (! POINTER_TYPE_P (TREE_TYPE (arg)))
	return;

      len = c_strlen (arg, 1);
      if (!len || ! tree_fits_uhwi_p (len))
	return;
    }
  else
    return;

  /* Add one for the terminating nul.  */
  len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);

  check_access (exp, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, len, size,
		access_write_only);
}
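/* Illustrative sketch: for hypothetical user code like

     char d[4];
     __builtin___sprintf_chk (d, 0, __builtin_object_size (d, 0),
			      "%s", "hello");

   the format and its argument are both literals, so LEN is 5 plus one for
   the terminating nul, which exceeds the destination size 4 and triggers
   the warning.  */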
/* Fold a call to __builtin_object_size with arguments PTR and OST,
   if possible.  */

static tree
fold_builtin_object_size (tree ptr, tree ost)
{
  unsigned HOST_WIDE_INT bytes;
  int object_size_type;

  if (!validate_arg (ptr, POINTER_TYPE)
      || !validate_arg (ost, INTEGER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (ost);

  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    return NULL_TREE;

  object_size_type = tree_to_shwi (ost);

  /* __builtin_object_size doesn't evaluate side-effects in its arguments;
     if there are any side-effects, it returns (size_t) -1 for types 0 and 1
     and (size_t) 0 for types 2 and 3.  */
  if (TREE_SIDE_EFFECTS (ptr))
    return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      compute_builtin_object_size (ptr, object_size_type, &bytes);
      if (wi::fits_to_tree_p (bytes, size_type_node))
	return build_int_cstu (size_type_node, bytes);
    }
  else if (TREE_CODE (ptr) == SSA_NAME)
    {
      /* If object size is not known yet, delay folding until
	 later.  Maybe subsequent passes will help determining
	 it.  */
      if (compute_builtin_object_size (ptr, object_size_type, &bytes)
	  && wi::fits_to_tree_p (bytes, size_type_node))
	return build_int_cstu (size_type_node, bytes);
    }

  return NULL_TREE;
}
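/* Illustrative sketch of the side-effect rule above:

     __builtin_object_size (f (), 0)  ->  (size_t) -1
     __builtin_object_size (f (), 2)  ->  (size_t) 0

   The call to f () is not evaluated; the built-in only reports the
   "unknown" value appropriate for the requested type.  */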
/* Builtins with folding operations that operate on "..." arguments
   need special handling; we need to store the arguments in a convenient
   data structure before attempting any folding.  Fortunately there are
   only a few builtins that fall into this category.  FNDECL is the
   function, EXP is the CALL_EXPR for the call.  */

static tree
fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  tree ret = NULL_TREE;

  switch (fcode)
    {
    case BUILT_IN_FPCLASSIFY:
      ret = fold_builtin_fpclassify (loc, args, nargs);
      break;

    default:
      break;
    }
  if (ret)
    {
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      suppress_warning (ret);
      return ret;
    }
  return NULL_TREE;
}
/* Initialize format string characters in the target charset.  */

bool
init_target_chars (void)
{
  static bool init;
  if (!init)
    {
      target_newline = lang_hooks.to_target_charset ('\n');
      target_percent = lang_hooks.to_target_charset ('%');
      target_c = lang_hooks.to_target_charset ('c');
      target_s = lang_hooks.to_target_charset ('s');
      if (target_newline == 0 || target_percent == 0 || target_c == 0
	  || target_s == 0)
	return false;

      target_percent_c[0] = target_percent;
      target_percent_c[1] = target_c;
      target_percent_c[2] = '\0';

      target_percent_s[0] = target_percent;
      target_percent_s[1] = target_s;
      target_percent_s[2] = '\0';

      target_percent_s_newline[0] = target_percent;
      target_percent_s_newline[1] = target_s;
      target_percent_s_newline[2] = target_newline;
      target_percent_s_newline[3] = '\0';

      init = true;
    }
  return true;
}
/* Helper function for do_mpfr_arg*().  Ensure M is a normal number
   and no overflow/underflow occurred.  INEXACT is true if M was not
   exactly calculated.  TYPE is the tree type for the result.  This
   function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail.  */

static tree
do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
      && (!flag_rounding_math || !inexact))
    {
      REAL_VALUE_TYPE rr;

      real_from_mpfr (&rr, m, type, MPFR_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpfr_t is not, then we underflowed in the
	 conversion.  */
      if (real_isfinite (&rr)
	  && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
	{
	  REAL_VALUE_TYPE rmode;

	  real_convert (&rmode, TYPE_MODE (type), &rr);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (real_identical (&rmode, &rr))
	    return build_real (type, rmode);
	}
    }
  return NULL_TREE;
}
/* Helper function for do_mpc_arg*().  Ensure M is a normal complex
   number and no overflow/underflow occurred.  INEXACT is true if M
   was not exactly calculated.  TYPE is the tree type for the result.
   This function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail; if
   FORCE_CONVERT is true, then bypass the checks.  */

static tree
do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (force_convert
      || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
	  && !mpfr_overflow_p () && !mpfr_underflow_p ()
	  && (!flag_rounding_math || !inexact)))
    {
      REAL_VALUE_TYPE re, im;

      real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), MPFR_RNDN);
      real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), MPFR_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpfr_t is not, then we underflowed in the
	 conversion.  */
      if (force_convert
	  || (real_isfinite (&re) && real_isfinite (&im)
	      && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
	      && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
	{
	  REAL_VALUE_TYPE re_mode, im_mode;

	  real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
	  real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (force_convert
	      || (real_identical (&re_mode, &re)
		  && real_identical (&im_mode, &im)))
	    return build_complex (type, build_real (TREE_TYPE (type), re_mode),
				  build_real (TREE_TYPE (type), im_mode));
	}
    }
  return NULL_TREE;
}
/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mpfr_rnd_t rnd = fmt->round_towards_zero ? MPFR_RNDZ : MPFR_RNDN;
	  tree result_rem;
	  long integer_quo;
	  mpfr_t m0, m1;

	  mpfr_inits2 (prec, m0, m1, NULL);
	  mpfr_from_real (m0, ra0, MPFR_RNDN);
	  mpfr_from_real (m1, ra1, MPFR_RNDN);
	  mpfr_clear_flags ();
	  mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
	  /* Remquo is independent of the rounding mode, so pass
	     inexact=0 to do_mpfr_ckconv().  */
	  result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
	  mpfr_clears (m0, m1, NULL);
	  if (result_rem)
	    {
	      /* MPFR calculates quo in the host's long so it may
		 return more bits in quo than the target int can hold
		 if sizeof(host long) > sizeof(target int).  This can
		 happen even for native compilers in LP64 mode.  In
		 these cases, modulo the quo value with the largest
		 number that the target int can hold while leaving one
		 bit for the sign.  */
	      if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
		integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

	      /* Dereference the quo pointer argument.  */
	      arg_quo = build_fold_indirect_ref (arg_quo);
	      /* Proceed iff a valid pointer type was passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
		{
		  /* Set the value.  */
		  tree result_quo
		    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
				   build_int_cst (TREE_TYPE (arg_quo),
						  integer_quo));
		  TREE_SIDE_EFFECTS (result_quo) = 1;
		  /* Combine the quo assignment with the rem.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_quo, result_rem));
		}
	    }
	}
    }
  return result;
}
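/* Illustrative sketch of the constant folding above:

     int q;
     double r = __builtin_remquo (5.0, 3.0, &q);

   folds into the COMPOUND_EXPR (q = 2, -1.0): the quotient of 5.0/3.0
   rounded to nearest is 2 (of which remquo only has to preserve the sign
   and the low bits) and the remainder 5.0 - 2*3.0 is -1.0, both computed
   via MPFR at the precision of double.  */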
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1, 1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
	 negative integer.  */
      if (real_isfinite (ra)
	  && ra->cl != rvc_zero
	  && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mpfr_rnd_t rnd = fmt->round_towards_zero ? MPFR_RNDZ : MPFR_RNDN;
	  int inexact, sg;
	  tree result_lg;
	  mpfr_t m;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, MPFR_RNDN);
	  mpfr_clear_flags ();
	  inexact = mpfr_lgamma (m, &sg, m, rnd);
	  result_lg = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clears (m, NULL);
	  if (result_lg)
	    {
	      tree result_sg;

	      /* Dereference the arg_sg pointer argument.  */
	      arg_sg = build_fold_indirect_ref (arg_sg);
	      /* Assign the signgam value into *arg_sg.  */
	      result_sg = fold_build2 (MODIFY_EXPR,
				       TREE_TYPE (arg_sg), arg_sg,
				       build_int_cst (TREE_TYPE (arg_sg), sg));
	      TREE_SIDE_EFFECTS (result_sg) = 1;
	      /* Combine the signgam assignment with the lgamma result.  */
	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						result_sg, result_lg));
	    }
	}
    }
  return result;
}
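/* Illustrative sketch of the folding above:

     int sg;
     double l = __builtin_lgamma_r (0.5, &sg);

   folds into (sg = 1, 0.57236...): gamma(0.5) = sqrt(pi) is positive, so
   the signgam value stored through the pointer is 1 and the result is
   log(sqrt(pi)) computed via MPFR at double precision.  */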
/* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
   mpc function FUNC on it and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
	     int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
	  || (real_isfinite (re0) && real_isfinite (im0)
	      && real_isfinite (re1) && real_isfinite (im1)))
	{
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mpfr_rnd_t rnd = fmt->round_towards_zero
				 ? MPFR_RNDZ : MPFR_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m0, m1;

	  mpc_init2 (m0, prec);
	  mpc_init2 (m1, prec);
	  mpfr_from_real (mpc_realref (m0), re0, rnd);
	  mpfr_from_real (mpc_imagref (m0), im0, rnd);
	  mpfr_from_real (mpc_realref (m1), re1, rnd);
	  mpfr_from_real (mpc_imagref (m1), im1, rnd);
	  mpfr_clear_flags ();
	  inexact = func (m0, m0, m1, crnd);
	  result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
	  mpc_clear (m0);
	  mpc_clear (m1);
	}
    }

  return result;
}
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl && fndecl_built_in_p (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
		    ? gimple_call_arg_ptr (stmt, 0)
		    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	return targetm.fold_builtin (fndecl, nargs, args, ignore);
      else
	{
	  ret = fold_builtin_n (loc, NULL_TREE, fndecl, args, nargs, ignore);
	  if (ret)
	    {
	      /* Propagate location information from original call to
		 expansion of builtin.  Otherwise things like
		 maybe_emit_chk_warning, that operate on the expansion
		 of a builtin, will use the wrong location information.  */
	      if (gimple_has_location (stmt))
		{
		  tree realret = ret;
		  if (TREE_CODE (ret) == NOP_EXPR)
		    realret = TREE_OPERAND (ret, 0);
		  if (CAN_HAVE_LOCATION_P (realret)
		      && !EXPR_HAS_LOCATION (realret))
		    SET_EXPR_LOCATION (realret, loc);
		  return realret;
		}
	      return ret;
	    }
	}
    }
  return NULL_TREE;
}
/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  gcc_assert (fndecl_built_in_p (decl, BUILT_IN_NORMAL)
	      && asmspec != 0);

  tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);

  if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
      && INT_TYPE_SIZE < BITS_PER_WORD)
    {
      scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
      set_user_assembler_libfunc ("ffs", asmspec);
      set_optab_libfunc (ffs_optab, mode, "ffs");
    }
}
/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
    switch (DECL_FUNCTION_CODE (decl))
      {
	/* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
	/* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
	/* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
	return true;

      default:
	return false;
      }

  return false;
}
/* Return true if DECL is a builtin that is not expensive, i.e., one that is
   most probably expanded inline into reasonably simple code.  This is a
   superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      CASE_BUILT_IN_ALLOCA:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_BSWAP128:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_ACC_ON_DEVICE:
	return true;

      default:
	return is_simple_builtin (decl);
      }

  return false;
}
/* Return true if T is a constant and the value cast to a target char
   can be represented by a host char.
   Store the casted char constant in *P if so.  */

bool
target_char_cst_p (tree t, char *p)
{
  if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
    return false;

  *p = (char)tree_to_uhwi (t);
  return true;
}
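/* Illustrative note: folders such as the strchr and strpbrk handlers use
   this helper so that a character constant like 'c' is only treated as a
   host char when the target character is actually representable on the
   host; on a cross compiler with a wider target char the folding is
   simply skipped.  */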
/* Return true if the builtin DECL is implemented in a standard library.
   Otherwise return false which doesn't guarantee it is not (thus the list
   of handled builtins below may be incomplete).  */
bool
builtin_with_linkage_p (tree decl)
{
  if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      CASE_FLT_FN (BUILT_IN_ACOS):
      CASE_FLT_FN (BUILT_IN_ACOSH):
      CASE_FLT_FN (BUILT_IN_ASIN):
      CASE_FLT_FN (BUILT_IN_ASINH):
      CASE_FLT_FN (BUILT_IN_ATAN):
      CASE_FLT_FN (BUILT_IN_ATANH):
      CASE_FLT_FN (BUILT_IN_ATAN2):
      CASE_FLT_FN (BUILT_IN_CBRT):
      CASE_FLT_FN (BUILT_IN_CEIL):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_CEIL):
      CASE_FLT_FN (BUILT_IN_COPYSIGN):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
      CASE_FLT_FN (BUILT_IN_COS):
      CASE_FLT_FN (BUILT_IN_COSH):
      CASE_FLT_FN (BUILT_IN_ERF):
      CASE_FLT_FN (BUILT_IN_ERFC):
      CASE_FLT_FN (BUILT_IN_EXP):
      CASE_FLT_FN (BUILT_IN_EXP2):
      CASE_FLT_FN (BUILT_IN_EXPM1):
      CASE_FLT_FN (BUILT_IN_FABS):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
      CASE_FLT_FN (BUILT_IN_FDIM):
      CASE_FLT_FN (BUILT_IN_FLOOR):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FLOOR):
      CASE_FLT_FN (BUILT_IN_FMA):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
      CASE_FLT_FN (BUILT_IN_FMAX):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMAX):
      CASE_FLT_FN (BUILT_IN_FMIN):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMIN):
      CASE_FLT_FN (BUILT_IN_FMOD):
      CASE_FLT_FN (BUILT_IN_FREXP):
      CASE_FLT_FN (BUILT_IN_HYPOT):
      CASE_FLT_FN (BUILT_IN_ILOGB):
      CASE_FLT_FN (BUILT_IN_LDEXP):
      CASE_FLT_FN (BUILT_IN_LGAMMA):
      CASE_FLT_FN (BUILT_IN_LLRINT):
      CASE_FLT_FN (BUILT_IN_LLROUND):
      CASE_FLT_FN (BUILT_IN_LOG):
      CASE_FLT_FN (BUILT_IN_LOG10):
      CASE_FLT_FN (BUILT_IN_LOG1P):
      CASE_FLT_FN (BUILT_IN_LOG2):
      CASE_FLT_FN (BUILT_IN_LOGB):
      CASE_FLT_FN (BUILT_IN_LRINT):
      CASE_FLT_FN (BUILT_IN_LROUND):
      CASE_FLT_FN (BUILT_IN_MODF):
      CASE_FLT_FN (BUILT_IN_NAN):
      CASE_FLT_FN (BUILT_IN_NEARBYINT):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEARBYINT):
      CASE_FLT_FN (BUILT_IN_NEXTAFTER):
      CASE_FLT_FN (BUILT_IN_NEXTTOWARD):
      CASE_FLT_FN (BUILT_IN_POW):
      CASE_FLT_FN (BUILT_IN_REMAINDER):
      CASE_FLT_FN (BUILT_IN_REMQUO):
      CASE_FLT_FN (BUILT_IN_RINT):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_RINT):
      CASE_FLT_FN (BUILT_IN_ROUND):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_ROUND):
      CASE_FLT_FN (BUILT_IN_SCALBLN):
      CASE_FLT_FN (BUILT_IN_SCALBN):
      CASE_FLT_FN (BUILT_IN_SIN):
      CASE_FLT_FN (BUILT_IN_SINH):
      CASE_FLT_FN (BUILT_IN_SINCOS):
      CASE_FLT_FN (BUILT_IN_SQRT):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_SQRT):
      CASE_FLT_FN (BUILT_IN_TAN):
      CASE_FLT_FN (BUILT_IN_TANH):
      CASE_FLT_FN (BUILT_IN_TGAMMA):
      CASE_FLT_FN (BUILT_IN_TRUNC):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_TRUNC):
	return true;

      case BUILT_IN_STPCPY:
      case BUILT_IN_STPNCPY:
	/* stpcpy is both referenced in libiberty's pex-win32.c and provided
	   by libiberty's stpcpy.c for MinGW targets so we need to return true
	   in order to be able to build libiberty in LTO mode for them.  */
	return true;

      default:
	break;
      }
  return false;
}
/* Return true if OFFRNG is bounded to a subrange of offset values
   valid for the largest possible object.  */

bool
access_ref::offset_bounded () const
{
  tree min = TYPE_MIN_VALUE (ptrdiff_type_node);
  tree max = TYPE_MAX_VALUE (ptrdiff_type_node);
  return wi::to_offset (min) <= offrng[0] && offrng[1] <= wi::to_offset (max);
}
/* If CALLEE has known side effects, fill in INFO and return true.
   See tree-ssa-structalias.c:find_func_aliases
   for the list of builtins we might need to handle here.  */

attr_fnspec
builtin_fnspec (tree callee)
{
  built_in_function code = DECL_FUNCTION_CODE (callee);

  switch (code)
    {
      /* All the following functions read memory pointed to by
	 their second argument and write memory pointed to by first
	 argument.
	 strcat/strncat additionally reads memory pointed to by the first
	 argument.  */
    case BUILT_IN_STRCAT:
    case BUILT_IN_STRCAT_CHK:

    case BUILT_IN_STRNCAT:
    case BUILT_IN_STRNCAT_CHK:

    case BUILT_IN_STRCPY:
    case BUILT_IN_STRCPY_CHK:

    case BUILT_IN_STPCPY:
    case BUILT_IN_STPCPY_CHK:

    case BUILT_IN_STRNCPY:
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMMOVE:
    case BUILT_IN_TM_MEMCPY:
    case BUILT_IN_TM_MEMMOVE:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:

    case BUILT_IN_MEMPCPY:
    case BUILT_IN_MEMPCPY_CHK:

    case BUILT_IN_STPNCPY:
    case BUILT_IN_STPNCPY_CHK:

    case BUILT_IN_BCOPY:

    case BUILT_IN_BZERO:

    case BUILT_IN_MEMCMP:
    case BUILT_IN_MEMCMP_EQ:
    case BUILT_IN_BCMP:
    case BUILT_IN_STRNCMP:
    case BUILT_IN_STRNCMP_EQ:
    case BUILT_IN_STRNCASECMP:

      /* The following functions read memory pointed to by their
	 first argument.  */
    CASE_BUILT_IN_TM_LOAD (1):
    CASE_BUILT_IN_TM_LOAD (2):
    CASE_BUILT_IN_TM_LOAD (4):
    CASE_BUILT_IN_TM_LOAD (8):
    CASE_BUILT_IN_TM_LOAD (FLOAT):
    CASE_BUILT_IN_TM_LOAD (DOUBLE):
    CASE_BUILT_IN_TM_LOAD (LDOUBLE):
    CASE_BUILT_IN_TM_LOAD (M64):
    CASE_BUILT_IN_TM_LOAD (M128):
    CASE_BUILT_IN_TM_LOAD (M256):
    case BUILT_IN_TM_LOG:
    case BUILT_IN_TM_LOG_1:
    case BUILT_IN_TM_LOG_2:
    case BUILT_IN_TM_LOG_4:
    case BUILT_IN_TM_LOG_8:
    case BUILT_IN_TM_LOG_FLOAT:
    case BUILT_IN_TM_LOG_DOUBLE:
    case BUILT_IN_TM_LOG_LDOUBLE:
    case BUILT_IN_TM_LOG_M64:
    case BUILT_IN_TM_LOG_M128:
    case BUILT_IN_TM_LOG_M256:

    case BUILT_IN_INDEX:
    case BUILT_IN_RINDEX:
    case BUILT_IN_STRCHR:
    case BUILT_IN_STRLEN:
    case BUILT_IN_STRRCHR:

    case BUILT_IN_STRNLEN:

      /* These read memory pointed to by the first argument.
	 Allocating memory does not have any side-effects apart from
	 being the definition point for the pointer.
	 Unix98 specifies that errno is set on allocation failure.  */
    case BUILT_IN_STRDUP:

    case BUILT_IN_STRNDUP:

      /* Allocating memory does not have any side-effects apart from
	 being the definition point for the pointer.  */
    case BUILT_IN_MALLOC:
    case BUILT_IN_ALIGNED_ALLOC:
    case BUILT_IN_CALLOC:
    case BUILT_IN_GOMP_ALLOC:

    CASE_BUILT_IN_ALLOCA:

      /* These read memory pointed to by the first argument with size
	 in the third argument.  */
    case BUILT_IN_MEMCHR:

      /* These read memory pointed to by the first and second arguments.  */
    case BUILT_IN_STRSTR:
    case BUILT_IN_STRPBRK:
    case BUILT_IN_STRCASECMP:
    case BUILT_IN_STRCSPN:
    case BUILT_IN_STRSPN:
    case BUILT_IN_STRCMP:
    case BUILT_IN_STRCMP_EQ:

      /* Freeing memory kills the pointed-to memory.  More importantly
	 the call has to serve as a barrier for moving loads and stores
	 across it.  */
    case BUILT_IN_STACK_RESTORE:
    case BUILT_IN_FREE:
    case BUILT_IN_GOMP_FREE:

    case BUILT_IN_VA_END:

      /* Realloc serves both as allocation point and deallocation point.  */
    case BUILT_IN_REALLOC:

    case BUILT_IN_GAMMA_R:
    case BUILT_IN_GAMMAF_R:
    case BUILT_IN_GAMMAL_R:
    case BUILT_IN_LGAMMA_R:
    case BUILT_IN_LGAMMAF_R:
    case BUILT_IN_LGAMMAL_R:

    case BUILT_IN_FREXP:
    case BUILT_IN_FREXPF:
    case BUILT_IN_FREXPL:
    case BUILT_IN_MODF:
    case BUILT_IN_MODFF:
    case BUILT_IN_MODFL:

    case BUILT_IN_REMQUO:
    case BUILT_IN_REMQUOF:
    case BUILT_IN_REMQUOL:

    case BUILT_IN_SINCOS:
    case BUILT_IN_SINCOSF:
    case BUILT_IN_SINCOSL:

    case BUILT_IN_MEMSET:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_TM_MEMSET:

    CASE_BUILT_IN_TM_STORE (1):
    CASE_BUILT_IN_TM_STORE (2):
    CASE_BUILT_IN_TM_STORE (4):
    CASE_BUILT_IN_TM_STORE (8):
    CASE_BUILT_IN_TM_STORE (FLOAT):
    CASE_BUILT_IN_TM_STORE (DOUBLE):
    CASE_BUILT_IN_TM_STORE (LDOUBLE):
    CASE_BUILT_IN_TM_STORE (M64):
    CASE_BUILT_IN_TM_STORE (M128):
    CASE_BUILT_IN_TM_STORE (M256):

    case BUILT_IN_STACK_SAVE:

    case BUILT_IN_ASSUME_ALIGNED:

      /* But posix_memalign stores a pointer into the memory pointed to
	 by its first argument.  */
    case BUILT_IN_POSIX_MEMALIGN: