/* Expand builtin functions.
   Copyright (C) 1988-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */
#include "coretypes.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "tree-object-size.h"
#include "typeclass.h"
#include "langhooks.h"
#include "value-prof.h"
#include "stringpool.h"
#include "tree-chkp.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimple-fold.h"
struct target_builtins default_target_builtins;

struct target_builtins *this_target_builtins = &default_target_builtins;

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN
/* Setup an array of builtin_info_type, make sure each element decl is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;
static rtx c_readstr (const char *, scalar_int_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memchr (tree, rtx);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
					    rtx target, tree exp, int endp);
static rtx expand_builtin_memmove (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx);
static rtx expand_builtin_mempcpy_with_bounds (tree, rtx);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, int);
static rtx expand_builtin_strcat (tree, rtx);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_stpncpy (tree, rtx);
static rtx expand_builtin_strncat (tree, rtx);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);
/* Return true if NAME starts with __builtin_ or __sync_.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  if (flag_cilkplus
      && (!strcmp (name, "__cilkrts_detach")
	  || !strcmp (name, "__cilkrts_pop_frame")))
    return true;
  return false;
}
/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}
/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

static bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}
/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in alignp and N in
   *BITPOSP and return true.  Otherwise return false and store BITS_PER_UNIT to
   *alignp and any bit-offset to *bitposp.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */
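/* Illustrative example (not part of the original source): if the address of
   EXP is known to be 3 bytes past a 16-byte aligned boundary, the function
   would set *ALIGNP to 128 (the alignment M, in bits) and *BITPOSP to 24
   (the misalignment N, in bits), since the bit address minus 24 is divisible
   by 128 and 24 < 128.  */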
static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = least_bit_hwi (ptr_bitmask);
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      unsigned int talign;
      if (!addr_p && !known_alignment
	  && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
	  && talign > align)
	align = talign;
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}
/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be determined,
   store M in alignp and N in *BITPOSP and return true.  Otherwise return false
   and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}
400 get_object_alignment (tree exp
)
402 unsigned HOST_WIDE_INT bitpos
= 0;
405 get_object_alignment_1 (exp
, &align
, &bitpos
);
407 /* align and bitpos now specify known low bits of the pointer.
408 ptr & (align - 1) == bitpos. */
411 align
= least_bit_hwi (bitpos
);
/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in alignp and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	  if (*alignp == 0)
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}
/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);

  return align;
}
/* Return the number of non-zero elements in the sequence
   [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
   ELTSIZE must be a power of 2 less than 8.  Used by c_strlen.  */
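/* Illustrative example (not part of the original source): for the plain
   char sequence "ab\0cd" with ELTSIZE == 1 and MAXELTS == 5 the result is 2,
   since only the two elements before the embedded null are non-zero.  */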
static unsigned
string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
{
  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  unsigned n;

  if (eltsize == 1)
    {
      /* Optimize the common case of plain char.  */
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n;
	  if (!*elt)
	    break;
	}
    }
  else
    {
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n * eltsize;
	  if (!memcmp (elt, "\0\0\0\0", eltsize))
	    break;
	}
    }
  return n;
}
/* Compute the length of a null-terminated character string or wide
   character string handling character sizes of 1, 2, and 4 bytes.
   TREE_STRING_LENGTH is not the right way because it evaluates to
   the size of the character array in bytes (as opposed to characters)
   and because it can contain a zero byte in the middle.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */
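/* Illustrative example (not part of the original source): for the constant
   expression "foobar" + 2 the function returns the ssizetype constant 4,
   i.e. the distance from the given offset to the terminating null.  */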
tree
c_strlen (tree src, int only_value)
{
  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  location_t loc = EXPR_LOC_OR_LOC (src, input_location);

  /* Offset from the beginning of the string in bytes.  */
  tree byteoff;
  src = string_constant (src, &byteoff);
  if (src == 0)
    return NULL_TREE;

  /* Determine the size of the string element.  */
  unsigned eltsize
    = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src))));

  /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
     length of SRC.  */
  unsigned maxelts = TREE_STRING_LENGTH (src) / eltsize - 1;

  /* PTR can point to the byte representation of any string type, including
     char* and wchar_t*.  */
  const char *ptr = TREE_STRING_POINTER (src);

  if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      if (string_length (ptr, eltsize, maxelts) < maxelts)
	{
	  /* Return when an embedded null character is found.  */
	  return NULL_TREE;
	}

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */
      return size_diffop_loc (loc, size_int (maxelts * eltsize), byteoff);
    }

  /* Offset from the beginning of the string in elements.  */
  HOST_WIDE_INT eltoff;

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (byteoff == 0)
    eltoff = 0;
  else if (! tree_fits_shwi_p (byteoff))
    eltoff = -1;
  else
    eltoff = tree_to_shwi (byteoff) / eltsize;

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (eltoff < 0 || eltoff > maxelts)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset %qwi outside bounds of constant string",
		      eltoff);
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since ELTOFF is our starting index into the string, no further
     calculation is needed.  */
  unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
				maxelts - eltoff);

  return ssize_int (len);
}
/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */
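/* Illustrative example (not part of the original source): on a typical
   little-endian target, c_readstr ("abcd", SImode) produces the constant
   0x64636261, i.e. 'a' ends up in the least significant byte; on a
   big-endian target the bytes land in the opposite order.  */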
static rtx
c_readstr (const char *str, scalar_int_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (VAR_P (exp) && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */
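/* Illustrative note (not part of the original source): for
   __builtin_return_address (0) the frame-walking loop below is not entered
   and the return address is read relative to the current frame, whereas
   __builtin_return_address (2) follows the dynamic chain twice first.  */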
rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */
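/* Layout of the buffer as used below (an illustrative summary, not part of
   the original source): word 0 holds the frame pointer value, word 1 the
   address of RECEIVER_LABEL, and the area starting at word 2 is handed to
   emit_stack_save for the machine-dependent stack save area.  */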
void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode))),
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.   */
  cfun->has_nonlocal_label = 1;
}
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */
void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
  if (! targetm.have_nonlocal_goto ())
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 TARGET_STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */
static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
static bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */
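/* Illustrative example (not part of the original source): a call such as

     validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)

   accepts exactly a pointer argument followed by an integer argument,
   while ending the list with 0 instead of VOID_TYPE would allow any
   further arguments after the ones listed.  */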
bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  /* Get a bitmap of pointer argument numbers declared attribute nonnull.  */
  tree fn = CALL_EXPR_FN (callexpr);
  bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));

  for (unsigned argno = 1; ; ++argno)
    {
      code = (enum tree_code) va_arg (ap, int);

      switch (code)
	{
	case 0:
	  /* This signifies an ellipses, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	case POINTER_TYPE:
	  /* The actual argument must be nonnull when either the whole
	     called function has been declared nonnull, or when the formal
	     argument corresponding to the actual argument has been.  */
	  if (argmap
	      && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
	    {
	      arg = next_const_call_expr_arg (&iter);
	      if (!validate_arg (arg, code) || integer_zerop (arg))
		goto end;
	      break;
	    }
	  /* FALLTHRU */
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  BITMAP_FREE (argmap);

  return res;
}
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */
static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.   */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
      if (regnum != INVALID_REGNUM && fixed_regs[regnum])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}
/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */
void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  buf_addr = convert_memory_address (Pmode, buf_addr);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address (sa_mode,
				   plus_constant (Pmode, buf_addr,
						  2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */
static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */
static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}
/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */
static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  apply_args_mode[regno] = VOIDmode;
    }
  return size;
}
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */
static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */
static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */
static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  machine_mode mode;
  rtx struct_incoming_value
    = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
				      crtl->args.pretend_args_size),
		       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */
1536 /* Don't do __builtin_apply_args more than once in a function.
1537 Save the result of the first call and reuse it. */
1538 if (apply_args_value
!= 0)
1539 return apply_args_value
;
1541 /* When this function is called, it means that registers must be
1542 saved on entry to this function. So we migrate the
1543 call to the first insn of this function. */
1547 temp
= expand_builtin_apply_args_1 ();
1548 rtx_insn
*seq
= get_insns ();
1551 apply_args_value
= temp
;
1553 /* Put the insns after the NOTE that starts the function.
1554 If this is inside a start_sequence, make the outer-level insn
1555 chain current, so the code is placed at the start of the
1556 function. If internal_arg_pointer is a non-virtual pseudo,
1557 it needs to be placed after the function that initializes
1559 push_topmost_sequence ();
1560 if (REG_P (crtl
->args
.internal_arg_pointer
)
1561 && REGNO (crtl
->args
.internal_arg_pointer
) > LAST_VIRTUAL_REGISTER
)
1562 emit_insn_before (seq
, parm_birth_insn
);
1564 emit_insn_before (seq
, NEXT_INSN (entry_of_function ()));
1565 pop_topmost_sequence ();
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */
static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  machine_mode mode;
  rtx incoming_args, result, reg, dest, src;
  rtx_call_insn *call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
  if (!STACK_GROWS_DOWNWARD)
    incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
					 incoming_args, 0, OPTAB_LIB_WIDEN);

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
  else
    emit_stack_save (SAVE_BLOCK, &old_stack_level);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
  if (!STACK_GROWS_DOWNWARD)
    {
      if (CONST_INT_P (argsize))
	dest = plus_constant (Pmode, dest, -INTVAL (argsize));
      else
	dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
    }
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
  if (targetm.have_untyped_call ())
    {
      rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
      emit_call_insn (targetm.gen_untyped_call (mem, result,
						result_vector (1, result)));
    }
  else if (targetm.have_call_value ())
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    gcc_assert (!valreg); /* have_untyped_call required.  */

	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_insn (targetm.gen_call_value (valreg,
					 gen_rtx_MEM (FUNCTION_MODE, function),
					 const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
  else
    emit_stack_restore (SAVE_BLOCK, old_stack_level);
  fixup_args_size_notes (call_insn, get_last_insn (), 0);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
/* Perform an untyped return.  */
static void
expand_builtin_return (rtx result)
{
  int size, align, regno;
  machine_mode mode;
  rtx reg;
  rtx_insn *call_fusage = 0;

  result = convert_memory_address (Pmode, result);

  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

  if (targetm.have_untyped_return ())
    {
      rtx vector = result_vector (0, result);
      emit_jump_insn (targetm.gen_untyped_return (result, vector));
      emit_barrier ();
      return;
    }

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
	emit_move_insn (reg, adjust_address (result, mode, size));

	push_to_sequence (call_fusage);
	emit_use (reg);
	call_fusage = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insn (call_fusage);

  /* Return whatever values was restored by jumping directly to the end
     of the function.  */
  expand_naked_return ();
}
/* Used by expand_builtin_classify_type and fold_builtin_classify_type.  */

static enum type_class
type_to_class (tree type)
{
  switch (TREE_CODE (type))
    {
    case VOID_TYPE:	   return void_type_class;
    case INTEGER_TYPE:	   return integer_type_class;
    case ENUMERAL_TYPE:	   return enumeral_type_class;
    case BOOLEAN_TYPE:	   return boolean_type_class;
    case POINTER_TYPE:	   return pointer_type_class;
    case REFERENCE_TYPE:   return reference_type_class;
    case OFFSET_TYPE:	   return offset_type_class;
    case REAL_TYPE:	   return real_type_class;
    case COMPLEX_TYPE:	   return complex_type_class;
    case FUNCTION_TYPE:	   return function_type_class;
    case METHOD_TYPE:	   return method_type_class;
    case RECORD_TYPE:	   return record_type_class;
    case UNION_TYPE:
    case QUAL_UNION_TYPE:  return union_type_class;
    case ARRAY_TYPE:	   return (TYPE_STRING_FLAG (type)
				   ? string_type_class : array_type_class);
    case LANG_TYPE:	   return lang_type_class;
    default:		   return no_type_class;
    }
}
1812 expand_builtin_classify_type (tree exp
)
1814 if (call_expr_nargs (exp
))
1815 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp
, 0))));
1816 return GEN_INT (no_type_class
);
/* This helper macro, meant to be used in mathfn_built_in below, determines
   which among a set of builtin math functions is appropriate for a given type
   mode.  The `F' (float) and `L' (long double) are automatically generated
   from the 'double' case.  If a function supports the _Float<N> and _Float<N>X
   types, there are additional types that are considered with 'F32', 'F64',
   'F128', etc. suffixes.  */
#define CASE_MATHFN(MATHFN) \
  CASE_CFN_##MATHFN: \
  fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
  fcodel = BUILT_IN_##MATHFN##L ; break;

/* Similar to the above, but also add support for the _Float<N> and _Float<N>X
   types.  */
#define CASE_MATHFN_FLOATN(MATHFN) \
  CASE_CFN_##MATHFN: \
  fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
  fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
  fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
  fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
  fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
  break;

/* Similar to above, but appends _R after any F/L suffix.  */
#define CASE_MATHFN_REENT(MATHFN) \
  case CFN_BUILT_IN_##MATHFN##_R: \
  case CFN_BUILT_IN_##MATHFN##F_R: \
  case CFN_BUILT_IN_##MATHFN##L_R: \
  fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
  fcodel = BUILT_IN_##MATHFN##L_R ; break;
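
/* Illustrative sketch, not part of the original source: inside the switch
   in mathfn_built_in_2 below, a use such as

     CASE_MATHFN (NEARBYINT)

   expands to roughly

     CASE_CFN_NEARBYINT:
       fcode = BUILT_IN_NEARBYINT; fcodef = BUILT_IN_NEARBYINTF;
       fcodel = BUILT_IN_NEARBYINTL; break;

   so one macro use covers the double, float and long double variants of a
   single math builtin at once.  */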
/* Return a function equivalent to FN but operating on floating-point
   values of type TYPE, or END_BUILTINS if no such function exists.
   This is purely an operation on function codes; it does not guarantee
   that the target actually has an implementation of the function.  */

static built_in_function
mathfn_built_in_2 (tree type, combined_fn fn)
{
  tree mtype;
  built_in_function fcode, fcodef, fcodel;
  built_in_function fcodef16 = END_BUILTINS;
  built_in_function fcodef32 = END_BUILTINS;
  built_in_function fcodef64 = END_BUILTINS;
  built_in_function fcodef128 = END_BUILTINS;
  built_in_function fcodef32x = END_BUILTINS;
  built_in_function fcodef64x = END_BUILTINS;
  built_in_function fcodef128x = END_BUILTINS;

  switch (fn)
    {
    CASE_MATHFN_FLOATN (COPYSIGN)
    CASE_MATHFN_FLOATN (FMA)
    CASE_MATHFN_FLOATN (FMAX)
    CASE_MATHFN_FLOATN (FMIN)
    CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
    CASE_MATHFN (HUGE_VAL)
    CASE_MATHFN (IFLOOR)
    CASE_MATHFN (IROUND)
    CASE_MATHFN (LFLOOR)
    CASE_MATHFN (LGAMMA)
    CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
    CASE_MATHFN (LLCEIL)
    CASE_MATHFN (LLFLOOR)
    CASE_MATHFN (LLRINT)
    CASE_MATHFN (LLROUND)
    CASE_MATHFN (LROUND)
    CASE_MATHFN (NEARBYINT)
    CASE_MATHFN (NEXTAFTER)
    CASE_MATHFN (NEXTTOWARD)
    CASE_MATHFN (REMAINDER)
    CASE_MATHFN (REMQUO)
    CASE_MATHFN (SCALBLN)
    CASE_MATHFN (SCALBN)
    CASE_MATHFN (SIGNBIT)
    CASE_MATHFN (SIGNIFICAND)
    CASE_MATHFN (SINCOS)
    CASE_MATHFN_FLOATN (SQRT)
    CASE_MATHFN (TGAMMA)

    default:
      return END_BUILTINS;
    }

  mtype = TYPE_MAIN_VARIANT (type);
  if (mtype == double_type_node)
    return fcode;
  else if (mtype == float_type_node)
    return fcodef;
  else if (mtype == long_double_type_node)
    return fcodel;
  else if (mtype == float16_type_node)
    return fcodef16;
  else if (mtype == float32_type_node)
    return fcodef32;
  else if (mtype == float64_type_node)
    return fcodef64;
  else if (mtype == float128_type_node)
    return fcodef128;
  else if (mtype == float32x_type_node)
    return fcodef32x;
  else if (mtype == float64x_type_node)
    return fcodef64x;
  else if (mtype == float128x_type_node)
    return fcodef128x;
  else
    return END_BUILTINS;
}
/* Return mathematic function equivalent to FN but operating directly on TYPE,
   if available.  If IMPLICIT_P is true use the implicit builtin declaration,
   otherwise use the explicit declaration.  If we can't do the conversion,
   return null.  */

static tree
mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
{
  built_in_function fcode2 = mathfn_built_in_2 (type, fn);
  if (fcode2 == END_BUILTINS)
    return NULL_TREE;

  if (implicit_p && !builtin_decl_implicit_p (fcode2))
    return NULL_TREE;

  return builtin_decl_explicit (fcode2);
}

/* Like mathfn_built_in_1, but always use the implicit array.  */

tree
mathfn_built_in (tree type, combined_fn fn)
{
  return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
}

/* Like mathfn_built_in_1, but take a built_in_function and
   always use the implicit array.  */

tree
mathfn_built_in (tree type, enum built_in_function fn)
{
  return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
}
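
/* Usage sketch (illustrative, not part of the original source):
   mathfn_built_in (float_type_node, BUILT_IN_SQRT) maps CFN_BUILT_IN_SQRT
   through mathfn_built_in_2 to BUILT_IN_SQRTF and returns its decl, or
   NULL_TREE when sqrtf is not implicitly available as a builtin.  */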
/* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
   return its code, otherwise return IFN_LAST.  Note that this function
   only tests whether the function is defined in internals.def, not whether
   it is actually available on the target.  */

internal_fn
associated_internal_fn (tree fndecl)
{
  gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
  tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
  switch (DECL_FUNCTION_CODE (fndecl))
    {
#define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
    CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
#define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
    CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
#define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
    CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
#include "internal-fn.def"

    CASE_FLT_FN (BUILT_IN_POW10):
      return IFN_EXP10;

    CASE_FLT_FN (BUILT_IN_DREM):
      return IFN_REMAINDER;

    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
	return IFN_LDEXP;
      return IFN_LAST;

    default:
      return IFN_LAST;
    }
}
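
/* For example (illustrative only): sqrt has an internal-function counterpart
   (IFN_SQRT) defined in internal-fn.def, so calling associated_internal_fn
   on the decl of __builtin_sqrtf returns IFN_SQRT, while a builtin with no
   internal-fn counterpart yields IFN_LAST.  */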
/* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
   on the current target by a call to an internal function, return the
   code of that internal function, otherwise return IFN_LAST.  The caller
   is responsible for ensuring that any side-effects of the built-in
   call are dealt with correctly.  E.g. if CALL sets errno, the caller
   must decide that the errno result isn't needed or make it available
   in some other way.  */

internal_fn
replacement_internal_fn (gcall *call)
{
  if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
    {
      internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
      if (ifn != IFN_LAST)
	{
	  tree_pair types = direct_internal_fn_types (ifn, call);
	  optimization_type opt_type = bb_optimization_type (gimple_bb (call));
	  if (direct_internal_fn_supported_p (ifn, types, opt_type))
	    return ifn;
	}
    }
  return IFN_LAST;
}
2082 /* Expand a call to the builtin trinary math functions (fma).
2083 Return NULL_RTX if a normal call should be emitted rather than expanding the
2084 function in-line. EXP is the expression that is a call to the builtin
2085 function; if convenient, the result should be placed in TARGET.
2086 SUBTARGET may be used as the target for computing one of EXP's
2090 expand_builtin_mathfn_ternary (tree exp
, rtx target
, rtx subtarget
)
2092 optab builtin_optab
;
2093 rtx op0
, op1
, op2
, result
;
2095 tree fndecl
= get_callee_fndecl (exp
);
2096 tree arg0
, arg1
, arg2
;
2099 if (!validate_arglist (exp
, REAL_TYPE
, REAL_TYPE
, REAL_TYPE
, VOID_TYPE
))
2102 arg0
= CALL_EXPR_ARG (exp
, 0);
2103 arg1
= CALL_EXPR_ARG (exp
, 1);
2104 arg2
= CALL_EXPR_ARG (exp
, 2);
2106 switch (DECL_FUNCTION_CODE (fndecl
))
2108 CASE_FLT_FN (BUILT_IN_FMA
):
2109 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA
):
2110 builtin_optab
= fma_optab
; break;
2115 /* Make a suitable register to place result in. */
2116 mode
= TYPE_MODE (TREE_TYPE (exp
));
2118 /* Before working hard, check whether the instruction is available. */
2119 if (optab_handler (builtin_optab
, mode
) == CODE_FOR_nothing
)
2122 result
= gen_reg_rtx (mode
);
2124 /* Always stabilize the argument list. */
2125 CALL_EXPR_ARG (exp
, 0) = arg0
= builtin_save_expr (arg0
);
2126 CALL_EXPR_ARG (exp
, 1) = arg1
= builtin_save_expr (arg1
);
2127 CALL_EXPR_ARG (exp
, 2) = arg2
= builtin_save_expr (arg2
);
2129 op0
= expand_expr (arg0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2130 op1
= expand_normal (arg1
);
2131 op2
= expand_normal (arg2
);
2135 /* Compute into RESULT.
2136 Set RESULT to wherever the result comes back. */
2137 result
= expand_ternary_op (mode
, builtin_optab
, op0
, op1
, op2
,
2140 /* If we were unable to expand via the builtin, stop the sequence
2141 (without outputting the insns) and call to the library function
2142 with the stabilized argument list. */
2146 return expand_call (exp
, target
, target
== const0_rtx
);
2149 /* Output the entire sequence. */
2150 insns
= get_insns ();
2157 /* Expand a call to the builtin sin and cos math functions.
2158 Return NULL_RTX if a normal call should be emitted rather than expanding the
2159 function in-line. EXP is the expression that is a call to the builtin
2160 function; if convenient, the result should be placed in TARGET.
2161 SUBTARGET may be used as the target for computing one of EXP's
2165 expand_builtin_mathfn_3 (tree exp
, rtx target
, rtx subtarget
)
2167 optab builtin_optab
;
2170 tree fndecl
= get_callee_fndecl (exp
);
2174 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2177 arg
= CALL_EXPR_ARG (exp
, 0);
2179 switch (DECL_FUNCTION_CODE (fndecl
))
2181 CASE_FLT_FN (BUILT_IN_SIN
):
2182 CASE_FLT_FN (BUILT_IN_COS
):
2183 builtin_optab
= sincos_optab
; break;
2188 /* Make a suitable register to place result in. */
2189 mode
= TYPE_MODE (TREE_TYPE (exp
));
2191 /* Check if sincos insn is available, otherwise fallback
2192 to sin or cos insn. */
2193 if (optab_handler (builtin_optab
, mode
) == CODE_FOR_nothing
)
2194 switch (DECL_FUNCTION_CODE (fndecl
))
2196 CASE_FLT_FN (BUILT_IN_SIN
):
2197 builtin_optab
= sin_optab
; break;
2198 CASE_FLT_FN (BUILT_IN_COS
):
2199 builtin_optab
= cos_optab
; break;
2204 /* Before working hard, check whether the instruction is available. */
2205 if (optab_handler (builtin_optab
, mode
) != CODE_FOR_nothing
)
2207 rtx result
= gen_reg_rtx (mode
);
2209 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2210 need to expand the argument again. This way, we will not perform
     side-effects more than once.  */
2212 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2214 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2218 /* Compute into RESULT.
2219 Set RESULT to wherever the result comes back. */
2220 if (builtin_optab
== sincos_optab
)
2224 switch (DECL_FUNCTION_CODE (fndecl
))
2226 CASE_FLT_FN (BUILT_IN_SIN
):
2227 ok
= expand_twoval_unop (builtin_optab
, op0
, 0, result
, 0);
2229 CASE_FLT_FN (BUILT_IN_COS
):
2230 ok
= expand_twoval_unop (builtin_optab
, op0
, result
, 0, 0);
2238 result
= expand_unop (mode
, builtin_optab
, op0
, result
, 0);
2242 /* Output the entire sequence. */
2243 insns
= get_insns ();
2249 /* If we were unable to expand via the builtin, stop the sequence
2250 (without outputting the insns) and call to the library function
2251 with the stabilized argument list. */
2255 return expand_call (exp
, target
, target
== const0_rtx
);
/* Given an interclass math builtin decl FNDECL and its argument ARG
2259 return an RTL instruction code that implements the functionality.
2260 If that isn't possible or available return CODE_FOR_nothing. */
2262 static enum insn_code
2263 interclass_mathfn_icode (tree arg
, tree fndecl
)
2265 bool errno_set
= false;
2266 optab builtin_optab
= unknown_optab
;
2269 switch (DECL_FUNCTION_CODE (fndecl
))
2271 CASE_FLT_FN (BUILT_IN_ILOGB
):
2272 errno_set
= true; builtin_optab
= ilogb_optab
; break;
2273 CASE_FLT_FN (BUILT_IN_ISINF
):
2274 builtin_optab
= isinf_optab
; break;
2275 case BUILT_IN_ISNORMAL
:
2276 case BUILT_IN_ISFINITE
:
2277 CASE_FLT_FN (BUILT_IN_FINITE
):
2278 case BUILT_IN_FINITED32
:
2279 case BUILT_IN_FINITED64
:
2280 case BUILT_IN_FINITED128
:
2281 case BUILT_IN_ISINFD32
:
2282 case BUILT_IN_ISINFD64
:
2283 case BUILT_IN_ISINFD128
:
2284 /* These builtins have no optabs (yet). */
2290 /* There's no easy way to detect the case we need to set EDOM. */
2291 if (flag_errno_math
&& errno_set
)
2292 return CODE_FOR_nothing
;
2294 /* Optab mode depends on the mode of the input argument. */
2295 mode
= TYPE_MODE (TREE_TYPE (arg
));
2298 return optab_handler (builtin_optab
, mode
);
2299 return CODE_FOR_nothing
;
2302 /* Expand a call to one of the builtin math functions that operate on
2303 floating point argument and output an integer result (ilogb, isinf,
2305 Return 0 if a normal call should be emitted rather than expanding the
2306 function in-line. EXP is the expression that is a call to the builtin
2307 function; if convenient, the result should be placed in TARGET. */
2310 expand_builtin_interclass_mathfn (tree exp
, rtx target
)
2312 enum insn_code icode
= CODE_FOR_nothing
;
2314 tree fndecl
= get_callee_fndecl (exp
);
2318 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2321 arg
= CALL_EXPR_ARG (exp
, 0);
2322 icode
= interclass_mathfn_icode (arg
, fndecl
);
2323 mode
= TYPE_MODE (TREE_TYPE (arg
));
2325 if (icode
!= CODE_FOR_nothing
)
2327 struct expand_operand ops
[1];
2328 rtx_insn
*last
= get_last_insn ();
2329 tree orig_arg
= arg
;
2331 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2332 need to expand the argument again. This way, we will not perform
     side-effects more than once.  */
2334 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2336 op0
= expand_expr (arg
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
2338 if (mode
!= GET_MODE (op0
))
2339 op0
= convert_to_mode (mode
, op0
, 0);
2341 create_output_operand (&ops
[0], target
, TYPE_MODE (TREE_TYPE (exp
)));
2342 if (maybe_legitimize_operands (icode
, 0, 1, ops
)
2343 && maybe_emit_unop_insn (icode
, ops
[0].value
, op0
, UNKNOWN
))
2344 return ops
[0].value
;
2346 delete_insns_since (last
);
2347 CALL_EXPR_ARG (exp
, 0) = orig_arg
;
2353 /* Expand a call to the builtin sincos math function.
2354 Return NULL_RTX if a normal call should be emitted rather than expanding the
2355 function in-line. EXP is the expression that is a call to the builtin
2359 expand_builtin_sincos (tree exp
)
2361 rtx op0
, op1
, op2
, target1
, target2
;
2363 tree arg
, sinp
, cosp
;
2365 location_t loc
= EXPR_LOCATION (exp
);
2366 tree alias_type
, alias_off
;
2368 if (!validate_arglist (exp
, REAL_TYPE
,
2369 POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
2372 arg
= CALL_EXPR_ARG (exp
, 0);
2373 sinp
= CALL_EXPR_ARG (exp
, 1);
2374 cosp
= CALL_EXPR_ARG (exp
, 2);
2376 /* Make a suitable register to place result in. */
2377 mode
= TYPE_MODE (TREE_TYPE (arg
));
2379 /* Check if sincos insn is available, otherwise emit the call. */
2380 if (optab_handler (sincos_optab
, mode
) == CODE_FOR_nothing
)
2383 target1
= gen_reg_rtx (mode
);
2384 target2
= gen_reg_rtx (mode
);
2386 op0
= expand_normal (arg
);
2387 alias_type
= build_pointer_type_for_mode (TREE_TYPE (arg
), ptr_mode
, true);
2388 alias_off
= build_int_cst (alias_type
, 0);
2389 op1
= expand_normal (fold_build2_loc (loc
, MEM_REF
, TREE_TYPE (arg
),
2391 op2
= expand_normal (fold_build2_loc (loc
, MEM_REF
, TREE_TYPE (arg
),
2394 /* Compute into target1 and target2.
2395 Set TARGET to wherever the result comes back. */
2396 result
= expand_twoval_unop (sincos_optab
, op0
, target2
, target1
, 0);
2397 gcc_assert (result
);
2399 /* Move target1 and target2 to the memory locations indicated
2401 emit_move_insn (op1
, target1
);
2402 emit_move_insn (op2
, target2
);
2407 /* Expand a call to the internal cexpi builtin to the sincos math function.
2408 EXP is the expression that is a call to the builtin function; if convenient,
2409 the result should be placed in TARGET. */
2412 expand_builtin_cexpi (tree exp
, rtx target
)
2414 tree fndecl
= get_callee_fndecl (exp
);
2418 location_t loc
= EXPR_LOCATION (exp
);
2420 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2423 arg
= CALL_EXPR_ARG (exp
, 0);
2424 type
= TREE_TYPE (arg
);
2425 mode
= TYPE_MODE (TREE_TYPE (arg
));
2427 /* Try expanding via a sincos optab, fall back to emitting a libcall
2428 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2429 is only generated from sincos, cexp or if we have either of them. */
2430 if (optab_handler (sincos_optab
, mode
) != CODE_FOR_nothing
)
2432 op1
= gen_reg_rtx (mode
);
2433 op2
= gen_reg_rtx (mode
);
2435 op0
= expand_expr (arg
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
2437 /* Compute into op1 and op2. */
2438 expand_twoval_unop (sincos_optab
, op0
, op2
, op1
, 0);
2440 else if (targetm
.libc_has_function (function_sincos
))
2442 tree call
, fn
= NULL_TREE
;
2446 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2447 fn
= builtin_decl_explicit (BUILT_IN_SINCOSF
);
2448 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2449 fn
= builtin_decl_explicit (BUILT_IN_SINCOS
);
2450 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2451 fn
= builtin_decl_explicit (BUILT_IN_SINCOSL
);
2455 op1
= assign_temp (TREE_TYPE (arg
), 1, 1);
2456 op2
= assign_temp (TREE_TYPE (arg
), 1, 1);
2457 op1a
= copy_addr_to_reg (XEXP (op1
, 0));
2458 op2a
= copy_addr_to_reg (XEXP (op2
, 0));
2459 top1
= make_tree (build_pointer_type (TREE_TYPE (arg
)), op1a
);
2460 top2
= make_tree (build_pointer_type (TREE_TYPE (arg
)), op2a
);
2462 /* Make sure not to fold the sincos call again. */
2463 call
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
2464 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn
)),
2465 call
, 3, arg
, top1
, top2
));
2469 tree call
, fn
= NULL_TREE
, narg
;
2470 tree ctype
= build_complex_type (type
);
2472 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2473 fn
= builtin_decl_explicit (BUILT_IN_CEXPF
);
2474 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2475 fn
= builtin_decl_explicit (BUILT_IN_CEXP
);
2476 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2477 fn
= builtin_decl_explicit (BUILT_IN_CEXPL
);
2481 /* If we don't have a decl for cexp create one. This is the
2482 friendliest fallback if the user calls __builtin_cexpi
2483 without full target C99 function support. */
2484 if (fn
== NULL_TREE
)
2487 const char *name
= NULL
;
2489 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2491 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2493 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2496 fntype
= build_function_type_list (ctype
, ctype
, NULL_TREE
);
2497 fn
= build_fn_decl (name
, fntype
);
2500 narg
= fold_build2_loc (loc
, COMPLEX_EXPR
, ctype
,
2501 build_real (type
, dconst0
), arg
);
2503 /* Make sure not to fold the cexp call again. */
2504 call
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
2505 return expand_expr (build_call_nary (ctype
, call
, 1, narg
),
2506 target
, VOIDmode
, EXPAND_NORMAL
);
2509 /* Now build the proper return type. */
2510 return expand_expr (build2 (COMPLEX_EXPR
, build_complex_type (type
),
2511 make_tree (TREE_TYPE (arg
), op2
),
2512 make_tree (TREE_TYPE (arg
), op1
)),
2513 target
, VOIDmode
, EXPAND_NORMAL
);
2516 /* Conveniently construct a function call expression. FNDECL names the
2517 function to be called, N is the number of arguments, and the "..."
   parameters are the argument expressions.  Unlike build_call_expr
2519 this doesn't fold the call, hence it will always return a CALL_EXPR. */
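
/* Usage sketch (illustrative): build_call_nofold_loc (loc, fndecl, 2, dst, src)
   yields the unfolded CALL_EXPR fndecl (dst, src), which the expanders below
   hand directly to expand_expr or expand_call.  */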
2522 build_call_nofold_loc (location_t loc
, tree fndecl
, int n
, ...)
2525 tree fntype
= TREE_TYPE (fndecl
);
2526 tree fn
= build1 (ADDR_EXPR
, build_pointer_type (fntype
), fndecl
);
2529 fn
= build_call_valist (TREE_TYPE (fntype
), fn
, n
, ap
);
2531 SET_EXPR_LOCATION (fn
, loc
);
2535 /* Expand a call to one of the builtin rounding functions gcc defines
2536 as an extension (lfloor and lceil). As these are gcc extensions we
2537 do not need to worry about setting errno to EDOM.
2538 If expanding via optab fails, lower expression to (int)(floor(x)).
2539 EXP is the expression that is a call to the builtin function;
2540 if convenient, the result should be placed in TARGET. */
2543 expand_builtin_int_roundingfn (tree exp
, rtx target
)
2545 convert_optab builtin_optab
;
2548 tree fndecl
= get_callee_fndecl (exp
);
2549 enum built_in_function fallback_fn
;
2550 tree fallback_fndecl
;
2554 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2557 arg
= CALL_EXPR_ARG (exp
, 0);
2559 switch (DECL_FUNCTION_CODE (fndecl
))
2561 CASE_FLT_FN (BUILT_IN_ICEIL
):
2562 CASE_FLT_FN (BUILT_IN_LCEIL
):
2563 CASE_FLT_FN (BUILT_IN_LLCEIL
):
2564 builtin_optab
= lceil_optab
;
2565 fallback_fn
= BUILT_IN_CEIL
;
2568 CASE_FLT_FN (BUILT_IN_IFLOOR
):
2569 CASE_FLT_FN (BUILT_IN_LFLOOR
):
2570 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
2571 builtin_optab
= lfloor_optab
;
2572 fallback_fn
= BUILT_IN_FLOOR
;
2579 /* Make a suitable register to place result in. */
2580 mode
= TYPE_MODE (TREE_TYPE (exp
));
2582 target
= gen_reg_rtx (mode
);
2584 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2585 need to expand the argument again. This way, we will not perform
     side-effects more than once.  */
2587 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2589 op0
= expand_expr (arg
, NULL
, VOIDmode
, EXPAND_NORMAL
);
2593 /* Compute into TARGET. */
2594 if (expand_sfix_optab (target
, op0
, builtin_optab
))
2596 /* Output the entire sequence. */
2597 insns
= get_insns ();
2603 /* If we were unable to expand via the builtin, stop the sequence
2604 (without outputting the insns). */
2607 /* Fall back to floating point rounding optab. */
2608 fallback_fndecl
= mathfn_built_in (TREE_TYPE (arg
), fallback_fn
);
2610 /* For non-C99 targets we may end up without a fallback fndecl here
2611 if the user called __builtin_lfloor directly. In this case emit
2612 a call to the floor/ceil variants nevertheless. This should result
2613 in the best user experience for not full C99 targets. */
2614 if (fallback_fndecl
== NULL_TREE
)
2617 const char *name
= NULL
;
2619 switch (DECL_FUNCTION_CODE (fndecl
))
2621 case BUILT_IN_ICEIL
:
2622 case BUILT_IN_LCEIL
:
2623 case BUILT_IN_LLCEIL
:
2626 case BUILT_IN_ICEILF
:
2627 case BUILT_IN_LCEILF
:
2628 case BUILT_IN_LLCEILF
:
2631 case BUILT_IN_ICEILL
:
2632 case BUILT_IN_LCEILL
:
2633 case BUILT_IN_LLCEILL
:
2636 case BUILT_IN_IFLOOR
:
2637 case BUILT_IN_LFLOOR
:
2638 case BUILT_IN_LLFLOOR
:
2641 case BUILT_IN_IFLOORF
:
2642 case BUILT_IN_LFLOORF
:
2643 case BUILT_IN_LLFLOORF
:
2646 case BUILT_IN_IFLOORL
:
2647 case BUILT_IN_LFLOORL
:
2648 case BUILT_IN_LLFLOORL
:
2655 fntype
= build_function_type_list (TREE_TYPE (arg
),
2656 TREE_TYPE (arg
), NULL_TREE
);
2657 fallback_fndecl
= build_fn_decl (name
, fntype
);
2660 exp
= build_call_nofold_loc (EXPR_LOCATION (exp
), fallback_fndecl
, 1, arg
);
2662 tmp
= expand_normal (exp
);
2663 tmp
= maybe_emit_group_store (tmp
, TREE_TYPE (exp
));
2665 /* Truncate the result of floating point optab to integer
2666 via expand_fix (). */
2667 target
= gen_reg_rtx (mode
);
2668 expand_fix (target
, tmp
, 0);
2673 /* Expand a call to one of the builtin math functions doing integer
2675 Return 0 if a normal call should be emitted rather than expanding the
2676 function in-line. EXP is the expression that is a call to the builtin
2677 function; if convenient, the result should be placed in TARGET. */
2680 expand_builtin_int_roundingfn_2 (tree exp
, rtx target
)
2682 convert_optab builtin_optab
;
2685 tree fndecl
= get_callee_fndecl (exp
);
2688 enum built_in_function fallback_fn
= BUILT_IN_NONE
;
2690 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2693 arg
= CALL_EXPR_ARG (exp
, 0);
2695 switch (DECL_FUNCTION_CODE (fndecl
))
2697 CASE_FLT_FN (BUILT_IN_IRINT
):
2698 fallback_fn
= BUILT_IN_LRINT
;
2700 CASE_FLT_FN (BUILT_IN_LRINT
):
2701 CASE_FLT_FN (BUILT_IN_LLRINT
):
2702 builtin_optab
= lrint_optab
;
2705 CASE_FLT_FN (BUILT_IN_IROUND
):
2706 fallback_fn
= BUILT_IN_LROUND
;
2708 CASE_FLT_FN (BUILT_IN_LROUND
):
2709 CASE_FLT_FN (BUILT_IN_LLROUND
):
2710 builtin_optab
= lround_optab
;
2717 /* There's no easy way to detect the case we need to set EDOM. */
2718 if (flag_errno_math
&& fallback_fn
== BUILT_IN_NONE
)
2721 /* Make a suitable register to place result in. */
2722 mode
= TYPE_MODE (TREE_TYPE (exp
));
2724 /* There's no easy way to detect the case we need to set EDOM. */
2725 if (!flag_errno_math
)
2727 rtx result
= gen_reg_rtx (mode
);
2729 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2730 need to expand the argument again. This way, we will not perform
     side-effects more than once.  */
2732 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2734 op0
= expand_expr (arg
, NULL
, VOIDmode
, EXPAND_NORMAL
);
2738 if (expand_sfix_optab (result
, op0
, builtin_optab
))
2740 /* Output the entire sequence. */
2741 insns
= get_insns ();
2747 /* If we were unable to expand via the builtin, stop the sequence
2748 (without outputting the insns) and call to the library function
2749 with the stabilized argument list. */
2753 if (fallback_fn
!= BUILT_IN_NONE
)
2755 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2756 targets, (int) round (x) should never be transformed into
2757 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2758 a call to lround in the hope that the target provides at least some
2759 C99 functions. This should result in the best user experience for
2760 not full C99 targets. */
2761 tree fallback_fndecl
= mathfn_built_in_1
2762 (TREE_TYPE (arg
), as_combined_fn (fallback_fn
), 0);
2764 exp
= build_call_nofold_loc (EXPR_LOCATION (exp
),
2765 fallback_fndecl
, 1, arg
);
2767 target
= expand_call (exp
, NULL_RTX
, target
== const0_rtx
);
2768 target
= maybe_emit_group_store (target
, TREE_TYPE (exp
));
2769 return convert_to_mode (mode
, target
, 0);
2772 return expand_call (exp
, target
, target
== const0_rtx
);
2775 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2776 a normal call should be emitted rather than expanding the function
2777 in-line. EXP is the expression that is a call to the builtin
2778 function; if convenient, the result should be placed in TARGET. */
2781 expand_builtin_powi (tree exp
, rtx target
)
2788 if (! validate_arglist (exp
, REAL_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
2791 arg0
= CALL_EXPR_ARG (exp
, 0);
2792 arg1
= CALL_EXPR_ARG (exp
, 1);
2793 mode
= TYPE_MODE (TREE_TYPE (exp
));
2795 /* Emit a libcall to libgcc. */
2797 /* Mode of the 2nd argument must match that of an int. */
2798 mode2
= int_mode_for_size (INT_TYPE_SIZE
, 0).require ();
2800 if (target
== NULL_RTX
)
2801 target
= gen_reg_rtx (mode
);
2803 op0
= expand_expr (arg0
, NULL_RTX
, mode
, EXPAND_NORMAL
);
2804 if (GET_MODE (op0
) != mode
)
2805 op0
= convert_to_mode (mode
, op0
, 0);
2806 op1
= expand_expr (arg1
, NULL_RTX
, mode2
, EXPAND_NORMAL
);
2807 if (GET_MODE (op1
) != mode2
)
2808 op1
= convert_to_mode (mode2
, op1
, 0);
2810 target
= emit_library_call_value (optab_libfunc (powi_optab
, mode
),
2811 target
, LCT_CONST
, mode
,
2812 op0
, mode
, op1
, mode2
);
2817 /* Expand expression EXP which is a call to the strlen builtin. Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
2819 try to get the result in TARGET, if convenient. */
2822 expand_builtin_strlen (tree exp
, rtx target
,
2823 machine_mode target_mode
)
2825 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
2829 struct expand_operand ops
[4];
2832 tree src
= CALL_EXPR_ARG (exp
, 0);
2834 rtx_insn
*before_strlen
;
2835 machine_mode insn_mode
;
2836 enum insn_code icode
= CODE_FOR_nothing
;
2839 /* If the length can be computed at compile-time, return it. */
2840 len
= c_strlen (src
, 0);
2842 return expand_expr (len
, target
, target_mode
, EXPAND_NORMAL
);
2844 /* If the length can be computed at compile-time and is constant
2845 integer, but there are side-effects in src, evaluate
2846 src for side-effects, then return len.
2847 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2848 can be optimized into: i++; x = 3; */
2849 len
= c_strlen (src
, 1);
2850 if (len
&& TREE_CODE (len
) == INTEGER_CST
)
2852 expand_expr (src
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
2853 return expand_expr (len
, target
, target_mode
, EXPAND_NORMAL
);
2856 align
= get_pointer_alignment (src
) / BITS_PER_UNIT
;
2858 /* If SRC is not a pointer type, don't do this operation inline. */
2862 /* Bail out if we can't compute strlen in the right mode. */
2863 FOR_EACH_MODE_FROM (insn_mode
, target_mode
)
2865 icode
= optab_handler (strlen_optab
, insn_mode
);
2866 if (icode
!= CODE_FOR_nothing
)
2869 if (insn_mode
== VOIDmode
)
2872 /* Make a place to hold the source address. We will not expand
2873 the actual source until we are sure that the expansion will
2874 not fail -- there are trees that cannot be expanded twice. */
2875 src_reg
= gen_reg_rtx (Pmode
);
2877 /* Mark the beginning of the strlen sequence so we can emit the
2878 source operand later. */
2879 before_strlen
= get_last_insn ();
2881 create_output_operand (&ops
[0], target
, insn_mode
);
2882 create_fixed_operand (&ops
[1], gen_rtx_MEM (BLKmode
, src_reg
));
2883 create_integer_operand (&ops
[2], 0);
2884 create_integer_operand (&ops
[3], align
);
2885 if (!maybe_expand_insn (icode
, 4, ops
))
2888 /* Now that we are assured of success, expand the source. */
2890 pat
= expand_expr (src
, src_reg
, Pmode
, EXPAND_NORMAL
);
2893 #ifdef POINTERS_EXTEND_UNSIGNED
2894 if (GET_MODE (pat
) != Pmode
)
2895 pat
= convert_to_mode (Pmode
, pat
,
2896 POINTERS_EXTEND_UNSIGNED
);
2898 emit_move_insn (src_reg
, pat
);
2904 emit_insn_after (pat
, before_strlen
);
2906 emit_insn_before (pat
, get_insns ());
2908 /* Return the value in the proper mode for this function. */
2909 if (GET_MODE (ops
[0].value
) == target_mode
)
2910 target
= ops
[0].value
;
2911 else if (target
!= 0)
2912 convert_move (target
, ops
[0].value
, 0);
2914 target
= convert_to_mode (target_mode
, ops
[0].value
, 0);
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

static rtx
builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
			 scalar_int_mode mode)
{
  const char *str = (const char *) data;

  gcc_assert (offset >= 0
	      && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
		  <= strlen (str) + 1));

  return c_readstr (str + offset, mode);
}
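
/* E.g. (illustrative): with DATA pointing to "hello" and OFFSET 1, a QImode
   read returns the CONST_INT for 'e'; c_readstr assembles GET_MODE_SIZE (MODE)
   bytes starting at that offset into an immediate in target byte order.  */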
/* LEN specifies the length of the block of the memcpy/memset operation.
   Figure out its range and put it into MIN_SIZE/MAX_SIZE.
   In some cases we can make a very likely guess on the max size, then we
   set it into PROBABLE_MAX_SIZE.  */

static void
determine_block_size (tree len, rtx len_rtx,
		      unsigned HOST_WIDE_INT *min_size,
		      unsigned HOST_WIDE_INT *max_size,
		      unsigned HOST_WIDE_INT *probable_max_size)
{
  if (CONST_INT_P (len_rtx))
    {
      *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
      return;
    }
  else
    {
      wide_int min, max;
      enum value_range_type range_type = VR_UNDEFINED;

      /* Determine bounds from the type.  */
      if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
	*min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
      else
	*min_size = 0;
      if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
	*probable_max_size = *max_size
	  = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
      else
	*probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));

      if (TREE_CODE (len) == SSA_NAME)
	range_type = get_range_info (len, &min, &max);
      if (range_type == VR_RANGE)
	{
	  if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
	    *min_size = min.to_uhwi ();
	  if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
	    *probable_max_size = *max_size = max.to_uhwi ();
	}
      else if (range_type == VR_ANTI_RANGE)
	{
	  /* Anti range 0...N lets us determine the minimal size to N+1.  */
	  if (min == 0)
	    {
	      if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
		*min_size = max.to_uhwi () + 1;
	    }
	  /* Code like

	     int n;
	     if (n < 100)
	       memcpy (a, b, n)

	     produces an anti range allowing negative values of N.  We still
	     can use the information and make a guess that N is not negative.
	     */
	  else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
	    *probable_max_size = min.to_uhwi () - 1;
	}
    }
  gcc_checking_assert (*max_size <=
		       (unsigned HOST_WIDE_INT)
			  GET_MODE_MASK (GET_MODE (len_rtx)));
}
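
/* Example (illustrative): for memcpy (d, s, n) where range info proves
   4 <= n <= 32, *MIN_SIZE becomes 4 while *MAX_SIZE and *PROBABLE_MAX_SIZE
   become 32; with no information all three fall back to the bounds implied
   by the type of the length and the mode of LEN_RTX.  */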
/* Try to verify that the sizes and lengths of the arguments to a string
   manipulation function given by EXP are within valid bounds and that
   the operation does not lead to buffer overflow.  Arguments other than
   EXP may be null.  When non-null, the arguments have the following
   meaning:
   SIZE is the user-supplied size argument to the function (such as in
   memcpy(d, s, SIZE) or strncpy(d, s, SIZE)).  It specifies the exact
   number of bytes to write.
   MAXLEN is the user-supplied bound on the length of the source sequence
   (such as in strncat(d, s, N)).  It specifies the upper limit on the number
   of bytes to write.
   SRC is the source string (such as in strcpy(d, s)) when the expression
   EXP is a string function call (as opposed to a memory call like memcpy).
   As an exception, SRC can also be an integer denoting the precomputed
   size of the source string or object (for functions like memcpy).
   OBJSIZE is the size of the destination object specified by the last
   argument to the _chk builtins, typically resulting from the expansion
   of __builtin_object_size (such as in __builtin___strcpy_chk(d, s,
   OBJSIZE)).

   When SIZE is null, LEN is checked to verify that it doesn't exceed
   the maximum object size.

   If the call is successfully verified as safe from buffer overflow
   the function returns true, otherwise false.  */
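
/* For instance (illustrative), for

     char d[3];
     memcpy (d, s, 5);

   check_memop_sizes below passes SIZE == 5 and OBJSIZE == 3 here, and the
   constant excess triggers the "writing 5 bytes into a region of size 3"
   style warning.  */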
3031 check_sizes (int opt
, tree exp
, tree size
, tree maxlen
, tree src
, tree objsize
)
3033 /* The size of the largest object is half the address space, or
3034 SSIZE_MAX. (This is way too permissive.) */
3035 tree maxobjsize
= TYPE_MAX_VALUE (ssizetype
);
3037 tree slen
= NULL_TREE
;
3039 tree range
[2] = { NULL_TREE
, NULL_TREE
};
3041 /* Set to true when the exact number of bytes written by a string
3042 function like strcpy is not known and the only thing that is
3043 known is that it must be at least one (for the terminating nul). */
3044 bool at_least_one
= false;
3047 /* SRC is normally a pointer to string but as a special case
3048 it can be an integer denoting the length of a string. */
3049 if (POINTER_TYPE_P (TREE_TYPE (src
)))
3051 /* Try to determine the range of lengths the source string
3052 refers to. If it can be determined and is less than
3053 the upper bound given by MAXLEN add one to it for
3054 the terminating nul. Otherwise, set it to one for
3055 the same reason, or to MAXLEN as appropriate. */
3056 get_range_strlen (src
, range
);
3057 if (range
[0] && (!maxlen
|| TREE_CODE (maxlen
) == INTEGER_CST
))
3059 if (maxlen
&& tree_int_cst_le (maxlen
, range
[0]))
3060 range
[0] = range
[1] = maxlen
;
3062 range
[0] = fold_build2 (PLUS_EXPR
, size_type_node
,
3063 range
[0], size_one_node
);
3065 if (maxlen
&& tree_int_cst_le (maxlen
, range
[1]))
3067 else if (!integer_all_onesp (range
[1]))
3068 range
[1] = fold_build2 (PLUS_EXPR
, size_type_node
,
3069 range
[1], size_one_node
);
3075 at_least_one
= true;
3076 slen
= size_one_node
;
3083 if (!size
&& !maxlen
)
3085 /* When the only available piece of data is the object size
3086 there is nothing to do. */
3090 /* Otherwise, when the length of the source sequence is known
   (as with strlen), set SIZE to it.  */
3097 objsize
= maxobjsize
;
3099 /* The SIZE is exact if it's non-null, constant, and in range of
3100 unsigned HOST_WIDE_INT. */
3101 bool exactsize
= size
&& tree_fits_uhwi_p (size
);
3104 get_size_range (size
, range
);
3106 /* First check the number of bytes to be written against the maximum
3108 if (range
[0] && tree_int_cst_lt (maxobjsize
, range
[0]))
3110 location_t loc
= tree_nonartificial_location (exp
);
3111 loc
= expansion_point_location_if_in_system_header (loc
);
3113 if (range
[0] == range
[1])
3114 warning_at (loc
, opt
,
3115 "%K%qD specified size %E "
3116 "exceeds maximum object size %E",
3117 exp
, get_callee_fndecl (exp
), range
[0], maxobjsize
);
3119 warning_at (loc
, opt
,
3120 "%K%qD specified size between %E and %E "
3121 "exceeds maximum object size %E",
3122 exp
, get_callee_fndecl (exp
),
3123 range
[0], range
[1], maxobjsize
);
3127 /* Next check the number of bytes to be written against the destination
3129 if (range
[0] || !exactsize
|| integer_all_onesp (size
))
3132 && ((tree_fits_uhwi_p (objsize
)
3133 && tree_int_cst_lt (objsize
, range
[0]))
3134 || (tree_fits_uhwi_p (size
)
3135 && tree_int_cst_lt (size
, range
[0]))))
3137 location_t loc
= tree_nonartificial_location (exp
);
3138 loc
= expansion_point_location_if_in_system_header (loc
);
3140 if (size
== slen
&& at_least_one
)
3142 /* This is a call to strcpy with a destination of 0 size
3143 and a source of unknown length. The call will write
3144 at least one byte past the end of the destination. */
3145 warning_at (loc
, opt
,
3146 "%K%qD writing %E or more bytes into a region "
3147 "of size %E overflows the destination",
3148 exp
, get_callee_fndecl (exp
), range
[0], objsize
);
3150 else if (tree_int_cst_equal (range
[0], range
[1]))
3151 warning_at (loc
, opt
,
3152 (integer_onep (range
[0])
3153 ? G_("%K%qD writing %E byte into a region "
3154 "of size %E overflows the destination")
3155 : G_("%K%qD writing %E bytes into a region "
3156 "of size %E overflows the destination")),
3157 exp
, get_callee_fndecl (exp
), range
[0], objsize
);
3158 else if (tree_int_cst_sign_bit (range
[1]))
3160 /* Avoid printing the upper bound if it's invalid. */
3161 warning_at (loc
, opt
,
3162 "%K%qD writing %E or more bytes into a region "
3163 "of size %E overflows the destination",
3164 exp
, get_callee_fndecl (exp
), range
[0], objsize
);
3167 warning_at (loc
, opt
,
3168 "%K%qD writing between %E and %E bytes into "
3169 "a region of size %E overflows the destination",
3170 exp
, get_callee_fndecl (exp
), range
[0], range
[1],
3173 /* Return error when an overflow has been detected. */
3178 /* Check the maximum length of the source sequence against the size
3179 of the destination object if known, or against the maximum size
3183 get_size_range (maxlen
, range
);
3185 if (range
[0] && objsize
&& tree_fits_uhwi_p (objsize
))
3187 location_t loc
= tree_nonartificial_location (exp
);
3188 loc
= expansion_point_location_if_in_system_header (loc
);
3190 if (tree_int_cst_lt (maxobjsize
, range
[0]))
3192 /* Warn about crazy big sizes first since that's more
3193 likely to be meaningful than saying that the bound
3194 is greater than the object size if both are big. */
3195 if (range
[0] == range
[1])
3196 warning_at (loc
, opt
,
3197 "%K%qD specified bound %E "
3198 "exceeds maximum object size %E",
3199 exp
, get_callee_fndecl (exp
),
3200 range
[0], maxobjsize
);
3202 warning_at (loc
, opt
,
3203 "%K%qD specified bound between %E and %E "
3204 "exceeds maximum object size %E",
3205 exp
, get_callee_fndecl (exp
),
3206 range
[0], range
[1], maxobjsize
);
3211 if (objsize
!= maxobjsize
&& tree_int_cst_lt (objsize
, range
[0]))
3213 if (tree_int_cst_equal (range
[0], range
[1]))
3214 warning_at (loc
, opt
,
3215 "%K%qD specified bound %E "
3216 "exceeds destination size %E",
3217 exp
, get_callee_fndecl (exp
),
3220 warning_at (loc
, opt
,
3221 "%K%qD specified bound between %E and %E "
3222 "exceeds destination size %E",
3223 exp
, get_callee_fndecl (exp
),
3224 range
[0], range
[1], objsize
);
3233 && tree_int_cst_lt (slen
, range
[0]))
3235 location_t loc
= tree_nonartificial_location (exp
);
3237 if (tree_int_cst_equal (range
[0], range
[1]))
3238 warning_at (loc
, opt
,
3239 (tree_int_cst_equal (range
[0], integer_one_node
)
3240 ? G_("%K%qD reading %E byte from a region of size %E")
3241 : G_("%K%qD reading %E bytes from a region of size %E")),
3242 exp
, get_callee_fndecl (exp
), range
[0], slen
);
3243 else if (tree_int_cst_sign_bit (range
[1]))
3245 /* Avoid printing the upper bound if it's invalid. */
3246 warning_at (loc
, opt
,
3247 "%K%qD reading %E or more bytes from a region "
3249 exp
, get_callee_fndecl (exp
), range
[0], slen
);
3252 warning_at (loc
, opt
,
3253 "%K%qD reading between %E and %E bytes from a region "
3255 exp
, get_callee_fndecl (exp
), range
[0], range
[1], slen
);
/* Helper to compute the size of the object referenced by the DEST
   expression which must be of pointer type, using Object Size type
   OSTYPE (only the least significant 2 bits are used).  Return
   the size of the object if successful or NULL when the size cannot
   be determined.  */

static tree
compute_objsize (tree dest, int ostype)
{
  unsigned HOST_WIDE_INT size;
  if (compute_builtin_object_size (dest, ostype & 3, &size))
    return build_int_cst (sizetype, size);

  return NULL_TREE;
}
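
/* E.g. (illustrative): for char buf[8], compute_objsize on &buf[2] with
   OSTYPE 0 yields the sizetype constant 6 (the space remaining in the
   object), and NULL_TREE when the size cannot be determined.  */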
/* Helper to determine and check the sizes of the source and the destination
   of calls to __builtin_{bzero,memcpy,mempcpy,memset} calls.  EXP is the
   call expression, DEST is the destination argument, SRC is the source
   argument or null, and LEN is the number of bytes.  Use Object Size type-0
   regardless of the OPT_Wstringop_overflow_ setting.  Return true on success
   (no overflow or invalid sizes), false otherwise.  */

static bool
check_memop_sizes (tree exp, tree dest, tree src, tree size)
{
  if (!warn_stringop_overflow)
    return true;

  /* For functions like memset and memcpy that operate on raw memory
     try to determine the size of the largest source and destination
     object using type-0 Object Size regardless of the object size
     type specified by the option.  */
  tree srcsize = src ? compute_objsize (src, 0) : NULL_TREE;
  tree dstsize = compute_objsize (dest, 0);

  return check_sizes (OPT_Wstringop_overflow_, exp,
		      size, /*maxlen=*/NULL_TREE, srcsize, dstsize);
}
/* Validate memchr arguments without performing any expansion.
   Return NULL_RTX.  */

static rtx
expand_builtin_memchr (tree exp, rtx)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree len = CALL_EXPR_ARG (exp, 2);

  /* Diagnose calls where the specified length exceeds the size
     of the object.  */
  if (warn_stringop_overflow)
    {
      tree size = compute_objsize (arg1, 0);
      check_sizes (OPT_Wstringop_overflow_,
		   exp, len, /*maxlen=*/NULL_TREE,
		   size, /*objsize=*/NULL_TREE);
    }

  return NULL_RTX;
}
/* Expand a call EXP to the memcpy builtin.
   Return NULL_RTX if we failed, the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memcpy (tree exp, rtx target)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);

  check_memop_sizes (exp, dest, src, len);

  return expand_builtin_memory_copy_args (dest, src, len, target, exp,
					  /*endp=*/ 0);
}
/* Check a call EXP to the memmove built-in for validity.
   Return NULL_RTX on both success and failure.  */

static rtx
expand_builtin_memmove (tree exp, rtx)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);

  check_memop_sizes (exp, dest, src, len);

  return NULL_RTX;
}
3369 /* Expand an instrumented call EXP to the memcpy builtin.
3370 Return NULL_RTX if we failed, the caller should emit a normal call,
3371 otherwise try to get the result in TARGET, if convenient (and in
3372 mode MODE if that's convenient). */
3375 expand_builtin_memcpy_with_bounds (tree exp
, rtx target
)
3377 if (!validate_arglist (exp
,
3378 POINTER_TYPE
, POINTER_BOUNDS_TYPE
,
3379 POINTER_TYPE
, POINTER_BOUNDS_TYPE
,
3380 INTEGER_TYPE
, VOID_TYPE
))
3384 tree dest
= CALL_EXPR_ARG (exp
, 0);
3385 tree src
= CALL_EXPR_ARG (exp
, 2);
3386 tree len
= CALL_EXPR_ARG (exp
, 4);
3387 rtx res
= expand_builtin_memory_copy_args (dest
, src
, len
, target
, exp
,
3390 /* Return src bounds with the result. */
3393 rtx bnd
= force_reg (targetm
.chkp_bound_mode (),
3394 expand_normal (CALL_EXPR_ARG (exp
, 1)));
3395 res
= chkp_join_splitted_slot (res
, bnd
);
/* Expand a call EXP to the mempcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  If ENDP is 0 return the
   destination pointer, if ENDP is 1 return the end pointer ala
   mempcpy, and if ENDP is 2 return the end pointer minus one ala
   stpcpy.  */

static rtx
expand_builtin_mempcpy (tree exp, rtx target)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);

  /* Avoid expanding mempcpy into memcpy when the call is determined
     to overflow the buffer.  This also prevents the same overflow
     from being diagnosed again when expanding memcpy.  */
  if (!check_memop_sizes (exp, dest, src, len))
    return NULL_RTX;

  return expand_builtin_mempcpy_args (dest, src, len,
				      target, exp, /*endp=*/ 1);
}
3430 /* Expand an instrumented call EXP to the mempcpy builtin.
3431 Return NULL_RTX if we failed, the caller should emit a normal call,
3432 otherwise try to get the result in TARGET, if convenient (and in
3433 mode MODE if that's convenient). */
3436 expand_builtin_mempcpy_with_bounds (tree exp
, rtx target
)
3438 if (!validate_arglist (exp
,
3439 POINTER_TYPE
, POINTER_BOUNDS_TYPE
,
3440 POINTER_TYPE
, POINTER_BOUNDS_TYPE
,
3441 INTEGER_TYPE
, VOID_TYPE
))
3445 tree dest
= CALL_EXPR_ARG (exp
, 0);
3446 tree src
= CALL_EXPR_ARG (exp
, 2);
3447 tree len
= CALL_EXPR_ARG (exp
, 4);
3448 rtx res
= expand_builtin_mempcpy_args (dest
, src
, len
, target
,
3451 /* Return src bounds with the result. */
3454 rtx bnd
= force_reg (targetm
.chkp_bound_mode (),
3455 expand_normal (CALL_EXPR_ARG (exp
, 1)));
3456 res
= chkp_join_splitted_slot (res
, bnd
);
3462 /* Helper function to do the actual work for expand of memory copy family
   functions (memcpy, mempcpy, stpcpy).  Expansion should assign LEN bytes
3464 of memory from SRC to DEST and assign to TARGET if convenient.
3465 If ENDP is 0 return the
3466 destination pointer, if ENDP is 1 return the end pointer ala
3467 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3471 expand_builtin_memory_copy_args (tree dest
, tree src
, tree len
,
3472 rtx target
, tree exp
, int endp
)
3474 const char *src_str
;
3475 unsigned int src_align
= get_pointer_alignment (src
);
3476 unsigned int dest_align
= get_pointer_alignment (dest
);
3477 rtx dest_mem
, src_mem
, dest_addr
, len_rtx
;
3478 HOST_WIDE_INT expected_size
= -1;
3479 unsigned int expected_align
= 0;
3480 unsigned HOST_WIDE_INT min_size
;
3481 unsigned HOST_WIDE_INT max_size
;
3482 unsigned HOST_WIDE_INT probable_max_size
;
3484 /* If DEST is not a pointer type, call the normal function. */
3485 if (dest_align
== 0)
3488 /* If either SRC is not a pointer type, don't do this
3489 operation in-line. */
3493 if (currently_expanding_gimple_stmt
)
3494 stringop_block_profile (currently_expanding_gimple_stmt
,
3495 &expected_align
, &expected_size
);
3497 if (expected_align
< dest_align
)
3498 expected_align
= dest_align
;
3499 dest_mem
= get_memory_rtx (dest
, len
);
3500 set_mem_align (dest_mem
, dest_align
);
3501 len_rtx
= expand_normal (len
);
3502 determine_block_size (len
, len_rtx
, &min_size
, &max_size
,
3503 &probable_max_size
);
3504 src_str
= c_getstr (src
);
3506 /* If SRC is a string constant and block move would be done
3507 by pieces, we can avoid loading the string from memory
     and only store the computed constants.  */
3510 && CONST_INT_P (len_rtx
)
3511 && (unsigned HOST_WIDE_INT
) INTVAL (len_rtx
) <= strlen (src_str
) + 1
3512 && can_store_by_pieces (INTVAL (len_rtx
), builtin_memcpy_read_str
,
3513 CONST_CAST (char *, src_str
),
3516 dest_mem
= store_by_pieces (dest_mem
, INTVAL (len_rtx
),
3517 builtin_memcpy_read_str
,
3518 CONST_CAST (char *, src_str
),
3519 dest_align
, false, endp
);
3520 dest_mem
= force_operand (XEXP (dest_mem
, 0), target
);
3521 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3525 src_mem
= get_memory_rtx (src
, len
);
3526 set_mem_align (src_mem
, src_align
);
3528 /* Copy word part most expediently. */
3529 dest_addr
= emit_block_move_hints (dest_mem
, src_mem
, len_rtx
,
3530 CALL_EXPR_TAILCALL (exp
)
3531 && (endp
== 0 || target
== const0_rtx
)
3532 ? BLOCK_OP_TAILCALL
: BLOCK_OP_NORMAL
,
3533 expected_align
, expected_size
,
3534 min_size
, max_size
, probable_max_size
);
3538 dest_addr
= force_operand (XEXP (dest_mem
, 0), target
);
3539 dest_addr
= convert_memory_address (ptr_mode
, dest_addr
);
3542 if (endp
&& target
!= const0_rtx
)
3544 dest_addr
= gen_rtx_PLUS (ptr_mode
, dest_addr
, len_rtx
);
3545 /* stpcpy pointer to last byte. */
3547 dest_addr
= gen_rtx_MINUS (ptr_mode
, dest_addr
, const1_rtx
);
static rtx
expand_builtin_mempcpy_args (tree dest, tree src, tree len,
			     rtx target, tree orig_exp, int endp)
{
  return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
					  endp);
}
3561 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3562 we failed, the caller should emit a normal call, otherwise try to
3563 get the result in TARGET, if convenient. If ENDP is 0 return the
3564 destination pointer, if ENDP is 1 return the end pointer ala
3565 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3569 expand_movstr (tree dest
, tree src
, rtx target
, int endp
)
3571 struct expand_operand ops
[3];
3575 if (!targetm
.have_movstr ())
3578 dest_mem
= get_memory_rtx (dest
, NULL
);
3579 src_mem
= get_memory_rtx (src
, NULL
);
3582 target
= force_reg (Pmode
, XEXP (dest_mem
, 0));
3583 dest_mem
= replace_equiv_address (dest_mem
, target
);
3586 create_output_operand (&ops
[0], endp
? target
: NULL_RTX
, Pmode
);
3587 create_fixed_operand (&ops
[1], dest_mem
);
3588 create_fixed_operand (&ops
[2], src_mem
);
3589 if (!maybe_expand_insn (targetm
.code_for_movstr
, 3, ops
))
3592 if (endp
&& target
!= const0_rtx
)
3594 target
= ops
[0].value
;
3595 /* movstr is supposed to set end to the address of the NUL
3596 terminator. If the caller requested a mempcpy-like return value,
3600 rtx tem
= plus_constant (GET_MODE (target
),
3601 gen_lowpart (GET_MODE (target
), target
), 1);
3602 emit_move_insn (target
, force_operand (tem
, NULL_RTX
));
/* Do some very basic size validation of a call to the strcat builtin
   given by EXP.  Return NULL_RTX to have the built-in expand to a call
   to the library function.  */

static rtx
expand_builtin_strcat (tree exp, rtx)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
      || !warn_stringop_overflow)
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);

  /* There is no way here to determine the length of the string in
     the destination to which the SRC string is being appended so
     just diagnose cases when the source string is longer than
     the destination object.  */

  tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);

  check_sizes (OPT_Wstringop_overflow_,
	       exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, src, destsize);

  return NULL_RTX;
}
/* Expand expression EXP, which is a call to the strcpy builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient (and in mode MODE if that's
   convenient).  */

static rtx
expand_builtin_strcpy (tree exp, rtx target)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);

  if (warn_stringop_overflow)
    {
      tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
      check_sizes (OPT_Wstringop_overflow_,
		   exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, src, destsize);
    }

  return expand_builtin_strcpy_args (dest, src, target);
}
/* Helper function to do the actual work for expand_builtin_strcpy.  The
   arguments to the builtin_strcpy call DEST and SRC are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_strcpy.  */

static rtx
expand_builtin_strcpy_args (tree dest, tree src, rtx target)
{
  return expand_movstr (dest, src, target, /*endp=*/0);
}
3671 /* Expand a call EXP to the stpcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
3673 otherwise try to get the result in TARGET, if convenient (and in
3674 mode MODE if that's convenient). */
3677 expand_builtin_stpcpy (tree exp
, rtx target
, machine_mode mode
)
3680 location_t loc
= EXPR_LOCATION (exp
);
3682 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3685 dst
= CALL_EXPR_ARG (exp
, 0);
3686 src
= CALL_EXPR_ARG (exp
, 1);
3688 if (warn_stringop_overflow
)
3690 tree destsize
= compute_objsize (dst
, warn_stringop_overflow
- 1);
3691 check_sizes (OPT_Wstringop_overflow_
,
3692 exp
, /*size=*/NULL_TREE
, /*maxlen=*/NULL_TREE
, src
, destsize
);
3695 /* If return value is ignored, transform stpcpy into strcpy. */
3696 if (target
== const0_rtx
&& builtin_decl_implicit (BUILT_IN_STRCPY
))
3698 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
3699 tree result
= build_call_nofold_loc (loc
, fn
, 2, dst
, src
);
3700 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3707 /* Ensure we get an actual string whose length can be evaluated at
3708 compile-time, not an expression containing a string. This is
3709 because the latter will potentially produce pessimized code
3710 when used to produce the return value. */
3711 if (! c_getstr (src
) || ! (len
= c_strlen (src
, 0)))
3712 return expand_movstr (dst
, src
, target
, /*endp=*/2);
3714 lenp1
= size_binop_loc (loc
, PLUS_EXPR
, len
, ssize_int (1));
3715 ret
= expand_builtin_mempcpy_args (dst
, src
, lenp1
,
3716 target
, exp
, /*endp=*/2);
3721 if (TREE_CODE (len
) == INTEGER_CST
)
3723 rtx len_rtx
= expand_normal (len
);
3725 if (CONST_INT_P (len_rtx
))
3727 ret
= expand_builtin_strcpy_args (dst
, src
, target
);
3733 if (mode
!= VOIDmode
)
3734 target
= gen_reg_rtx (mode
);
3736 target
= gen_reg_rtx (GET_MODE (ret
));
3738 if (GET_MODE (target
) != GET_MODE (ret
))
3739 ret
= gen_lowpart (GET_MODE (target
), ret
);
3741 ret
= plus_constant (GET_MODE (ret
), ret
, INTVAL (len_rtx
));
3742 ret
= emit_move_insn (target
, force_operand (ret
, NULL_RTX
));
3750 return expand_movstr (dst
, src
, target
, /*endp=*/2);
/* Check a call EXP to the stpncpy built-in for validity.
   Return NULL_RTX on both success and failure.  */

static rtx
expand_builtin_stpncpy (tree exp, rtx)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
      || !warn_stringop_overflow)
    return NULL_RTX;

  /* The source and destination of the call.  */
  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);

  /* The exact number of bytes to write (not the maximum).  */
  tree len = CALL_EXPR_ARG (exp, 2);

  /* The size of the destination object.  */
  tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);

  check_sizes (OPT_Wstringop_overflow_,
	       exp, len, /*maxlen=*/NULL_TREE, src, destsize);

  return NULL_RTX;
}
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

static rtx
builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
			  scalar_int_mode mode)
{
  const char *str = (const char *) data;

  if ((unsigned HOST_WIDE_INT) offset > strlen (str))
    return const0_rtx;

  return c_readstr (str + offset, mode);
}
3797 /* Helper to check the sizes of sequences and the destination of calls
3798 to __builtin_strncat and __builtin___strncat_chk. Returns true on
3799 success (no overflow or invalid sizes), false otherwise. */
3802 check_strncat_sizes (tree exp
, tree objsize
)
3804 tree dest
= CALL_EXPR_ARG (exp
, 0);
3805 tree src
= CALL_EXPR_ARG (exp
, 1);
3806 tree maxlen
= CALL_EXPR_ARG (exp
, 2);
3808 /* Try to determine the range of lengths that the source expression
3811 get_range_strlen (src
, lenrange
);
3813 /* Try to verify that the destination is big enough for the shortest
3816 if (!objsize
&& warn_stringop_overflow
)
3818 /* If it hasn't been provided by __strncat_chk, try to determine
3819 the size of the destination object into which the source is
3821 objsize
= compute_objsize (dest
, warn_stringop_overflow
- 1);
3824 /* Add one for the terminating nul. */
3825 tree srclen
= (lenrange
[0]
3826 ? fold_build2 (PLUS_EXPR
, size_type_node
, lenrange
[0],
3830 /* Strncat copies at most MAXLEN bytes and always appends the terminating
3831 nul so the specified upper bound should never be equal to (or greater
3832 than) the size of the destination. */
3833 if (tree_fits_uhwi_p (maxlen
) && tree_fits_uhwi_p (objsize
)
3834 && tree_int_cst_equal (objsize
, maxlen
))
3836 location_t loc
= tree_nonartificial_location (exp
);
3837 loc
= expansion_point_location_if_in_system_header (loc
);
3839 warning_at (loc
, OPT_Wstringop_overflow_
,
3840 "%K%qD specified bound %E equals destination size",
3841 exp
, get_callee_fndecl (exp
), maxlen
);
3847 || (maxlen
&& tree_fits_uhwi_p (maxlen
)
3848 && tree_fits_uhwi_p (srclen
)
3849 && tree_int_cst_lt (maxlen
, srclen
)))
3852 /* The number of bytes to write is LEN but check_sizes will also
3853 check SRCLEN if LEN's value isn't known. */
3854 return check_sizes (OPT_Wstringop_overflow_
,
3855 exp
, /*size=*/NULL_TREE
, maxlen
, srclen
, objsize
);
3858 /* Similar to expand_builtin_strcat, do some very basic size validation
3859 of a call to the strcpy builtin given by EXP. Return NULL_RTX to have
3860 the built-in expand to a call to the library function. */
3863 expand_builtin_strncat (tree exp
, rtx
)
3865 if (!validate_arglist (exp
,
3866 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
)
3867 || !warn_stringop_overflow
)
3870 tree dest
= CALL_EXPR_ARG (exp
, 0);
3871 tree src
= CALL_EXPR_ARG (exp
, 1);
3872 /* The upper bound on the number of bytes to write. */
3873 tree maxlen
= CALL_EXPR_ARG (exp
, 2);
3874 /* The length of the source sequence. */
3875 tree slen
= c_strlen (src
, 1);
3877 /* Try to determine the range of lengths that the source expression
3881 lenrange
[0] = lenrange
[1] = slen
;
3883 get_range_strlen (src
, lenrange
);
3885 /* Try to verify that the destination is big enough for the shortest
3886 string. First try to determine the size of the destination object
3887 into which the source is being copied. */
3888 tree destsize
= compute_objsize (dest
, warn_stringop_overflow
- 1);
3890 /* Add one for the terminating nul. */
3891 tree srclen
= (lenrange
[0]
3892 ? fold_build2 (PLUS_EXPR
, size_type_node
, lenrange
[0],
3896 /* Strncat copies at most MAXLEN bytes and always appends the terminating
3897 nul so the specified upper bound should never be equal to (or greater
3898 than) the size of the destination. */
3899 if (tree_fits_uhwi_p (maxlen
) && tree_fits_uhwi_p (destsize
)
3900 && tree_int_cst_equal (destsize
, maxlen
))
3902 location_t loc
= tree_nonartificial_location (exp
);
3903 loc
= expansion_point_location_if_in_system_header (loc
);
3905 warning_at (loc
, OPT_Wstringop_overflow_
,
3906 "%K%qD specified bound %E equals destination size",
3907 exp
, get_callee_fndecl (exp
), maxlen
);
3913 || (maxlen
&& tree_fits_uhwi_p (maxlen
)
3914 && tree_fits_uhwi_p (srclen
)
3915 && tree_int_cst_lt (maxlen
, srclen
)))
3918 /* The number of bytes to write is LEN but check_sizes will also
3919 check SRCLEN if LEN's value isn't known. */
3920 check_sizes (OPT_Wstringop_overflow_
,
3921 exp
, /*size=*/NULL_TREE
, maxlen
, srclen
, destsize
);
3926 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3927 NULL_RTX if we failed the caller should emit a normal call. */
3930 expand_builtin_strncpy (tree exp
, rtx target
)
3932 location_t loc
= EXPR_LOCATION (exp
);
3934 if (validate_arglist (exp
,
3935 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3937 tree dest
= CALL_EXPR_ARG (exp
, 0);
3938 tree src
= CALL_EXPR_ARG (exp
, 1);
3939 /* The number of bytes to write (not the maximum). */
3940 tree len
= CALL_EXPR_ARG (exp
, 2);
3941 /* The length of the source sequence. */
3942 tree slen
= c_strlen (src
, 1);
3944 if (warn_stringop_overflow
)
3946 tree destsize
= compute_objsize (dest
,
3947 warn_stringop_overflow
- 1);
3949 /* The number of bytes to write is LEN but check_sizes will also
3950 check SLEN if LEN's value isn't known. */
3951 check_sizes (OPT_Wstringop_overflow_
,
3952 exp
, len
, /*maxlen=*/NULL_TREE
, src
, destsize
);
3955 /* We must be passed a constant len and src parameter. */
3956 if (!tree_fits_uhwi_p (len
) || !slen
|| !tree_fits_uhwi_p (slen
))
3959 slen
= size_binop_loc (loc
, PLUS_EXPR
, slen
, ssize_int (1));
3961 /* We're required to pad with trailing zeros if the requested
3962 len is greater than strlen(s2)+1. In that case try to
3963 use store_by_pieces, if it fails, punt. */
3964 if (tree_int_cst_lt (slen
, len
))
3966 unsigned int dest_align
= get_pointer_alignment (dest
);
3967 const char *p
= c_getstr (src
);
3970 if (!p
|| dest_align
== 0 || !tree_fits_uhwi_p (len
)
3971 || !can_store_by_pieces (tree_to_uhwi (len
),
3972 builtin_strncpy_read_str
,
3973 CONST_CAST (char *, p
),
3977 dest_mem
= get_memory_rtx (dest
, len
);
3978 store_by_pieces (dest_mem
, tree_to_uhwi (len
),
3979 builtin_strncpy_read_str
,
3980 CONST_CAST (char *, p
), dest_align
, false, 0);
3981 dest_mem
= force_operand (XEXP (dest_mem
, 0), target
);
3982 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

static rtx
builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                         scalar_int_mode mode)
{
  const char *c = (const char *) data;
  char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));

  memset (p, *c, GET_MODE_SIZE (mode));

  return c_readstr (p, mode);
}
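
/* Illustrative note (not part of the original file): the callback above
   replicates a constant fill byte across one word.  For example, with
   *C == 0x41 and a 4-byte MODE it yields the constant 0x41414141, so

     char d[16];
     memset (d, 'A', 16);

   can be expanded as four 4-byte constant stores by store_by_pieces.  */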
/* Callback routine for store_by_pieces.  Return the RTL of a register
   containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
   char value given in the RTL register data.  For example, if mode is
   4 bytes wide, return the RTL for 0x01010101*data.  */

static rtx
builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                        scalar_int_mode mode)
{
  rtx target, coeff;
  size_t size;
  char *p;

  size = GET_MODE_SIZE (mode);
  if (size == 1)
    return (rtx) data;

  p = XALLOCAVEC (char, size);
  memset (p, 1, size);
  coeff = c_readstr (p, mode);

  target = convert_to_mode (mode, (rtx) data, 1);
  target = expand_mult (mode, target, coeff, NULL_RTX, 1);
  return force_reg (mode, target);
}
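
/* Illustrative note (not part of the original file): for a non-constant
   fill value the replication above is done by multiplication at run time.
   With a 4-byte MODE the coefficient read back from the "\1\1\1\1" buffer
   is 0x01010101, so a fill byte C becomes C * 0x01010101, i.e. C copied
   into every byte of the word.  */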
/* Expand expression EXP, which is a call to the memset builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient (and in mode MODE if that's
   convenient).  */

static rtx
expand_builtin_memset (tree exp, rtx target, machine_mode mode)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree val = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);

  check_memop_sizes (exp, dest, NULL_TREE, len);

  return expand_builtin_memset_args (dest, val, len, target, mode, exp);
}
/* Expand expression EXP, which is an instrumented call to the memset builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in mode MODE
   if that's convenient).  */

static rtx
expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_BOUNDS_TYPE,
                         INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree val = CALL_EXPR_ARG (exp, 2);
      tree len = CALL_EXPR_ARG (exp, 3);
      rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);

      /* Return src bounds with the result.  */
      if (res)
        {
          rtx bnd = force_reg (targetm.chkp_bound_mode (),
                               expand_normal (CALL_EXPR_ARG (exp, 1)));
          res = chkp_join_splitted_slot (res, bnd);
        }
      return res;
    }
}
4082 /* Helper function to do the actual work for expand_builtin_memset. The
4083 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4084 so that this can also be called without constructing an actual CALL_EXPR.
4085 The other arguments and return value are the same as for
4086 expand_builtin_memset. */
4089 expand_builtin_memset_args (tree dest
, tree val
, tree len
,
4090 rtx target
, machine_mode mode
, tree orig_exp
)
4093 enum built_in_function fcode
;
4094 machine_mode val_mode
;
4096 unsigned int dest_align
;
4097 rtx dest_mem
, dest_addr
, len_rtx
;
4098 HOST_WIDE_INT expected_size
= -1;
4099 unsigned int expected_align
= 0;
4100 unsigned HOST_WIDE_INT min_size
;
4101 unsigned HOST_WIDE_INT max_size
;
4102 unsigned HOST_WIDE_INT probable_max_size
;
4104 dest_align
= get_pointer_alignment (dest
);
4106 /* If DEST is not a pointer type, don't do this operation in-line. */
4107 if (dest_align
== 0)
4110 if (currently_expanding_gimple_stmt
)
4111 stringop_block_profile (currently_expanding_gimple_stmt
,
4112 &expected_align
, &expected_size
);
4114 if (expected_align
< dest_align
)
4115 expected_align
= dest_align
;
4117 /* If the LEN parameter is zero, return DEST. */
4118 if (integer_zerop (len
))
4120 /* Evaluate and ignore VAL in case it has side-effects. */
4121 expand_expr (val
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4122 return expand_expr (dest
, target
, mode
, EXPAND_NORMAL
);
4125 /* Stabilize the arguments in case we fail. */
4126 dest
= builtin_save_expr (dest
);
4127 val
= builtin_save_expr (val
);
4128 len
= builtin_save_expr (len
);
4130 len_rtx
= expand_normal (len
);
4131 determine_block_size (len
, len_rtx
, &min_size
, &max_size
,
4132 &probable_max_size
);
4133 dest_mem
= get_memory_rtx (dest
, len
);
4134 val_mode
= TYPE_MODE (unsigned_char_type_node
);
4136 if (TREE_CODE (val
) != INTEGER_CST
)
4140 val_rtx
= expand_normal (val
);
4141 val_rtx
= convert_to_mode (val_mode
, val_rtx
, 0);
4143 /* Assume that we can memset by pieces if we can store
4144 * the coefficients by pieces (in the required modes).
4145 * We can't pass builtin_memset_gen_str as that emits RTL. */
4147 if (tree_fits_uhwi_p (len
)
4148 && can_store_by_pieces (tree_to_uhwi (len
),
4149 builtin_memset_read_str
, &c
, dest_align
,
4152 val_rtx
= force_reg (val_mode
, val_rtx
);
4153 store_by_pieces (dest_mem
, tree_to_uhwi (len
),
4154 builtin_memset_gen_str
, val_rtx
, dest_align
,
4157 else if (!set_storage_via_setmem (dest_mem
, len_rtx
, val_rtx
,
4158 dest_align
, expected_align
,
4159 expected_size
, min_size
, max_size
,
4163 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
4164 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
4168 if (target_char_cast (val
, &c
))
4173 if (tree_fits_uhwi_p (len
)
4174 && can_store_by_pieces (tree_to_uhwi (len
),
4175 builtin_memset_read_str
, &c
, dest_align
,
4177 store_by_pieces (dest_mem
, tree_to_uhwi (len
),
4178 builtin_memset_read_str
, &c
, dest_align
, true, 0);
4179 else if (!set_storage_via_setmem (dest_mem
, len_rtx
,
4180 gen_int_mode (c
, val_mode
),
4181 dest_align
, expected_align
,
4182 expected_size
, min_size
, max_size
,
4186 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
4187 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
4191 set_mem_align (dest_mem
, dest_align
);
4192 dest_addr
= clear_storage_hints (dest_mem
, len_rtx
,
4193 CALL_EXPR_TAILCALL (orig_exp
)
4194 ? BLOCK_OP_TAILCALL
: BLOCK_OP_NORMAL
,
4195 expected_align
, expected_size
,
4201 dest_addr
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
4202 dest_addr
= convert_memory_address (ptr_mode
, dest_addr
);
4208 fndecl
= get_callee_fndecl (orig_exp
);
4209 fcode
= DECL_FUNCTION_CODE (fndecl
);
4210 if (fcode
== BUILT_IN_MEMSET
4211 || fcode
== BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP
)
4212 fn
= build_call_nofold_loc (EXPR_LOCATION (orig_exp
), fndecl
, 3,
4214 else if (fcode
== BUILT_IN_BZERO
)
4215 fn
= build_call_nofold_loc (EXPR_LOCATION (orig_exp
), fndecl
, 2,
4219 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
4220 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (orig_exp
);
4221 return expand_call (fn
, target
, target
== const0_rtx
);
/* Expand expression EXP, which is a call to the bzero builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  */

static rtx
expand_builtin_bzero (tree exp)
{
  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree size = CALL_EXPR_ARG (exp, 1);

  check_memop_sizes (exp, dest, NULL_TREE, size);

  /* New argument list transforming bzero(ptr x, int y) to
     memset(ptr x, int 0, size_t y).  This is done this way
     so that if it isn't expanded inline, we fall back to
     calling bzero instead of memset.  */

  location_t loc = EXPR_LOCATION (exp);

  return expand_builtin_memset_args (dest, integer_zero_node,
                                     fold_convert_loc (loc,
                                                       size_type_node, size),
                                     const0_rtx, VOIDmode, exp);
}
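
/* Illustrative note (not part of the original file): the argument rewrite
   above means that, at the source level,

     bzero (p, n);

   is expanded exactly like

     memset (p, 0, (size_t) n);

   with the result forced to const0_rtx because bzero returns void.  */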
/* Try to expand cmpstr operation ICODE with the given operands.
   Return the result rtx on success, otherwise return null.  */

static rtx
expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
               HOST_WIDE_INT align)
{
  machine_mode insn_mode = insn_data[icode].operand[0].mode;

  if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
    target = NULL_RTX;

  struct expand_operand ops[4];
  create_output_operand (&ops[0], target, insn_mode);
  create_fixed_operand (&ops[1], arg1_rtx);
  create_fixed_operand (&ops[2], arg2_rtx);
  create_integer_operand (&ops[3], align);
  if (maybe_expand_insn (icode, 4, ops))
    return ops[0].value;
  return NULL_RTX;
}
4273 /* Expand expression EXP, which is a call to the memcmp built-in function.
4274 Return NULL_RTX if we failed and the caller should emit a normal call,
4275 otherwise try to get the result in TARGET, if convenient.
4276 RESULT_EQ is true if we can relax the returned value to be either zero
4277 or nonzero, without caring about the sign. */
4280 expand_builtin_memcmp (tree exp
, rtx target
, bool result_eq
)
4282 if (!validate_arglist (exp
,
4283 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
4286 tree arg1
= CALL_EXPR_ARG (exp
, 0);
4287 tree arg2
= CALL_EXPR_ARG (exp
, 1);
4288 tree len
= CALL_EXPR_ARG (exp
, 2);
4290 /* Diagnose calls where the specified length exceeds the size of either
4292 if (warn_stringop_overflow
)
4294 tree size
= compute_objsize (arg1
, 0);
4295 if (check_sizes (OPT_Wstringop_overflow_
,
4296 exp
, len
, /*maxlen=*/NULL_TREE
,
4297 size
, /*objsize=*/NULL_TREE
))
4299 size
= compute_objsize (arg2
, 0);
4300 check_sizes (OPT_Wstringop_overflow_
,
4301 exp
, len
, /*maxlen=*/NULL_TREE
,
4302 size
, /*objsize=*/NULL_TREE
);
4306 machine_mode mode
= TYPE_MODE (TREE_TYPE (exp
));
4307 location_t loc
= EXPR_LOCATION (exp
);
4309 unsigned int arg1_align
= get_pointer_alignment (arg1
) / BITS_PER_UNIT
;
4310 unsigned int arg2_align
= get_pointer_alignment (arg2
) / BITS_PER_UNIT
;
4312 /* If we don't have POINTER_TYPE, call the function. */
4313 if (arg1_align
== 0 || arg2_align
== 0)
4316 rtx arg1_rtx
= get_memory_rtx (arg1
, len
);
4317 rtx arg2_rtx
= get_memory_rtx (arg2
, len
);
4318 rtx len_rtx
= expand_normal (fold_convert_loc (loc
, sizetype
, len
));
4320 /* Set MEM_SIZE as appropriate. */
4321 if (CONST_INT_P (len_rtx
))
4323 set_mem_size (arg1_rtx
, INTVAL (len_rtx
));
4324 set_mem_size (arg2_rtx
, INTVAL (len_rtx
));
4327 by_pieces_constfn constfn
= NULL
;
4329 const char *src_str
= c_getstr (arg2
);
4330 if (result_eq
&& src_str
== NULL
)
4332 src_str
= c_getstr (arg1
);
4333 if (src_str
!= NULL
)
4334 std::swap (arg1_rtx
, arg2_rtx
);
4337 /* If SRC is a string constant and block move would be done
4338 by pieces, we can avoid loading the string from memory
4339 and only stored the computed constants. */
4341 && CONST_INT_P (len_rtx
)
4342 && (unsigned HOST_WIDE_INT
) INTVAL (len_rtx
) <= strlen (src_str
) + 1)
4343 constfn
= builtin_memcpy_read_str
;
4345 rtx result
= emit_block_cmp_hints (arg1_rtx
, arg2_rtx
, len_rtx
,
4346 TREE_TYPE (len
), target
,
4348 CONST_CAST (char *, src_str
));
4352 /* Return the value in the proper mode for this function. */
4353 if (GET_MODE (result
) == mode
)
4358 convert_move (target
, result
, 0);
4362 return convert_to_mode (mode
, result
, 0);
4368 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4369 if we failed the caller should emit a normal call, otherwise try to get
4370 the result in TARGET, if convenient. */
4373 expand_builtin_strcmp (tree exp
, ATTRIBUTE_UNUSED rtx target
)
4375 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
4378 insn_code cmpstr_icode
= direct_optab_handler (cmpstr_optab
, SImode
);
4379 insn_code cmpstrn_icode
= direct_optab_handler (cmpstrn_optab
, SImode
);
4380 if (cmpstr_icode
!= CODE_FOR_nothing
|| cmpstrn_icode
!= CODE_FOR_nothing
)
4382 rtx arg1_rtx
, arg2_rtx
;
4384 tree arg1
= CALL_EXPR_ARG (exp
, 0);
4385 tree arg2
= CALL_EXPR_ARG (exp
, 1);
4386 rtx result
= NULL_RTX
;
4388 unsigned int arg1_align
= get_pointer_alignment (arg1
) / BITS_PER_UNIT
;
4389 unsigned int arg2_align
= get_pointer_alignment (arg2
) / BITS_PER_UNIT
;
4391 /* If we don't have POINTER_TYPE, call the function. */
4392 if (arg1_align
== 0 || arg2_align
== 0)
4395 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4396 arg1
= builtin_save_expr (arg1
);
4397 arg2
= builtin_save_expr (arg2
);
4399 arg1_rtx
= get_memory_rtx (arg1
, NULL
);
4400 arg2_rtx
= get_memory_rtx (arg2
, NULL
);
4402 /* Try to call cmpstrsi. */
4403 if (cmpstr_icode
!= CODE_FOR_nothing
)
4404 result
= expand_cmpstr (cmpstr_icode
, target
, arg1_rtx
, arg2_rtx
,
4405 MIN (arg1_align
, arg2_align
));
4407 /* Try to determine at least one length and call cmpstrnsi. */
4408 if (!result
&& cmpstrn_icode
!= CODE_FOR_nothing
)
4413 tree len1
= c_strlen (arg1
, 1);
4414 tree len2
= c_strlen (arg2
, 1);
4417 len1
= size_binop (PLUS_EXPR
, ssize_int (1), len1
);
4419 len2
= size_binop (PLUS_EXPR
, ssize_int (1), len2
);
4421 /* If we don't have a constant length for the first, use the length
4422 of the second, if we know it. We don't require a constant for
4423 this case; some cost analysis could be done if both are available
4424 but neither is constant. For now, assume they're equally cheap,
4425 unless one has side effects. If both strings have constant lengths,
4432 else if (TREE_SIDE_EFFECTS (len1
))
4434 else if (TREE_SIDE_EFFECTS (len2
))
4436 else if (TREE_CODE (len1
) != INTEGER_CST
)
4438 else if (TREE_CODE (len2
) != INTEGER_CST
)
4440 else if (tree_int_cst_lt (len1
, len2
))
4445 /* If both arguments have side effects, we cannot optimize. */
4446 if (len
&& !TREE_SIDE_EFFECTS (len
))
4448 arg3_rtx
= expand_normal (len
);
4449 result
= expand_cmpstrn_or_cmpmem
4450 (cmpstrn_icode
, target
, arg1_rtx
, arg2_rtx
, TREE_TYPE (len
),
4451 arg3_rtx
, MIN (arg1_align
, arg2_align
));
4457 /* Return the value in the proper mode for this function. */
4458 machine_mode mode
= TYPE_MODE (TREE_TYPE (exp
));
4459 if (GET_MODE (result
) == mode
)
4462 return convert_to_mode (mode
, result
, 0);
4463 convert_move (target
, result
, 0);
4467 /* Expand the library call ourselves using a stabilized argument
4468 list to avoid re-evaluating the function's arguments twice. */
4469 fndecl
= get_callee_fndecl (exp
);
4470 fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fndecl
, 2, arg1
, arg2
);
4471 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
4472 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
4473 return expand_call (fn
, target
, target
== const0_rtx
);
4478 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4479 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4480 the result in TARGET, if convenient. */
4483 expand_builtin_strncmp (tree exp
, ATTRIBUTE_UNUSED rtx target
,
4484 ATTRIBUTE_UNUSED machine_mode mode
)
4486 location_t loc ATTRIBUTE_UNUSED
= EXPR_LOCATION (exp
);
4488 if (!validate_arglist (exp
,
4489 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
4492 /* If c_strlen can determine an expression for one of the string
4493 lengths, and it doesn't have side effects, then emit cmpstrnsi
4494 using length MIN(strlen(string)+1, arg3). */
4495 insn_code cmpstrn_icode
= direct_optab_handler (cmpstrn_optab
, SImode
);
4496 if (cmpstrn_icode
!= CODE_FOR_nothing
)
4498 tree len
, len1
, len2
, len3
;
4499 rtx arg1_rtx
, arg2_rtx
, arg3_rtx
;
4502 tree arg1
= CALL_EXPR_ARG (exp
, 0);
4503 tree arg2
= CALL_EXPR_ARG (exp
, 1);
4504 tree arg3
= CALL_EXPR_ARG (exp
, 2);
4506 unsigned int arg1_align
= get_pointer_alignment (arg1
) / BITS_PER_UNIT
;
4507 unsigned int arg2_align
= get_pointer_alignment (arg2
) / BITS_PER_UNIT
;
4509 len1
= c_strlen (arg1
, 1);
4510 len2
= c_strlen (arg2
, 1);
4513 len1
= size_binop_loc (loc
, PLUS_EXPR
, ssize_int (1), len1
);
4515 len2
= size_binop_loc (loc
, PLUS_EXPR
, ssize_int (1), len2
);
4517 len3
= fold_convert_loc (loc
, sizetype
, arg3
);
4519 /* If we don't have a constant length for the first, use the length
4520 of the second, if we know it. If neither string is constant length,
4521 use the given length argument. We don't require a constant for
4522 this case; some cost analysis could be done if both are available
4523 but neither is constant. For now, assume they're equally cheap,
4524 unless one has side effects. If both strings have constant lengths,
4533 else if (TREE_SIDE_EFFECTS (len1
))
4535 else if (TREE_SIDE_EFFECTS (len2
))
4537 else if (TREE_CODE (len1
) != INTEGER_CST
)
4539 else if (TREE_CODE (len2
) != INTEGER_CST
)
4541 else if (tree_int_cst_lt (len1
, len2
))
4546 /* If we are not using the given length, we must incorporate it here.
4547 The actual new length parameter will be MIN(len,arg3) in this case. */
4549 len
= fold_build2_loc (loc
, MIN_EXPR
, TREE_TYPE (len
), len
, len3
);
4550 arg1_rtx
= get_memory_rtx (arg1
, len
);
4551 arg2_rtx
= get_memory_rtx (arg2
, len
);
4552 arg3_rtx
= expand_normal (len
);
4553 result
= expand_cmpstrn_or_cmpmem (cmpstrn_icode
, target
, arg1_rtx
,
4554 arg2_rtx
, TREE_TYPE (len
), arg3_rtx
,
4555 MIN (arg1_align
, arg2_align
));
4558 /* Return the value in the proper mode for this function. */
4559 mode
= TYPE_MODE (TREE_TYPE (exp
));
4560 if (GET_MODE (result
) == mode
)
4563 return convert_to_mode (mode
, result
, 0);
4564 convert_move (target
, result
, 0);
4568 /* Expand the library call ourselves using a stabilized argument
4569 list to avoid re-evaluating the function's arguments twice. */
4570 fndecl
= get_callee_fndecl (exp
);
4571 fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fndecl
, 3,
4573 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
4574 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
4575 return expand_call (fn
, target
, target
== const0_rtx
);
/* Expand a call to __builtin_saveregs, generating the result in TARGET,
   if that's convenient.  */

rtx
expand_builtin_saveregs (void)
{
  rtx val;
  rtx_insn *seq;

  /* Don't do __builtin_saveregs more than once in a function.
     Save the result of the first call and reuse it.  */
  if (saveregs_value != 0)
    return saveregs_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the call to the
     first insn of this function.  */

  start_sequence ();

  /* Do whatever the machine needs done in this case.  */
  val = targetm.calls.expand_builtin_saveregs ();

  seq = get_insns ();
  end_sequence ();

  saveregs_value = val;

  /* Put the insns after the NOTE that starts the function.  If this
     is inside a start_sequence, make the outer-level insn chain current, so
     the code is placed at the start of the function.  */
  push_topmost_sequence ();
  emit_insn_after (seq, entry_of_function ());
  pop_topmost_sequence ();

  return val;
}
/* Expand a call to __builtin_next_arg.  */

static rtx
expand_builtin_next_arg (void)
{
  /* Checking arguments is already done in fold_builtin_next_arg
     that must be called before this function.  */
  return expand_binop (ptr_mode, add_optab,
                       crtl->args.internal_arg_pointer,
                       crtl->args.arg_offset_rtx,
                       NULL_RTX, 0, OPTAB_LIB_WIDEN);
}
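
/* Illustrative note (not part of the original file): the value computed
   above is simply

     next_arg = internal_arg_pointer + arg_offset_rtx

   i.e. the address just past the last named parameter, which is where the
   va_arg machinery starts scanning for anonymous arguments.  */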
4631 /* Make it easier for the backends by protecting the valist argument
4632 from multiple evaluations. */
4635 stabilize_va_list_loc (location_t loc
, tree valist
, int needs_lvalue
)
4637 tree vatype
= targetm
.canonical_va_list_type (TREE_TYPE (valist
));
4639 /* The current way of determining the type of valist is completely
4640 bogus. We should have the information on the va builtin instead. */
4642 vatype
= targetm
.fn_abi_va_list (cfun
->decl
);
4644 if (TREE_CODE (vatype
) == ARRAY_TYPE
)
4646 if (TREE_SIDE_EFFECTS (valist
))
4647 valist
= save_expr (valist
);
4649 /* For this case, the backends will be expecting a pointer to
4650 vatype, but it's possible we've actually been given an array
4651 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4653 if (TREE_CODE (TREE_TYPE (valist
)) == ARRAY_TYPE
)
4655 tree p1
= build_pointer_type (TREE_TYPE (vatype
));
4656 valist
= build_fold_addr_expr_with_type_loc (loc
, valist
, p1
);
4661 tree pt
= build_pointer_type (vatype
);
4665 if (! TREE_SIDE_EFFECTS (valist
))
4668 valist
= fold_build1_loc (loc
, ADDR_EXPR
, pt
, valist
);
4669 TREE_SIDE_EFFECTS (valist
) = 1;
4672 if (TREE_SIDE_EFFECTS (valist
))
4673 valist
= save_expr (valist
);
4674 valist
= fold_build2_loc (loc
, MEM_REF
,
4675 vatype
, valist
, build_int_cst (pt
, 0));
/* The "standard" definition of va_list is void*.  */

tree
std_build_builtin_va_list (void)
{
  return ptr_type_node;
}

/* The "standard" abi va_list is va_list_type_node.  */

tree
std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
{
  return va_list_type_node;
}

/* The "standard" type of va_list is va_list_type_node.  */

tree
std_canonical_va_list_type (tree type)
{
  tree wtype, htype;

  wtype = va_list_type_node;
  htype = type;

  if (TREE_CODE (wtype) == ARRAY_TYPE)
    {
      /* If va_list is an array type, the argument may have decayed
         to a pointer type, e.g. by being passed to another function.
         In that case, unwrap both types so that we can compare the
         underlying records.  */
      if (TREE_CODE (htype) == ARRAY_TYPE
          || POINTER_TYPE_P (htype))
        {
          wtype = TREE_TYPE (wtype);
          htype = TREE_TYPE (htype);
        }
    }
  if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
    return va_list_type_node;

  return NULL_TREE;
}
/* The "standard" implementation of va_start: just assign `nextarg' to
   the variable.  */

void
std_expand_builtin_va_start (tree valist, rtx nextarg)
{
  rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
  convert_move (va_r, nextarg, 0);

  /* We do not have any valid bounds for the pointer, so
     just store zero bounds for it.  */
  if (chkp_function_instrumented_p (current_function_decl))
    chkp_expand_bounds_reset_for_mem (valist,
                                      make_tree (TREE_TYPE (valist),
                                                 nextarg));
}
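
/* Illustrative note (not part of the original file): with the "standard"
   void* va_list, the expansion above makes

     va_list ap;
     va_start (ap, last_named);

   behave like the plain assignment ap = next_arg, where next_arg comes
   from expand_builtin_next_arg above.  Targets with array- or
   struct-typed va_list replace this via targetm.expand_builtin_va_start.  */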
/* Expand EXP, a call to __builtin_va_start.  */

static rtx
expand_builtin_va_start (tree exp)
{
  rtx nextarg;
  tree valist;
  location_t loc = EXPR_LOCATION (exp);

  if (call_expr_nargs (exp) < 2)
    {
      error_at (loc, "too few arguments to function %<va_start%>");
      return const0_rtx;
    }

  if (fold_builtin_next_arg (exp, true))
    return const0_rtx;

  nextarg = expand_builtin_next_arg ();
  valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);

  if (targetm.expand_builtin_va_start)
    targetm.expand_builtin_va_start (valist, nextarg);
  else
    std_expand_builtin_va_start (valist, nextarg);

  return const0_rtx;
}
/* Expand EXP, a call to __builtin_va_end.  */

static rtx
expand_builtin_va_end (tree exp)
{
  tree valist = CALL_EXPR_ARG (exp, 0);

  /* Evaluate for side effects, if needed.  I hate macros that don't
     do that.  */
  if (TREE_SIDE_EFFECTS (valist))
    expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);

  return const0_rtx;
}
4787 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4788 builtin rather than just as an assignment in stdarg.h because of the
4789 nastiness of array-type va_list types. */
4792 expand_builtin_va_copy (tree exp
)
4795 location_t loc
= EXPR_LOCATION (exp
);
4797 dst
= CALL_EXPR_ARG (exp
, 0);
4798 src
= CALL_EXPR_ARG (exp
, 1);
4800 dst
= stabilize_va_list_loc (loc
, dst
, 1);
4801 src
= stabilize_va_list_loc (loc
, src
, 0);
4803 gcc_assert (cfun
!= NULL
&& cfun
->decl
!= NULL_TREE
);
4805 if (TREE_CODE (targetm
.fn_abi_va_list (cfun
->decl
)) != ARRAY_TYPE
)
4807 t
= build2 (MODIFY_EXPR
, targetm
.fn_abi_va_list (cfun
->decl
), dst
, src
);
4808 TREE_SIDE_EFFECTS (t
) = 1;
4809 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4813 rtx dstb
, srcb
, size
;
4815 /* Evaluate to pointers. */
4816 dstb
= expand_expr (dst
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
4817 srcb
= expand_expr (src
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
4818 size
= expand_expr (TYPE_SIZE_UNIT (targetm
.fn_abi_va_list (cfun
->decl
)),
4819 NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
4821 dstb
= convert_memory_address (Pmode
, dstb
);
4822 srcb
= convert_memory_address (Pmode
, srcb
);
4824 /* "Dereference" to BLKmode memories. */
4825 dstb
= gen_rtx_MEM (BLKmode
, dstb
);
4826 set_mem_alias_set (dstb
, get_alias_set (TREE_TYPE (TREE_TYPE (dst
))));
4827 set_mem_align (dstb
, TYPE_ALIGN (targetm
.fn_abi_va_list (cfun
->decl
)));
4828 srcb
= gen_rtx_MEM (BLKmode
, srcb
);
4829 set_mem_alias_set (srcb
, get_alias_set (TREE_TYPE (TREE_TYPE (src
))));
4830 set_mem_align (srcb
, TYPE_ALIGN (targetm
.fn_abi_va_list (cfun
->decl
)));
4833 emit_block_move (dstb
, srcb
, size
, BLOCK_OP_NORMAL
);
4839 /* Expand a call to one of the builtin functions __builtin_frame_address or
4840 __builtin_return_address. */
4843 expand_builtin_frame_address (tree fndecl
, tree exp
)
4845 /* The argument must be a nonnegative integer constant.
4846 It counts the number of frames to scan up the stack.
4847 The value is either the frame pointer value or the return
4848 address saved in that frame. */
4849 if (call_expr_nargs (exp
) == 0)
4850 /* Warning about missing arg was already issued. */
4852 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp
, 0)))
4854 error ("invalid argument to %qD", fndecl
);
4859 /* Number of frames to scan up the stack. */
4860 unsigned HOST_WIDE_INT count
= tree_to_uhwi (CALL_EXPR_ARG (exp
, 0));
4862 rtx tem
= expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl
), count
);
4864 /* Some ports cannot access arbitrary stack frames. */
4867 warning (0, "unsupported argument to %qD", fndecl
);
4873 /* Warn since no effort is made to ensure that any frame
4874 beyond the current one exists or can be safely reached. */
4875 warning (OPT_Wframe_address
, "calling %qD with "
4876 "a nonzero argument is unsafe", fndecl
);
4879 /* For __builtin_frame_address, return what we've got. */
4880 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
4884 && ! CONSTANT_P (tem
))
4885 tem
= copy_addr_to_reg (tem
);
4890 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4891 failed and the caller should emit a normal call. */
4894 expand_builtin_alloca (tree exp
)
4899 tree fndecl
= get_callee_fndecl (exp
);
4900 HOST_WIDE_INT max_size
;
4901 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
4902 bool alloca_for_var
= CALL_ALLOCA_FOR_VAR_P (exp
);
4904 = (fcode
== BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
4905 ? validate_arglist (exp
, INTEGER_TYPE
, INTEGER_TYPE
, INTEGER_TYPE
,
4907 : fcode
== BUILT_IN_ALLOCA_WITH_ALIGN
4908 ? validate_arglist (exp
, INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
)
4909 : validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
));
4914 if ((alloca_for_var
&& !warn_vla_limit
)
4915 || (!alloca_for_var
&& !warn_alloca_limit
))
4917 /* -Walloca-larger-than and -Wvla-larger-than settings override
4918 the more general -Walloc-size-larger-than so unless either of
4919 the former options is specified check the alloca arguments for
4921 tree args
[] = { CALL_EXPR_ARG (exp
, 0), NULL_TREE
};
4922 int idx
[] = { 0, -1 };
4923 maybe_warn_alloc_args_overflow (fndecl
, exp
, args
, idx
);
4926 /* Compute the argument. */
4927 op0
= expand_normal (CALL_EXPR_ARG (exp
, 0));
4929 /* Compute the alignment. */
4930 align
= (fcode
== BUILT_IN_ALLOCA
4932 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp
, 1)));
4934 /* Compute the maximum size. */
4935 max_size
= (fcode
== BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
4936 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp
, 2))
4939 /* Allocate the desired space. If the allocation stems from the declaration
4940 of a variable-sized object, it cannot accumulate. */
4942 = allocate_dynamic_stack_space (op0
, 0, align
, max_size
, alloca_for_var
);
4943 result
= convert_memory_address (ptr_mode
, result
);
/* Emit a call to __asan_allocas_unpoison call in EXP.  Replace second argument
   of the call with virtual_stack_dynamic_rtx because in asan pass we emit a
   dummy value into second parameter relying on this function to perform the
   change.  See motivation for this in comment to handle_builtin_stack_restore
   function.  */

static rtx
expand_asan_emit_allocas_unpoison (tree exp)
{
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  rtx bot = convert_memory_address (ptr_mode, virtual_stack_dynamic_rtx);
  rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
  ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
                                 top, ptr_mode, bot, ptr_mode);
  return ret;
}
/* Expand a call to bswap builtin in EXP.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
                      rtx subtarget)
{
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  op0 = expand_expr (arg,
                     subtarget && GET_MODE (subtarget) == target_mode
                     ? subtarget : NULL_RTX,
                     target_mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != target_mode)
    op0 = convert_to_mode (target_mode, op0, 1);

  target = expand_unop (target_mode, bswap_optab, op0, target, 1);

  gcc_assert (target);

  return convert_to_mode (target_mode, target, 1);
}
/* Expand a call to a unary builtin in EXP.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
                     rtx subtarget, optab op_optab)
{
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Compute the argument.  */
  op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
                     (subtarget
                      && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
                          == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
                     VOIDmode, EXPAND_NORMAL);
  /* Compute op, into TARGET if possible.
     Set TARGET to wherever the result comes back.  */
  target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
                        op_optab, op0, target, op_optab != clrsb_optab);
  gcc_assert (target);

  return convert_to_mode (target_mode, target, 0);
}
/* Expand a call to __builtin_expect.  We just return our argument
   as the builtin_expect semantics should already have been handled by
   the tree branch prediction pass.  */

static rtx
expand_builtin_expect (tree exp, rtx target)
{
  tree arg;

  if (call_expr_nargs (exp) < 2)
    return const0_rtx;
  arg = CALL_EXPR_ARG (exp, 0);

  target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
  /* When guessing was done, the hints should already be stripped away.  */
  gcc_assert (!flag_guess_branch_prob
              || optimize == 0 || seen_error ());
  return target;
}
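
/* Illustrative note (not part of the original file): by this point a call
   such as

     if (__builtin_expect (x == 0, 0))
       cold_path ();

   has already had its hint consumed by the tree-level branch predictor, so
   the expander only needs to return the first argument unchanged.  */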
/* Expand a call to __builtin_assume_aligned.  We just return our first
   argument as the builtin_assume_aligned semantic should've been already
   executed by CCP.  */

static rtx
expand_builtin_assume_aligned (tree exp, rtx target)
{
  if (call_expr_nargs (exp) < 2)
    return const0_rtx;
  target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
                        EXPAND_NORMAL);
  gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
              && (call_expr_nargs (exp) < 3
                  || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
  return target;
}
/* Expand a call to __builtin_trap.  */

static void
expand_builtin_trap (void)
{
  if (targetm.have_trap ())
    {
      rtx_insn *insn = emit_insn (targetm.gen_trap ());
      /* For trap insns when not accumulating outgoing args force
         REG_ARGS_SIZE note to prevent crossjumping of calls with
         different args sizes.  */
      if (!ACCUMULATE_OUTGOING_ARGS)
        add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
    }
  else
    {
      tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
      tree call_expr = build_call_expr (fn, 0);
      expand_call (call_expr, NULL_RTX, false);
    }

  emit_barrier ();
}
/* Expand a call to __builtin_unreachable.  We do nothing except emit
   a barrier saying that control flow will not pass here.

   It is the responsibility of the program being compiled to ensure
   that control flow never reaches __builtin_unreachable.  */

static void
expand_builtin_unreachable (void)
{
  emit_barrier ();
}
/* Expand EXP, a call to fabs, fabsf or fabsl.
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function inline.  If convenient, the result should be placed
   in TARGET.  SUBTARGET may be used as the target for computing
   the operand.  */

static rtx
expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
{
  machine_mode mode;
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
  return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
}
/* Expand EXP, a call to copysign, copysignf, or copysignl.
   Return NULL if a normal call should be emitted rather than expanding the
   function inline.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing the operand.  */

static rtx
expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
{
  rtx op0, op1;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

  arg = CALL_EXPR_ARG (exp, 1);
  op1 = expand_normal (arg);

  return expand_copysign (op0, op1, target);
}
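
/* Illustrative note (not part of the original file): the expansion above
   implements the usual copysign semantics, e.g.

     double r = copysign (3.0, -0.0);     r is -3.0

   taking the magnitude from the first operand and only the sign bit from
   the second; expand_copysign chooses a bitwise or FP-register sequence
   depending on the target.  */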
5141 /* Expand a call to __builtin___clear_cache. */
5144 expand_builtin___clear_cache (tree exp
)
5146 if (!targetm
.code_for_clear_cache
)
5148 #ifdef CLEAR_INSN_CACHE
5149 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5150 does something. Just do the default expansion to a call to
5154 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5155 does nothing. There is no need to call it. Do nothing. */
5157 #endif /* CLEAR_INSN_CACHE */
5160 /* We have a "clear_cache" insn, and it will handle everything. */
5162 rtx begin_rtx
, end_rtx
;
5164 /* We must not expand to a library call. If we did, any
5165 fallback library function in libgcc that might contain a call to
5166 __builtin___clear_cache() would recurse infinitely. */
5167 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
5169 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5173 if (targetm
.have_clear_cache ())
5175 struct expand_operand ops
[2];
5177 begin
= CALL_EXPR_ARG (exp
, 0);
5178 begin_rtx
= expand_expr (begin
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
5180 end
= CALL_EXPR_ARG (exp
, 1);
5181 end_rtx
= expand_expr (end
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
5183 create_address_operand (&ops
[0], begin_rtx
);
5184 create_address_operand (&ops
[1], end_rtx
);
5185 if (maybe_expand_insn (targetm
.code_for_clear_cache
, 2, ops
))
/* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT.  */

static rtx
round_trampoline_addr (rtx tramp)
{
  rtx temp, addend, mask;

  /* If we don't need too much alignment, we'll have been guaranteed
     proper alignment by get_trampoline_type.  */
  if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
    return tramp;

  /* Round address up to desired boundary.  */
  temp = gen_reg_rtx (Pmode);
  addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
  mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);

  temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
                              temp, 0, OPTAB_LIB_WIDEN);
  tramp = expand_simple_binop (Pmode, AND, temp, mask,
                               temp, 0, OPTAB_LIB_WIDEN);

  return tramp;
}
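
/* Illustrative note (not part of the original file): the two binops above
   compute the classic round-up-to-alignment idiom

     tramp = (tramp + align - 1) & -align

   where align is TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT; e.g. an address
   0x1003 rounded to a 16-byte boundary becomes 0x1010.  */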
5217 expand_builtin_init_trampoline (tree exp
, bool onstack
)
5219 tree t_tramp
, t_func
, t_chain
;
5220 rtx m_tramp
, r_tramp
, r_chain
, tmp
;
5222 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
,
5223 POINTER_TYPE
, VOID_TYPE
))
5226 t_tramp
= CALL_EXPR_ARG (exp
, 0);
5227 t_func
= CALL_EXPR_ARG (exp
, 1);
5228 t_chain
= CALL_EXPR_ARG (exp
, 2);
5230 r_tramp
= expand_normal (t_tramp
);
5231 m_tramp
= gen_rtx_MEM (BLKmode
, r_tramp
);
5232 MEM_NOTRAP_P (m_tramp
) = 1;
5234 /* If ONSTACK, the TRAMP argument should be the address of a field
5235 within the local function's FRAME decl. Either way, let's see if
5236 we can fill in the MEM_ATTRs for this memory. */
5237 if (TREE_CODE (t_tramp
) == ADDR_EXPR
)
5238 set_mem_attributes (m_tramp
, TREE_OPERAND (t_tramp
, 0), true);
5240 /* Creator of a heap trampoline is responsible for making sure the
5241 address is aligned to at least STACK_BOUNDARY. Normally malloc
5242 will ensure this anyhow. */
5243 tmp
= round_trampoline_addr (r_tramp
);
5246 m_tramp
= change_address (m_tramp
, BLKmode
, tmp
);
5247 set_mem_align (m_tramp
, TRAMPOLINE_ALIGNMENT
);
5248 set_mem_size (m_tramp
, TRAMPOLINE_SIZE
);
5251 /* The FUNC argument should be the address of the nested function.
5252 Extract the actual function decl to pass to the hook. */
5253 gcc_assert (TREE_CODE (t_func
) == ADDR_EXPR
);
5254 t_func
= TREE_OPERAND (t_func
, 0);
5255 gcc_assert (TREE_CODE (t_func
) == FUNCTION_DECL
);
5257 r_chain
= expand_normal (t_chain
);
5259 /* Generate insns to initialize the trampoline. */
5260 targetm
.calls
.trampoline_init (m_tramp
, t_func
, r_chain
);
5264 trampolines_created
= 1;
5266 if (targetm
.calls
.custom_function_descriptors
!= 0)
5267 warning_at (DECL_SOURCE_LOCATION (t_func
), OPT_Wtrampolines
,
5268 "trampoline generated for nested function %qD", t_func
);
static rtx
expand_builtin_adjust_trampoline (tree exp)
{
  rtx tramp;

  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
  tramp = round_trampoline_addr (tramp);
  if (targetm.calls.trampoline_adjust_address)
    tramp = targetm.calls.trampoline_adjust_address (tramp);

  return tramp;
}
5290 /* Expand a call to the builtin descriptor initialization routine.
5291 A descriptor is made up of a couple of pointers to the static
5292 chain and the code entry in this order. */
5295 expand_builtin_init_descriptor (tree exp
)
5297 tree t_descr
, t_func
, t_chain
;
5298 rtx m_descr
, r_descr
, r_func
, r_chain
;
5300 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, POINTER_TYPE
,
5304 t_descr
= CALL_EXPR_ARG (exp
, 0);
5305 t_func
= CALL_EXPR_ARG (exp
, 1);
5306 t_chain
= CALL_EXPR_ARG (exp
, 2);
5308 r_descr
= expand_normal (t_descr
);
5309 m_descr
= gen_rtx_MEM (BLKmode
, r_descr
);
5310 MEM_NOTRAP_P (m_descr
) = 1;
5312 r_func
= expand_normal (t_func
);
5313 r_chain
= expand_normal (t_chain
);
5315 /* Generate insns to initialize the descriptor. */
5316 emit_move_insn (adjust_address_nv (m_descr
, ptr_mode
, 0), r_chain
);
5317 emit_move_insn (adjust_address_nv (m_descr
, ptr_mode
,
5318 POINTER_SIZE
/ BITS_PER_UNIT
), r_func
);
/* Expand a call to the builtin descriptor adjustment routine.  */

static rtx
expand_builtin_adjust_descriptor (tree exp)
{
  rtx tramp;

  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tramp = expand_normal (CALL_EXPR_ARG (exp, 0));

  /* Unalign the descriptor to allow runtime identification.  */
  tramp = plus_constant (ptr_mode, tramp,
                         targetm.calls.custom_function_descriptors);

  return force_operand (tramp, NULL_RTX);
}
5342 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5343 function. The function first checks whether the back end provides
5344 an insn to implement signbit for the respective mode. If not, it
5345 checks whether the floating point format of the value is such that
5346 the sign bit can be extracted. If that is not the case, error out.
5347 EXP is the expression that is a call to the builtin function; if
5348 convenient, the result should be placed in TARGET. */
5350 expand_builtin_signbit (tree exp
, rtx target
)
5352 const struct real_format
*fmt
;
5353 scalar_float_mode fmode
;
5354 scalar_int_mode rmode
, imode
;
5357 enum insn_code icode
;
5359 location_t loc
= EXPR_LOCATION (exp
);
5361 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
5364 arg
= CALL_EXPR_ARG (exp
, 0);
5365 fmode
= SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg
));
5366 rmode
= SCALAR_INT_TYPE_MODE (TREE_TYPE (exp
));
5367 fmt
= REAL_MODE_FORMAT (fmode
);
5369 arg
= builtin_save_expr (arg
);
5371 /* Expand the argument yielding a RTX expression. */
5372 temp
= expand_normal (arg
);
5374 /* Check if the back end provides an insn that handles signbit for the
5376 icode
= optab_handler (signbit_optab
, fmode
);
5377 if (icode
!= CODE_FOR_nothing
)
5379 rtx_insn
*last
= get_last_insn ();
5380 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
5381 if (maybe_emit_unop_insn (icode
, target
, temp
, UNKNOWN
))
5383 delete_insns_since (last
);
5386 /* For floating point formats without a sign bit, implement signbit
5388 bitpos
= fmt
->signbit_ro
;
5391 /* But we can't do this if the format supports signed zero. */
5392 gcc_assert (!fmt
->has_signed_zero
|| !HONOR_SIGNED_ZEROS (fmode
));
5394 arg
= fold_build2_loc (loc
, LT_EXPR
, TREE_TYPE (exp
), arg
,
5395 build_real (TREE_TYPE (arg
), dconst0
));
5396 return expand_expr (arg
, target
, VOIDmode
, EXPAND_NORMAL
);
5399 if (GET_MODE_SIZE (fmode
) <= UNITS_PER_WORD
)
5401 imode
= int_mode_for_mode (fmode
).require ();
5402 temp
= gen_lowpart (imode
, temp
);
5407 /* Handle targets with different FP word orders. */
5408 if (FLOAT_WORDS_BIG_ENDIAN
)
5409 word
= (GET_MODE_BITSIZE (fmode
) - bitpos
) / BITS_PER_WORD
;
5411 word
= bitpos
/ BITS_PER_WORD
;
5412 temp
= operand_subword_force (temp
, word
, fmode
);
5413 bitpos
= bitpos
% BITS_PER_WORD
;
5416 /* Force the intermediate word_mode (or narrower) result into a
5417 register. This avoids attempting to create paradoxical SUBREGs
5418 of floating point modes below. */
5419 temp
= force_reg (imode
, temp
);
5421 /* If the bitpos is within the "result mode" lowpart, the operation
5422 can be implement with a single bitwise AND. Otherwise, we need
5423 a right shift and an AND. */
5425 if (bitpos
< GET_MODE_BITSIZE (rmode
))
5427 wide_int mask
= wi::set_bit_in_zero (bitpos
, GET_MODE_PRECISION (rmode
));
5429 if (GET_MODE_SIZE (imode
) > GET_MODE_SIZE (rmode
))
5430 temp
= gen_lowpart (rmode
, temp
);
5431 temp
= expand_binop (rmode
, and_optab
, temp
,
5432 immed_wide_int_const (mask
, rmode
),
5433 NULL_RTX
, 1, OPTAB_LIB_WIDEN
);
5437 /* Perform a logical right shift to place the signbit in the least
5438 significant bit, then truncate the result to the desired mode
5439 and mask just this bit. */
5440 temp
= expand_shift (RSHIFT_EXPR
, imode
, temp
, bitpos
, NULL_RTX
, 1);
5441 temp
= gen_lowpart (rmode
, temp
);
5442 temp
= expand_binop (rmode
, and_optab
, temp
, const1_rtx
,
5443 NULL_RTX
, 1, OPTAB_LIB_WIDEN
);
5449 /* Expand fork or exec calls. TARGET is the desired target of the
5450 call. EXP is the call. FN is the
5451 identificator of the actual function. IGNORE is nonzero if the
5452 value is to be ignored. */
5455 expand_builtin_fork_or_exec (tree fn
, tree exp
, rtx target
, int ignore
)
5460 /* If we are not profiling, just call the function. */
5461 if (!profile_arc_flag
)
5464 /* Otherwise call the wrapper. This should be equivalent for the rest of
5465 compiler, so the code does not diverge, and the wrapper may run the
5466 code necessary for keeping the profiling sane. */
5468 switch (DECL_FUNCTION_CODE (fn
))
5471 id
= get_identifier ("__gcov_fork");
5474 case BUILT_IN_EXECL
:
5475 id
= get_identifier ("__gcov_execl");
5478 case BUILT_IN_EXECV
:
5479 id
= get_identifier ("__gcov_execv");
5482 case BUILT_IN_EXECLP
:
5483 id
= get_identifier ("__gcov_execlp");
5486 case BUILT_IN_EXECLE
:
5487 id
= get_identifier ("__gcov_execle");
5490 case BUILT_IN_EXECVP
:
5491 id
= get_identifier ("__gcov_execvp");
5494 case BUILT_IN_EXECVE
:
5495 id
= get_identifier ("__gcov_execve");
5502 decl
= build_decl (DECL_SOURCE_LOCATION (fn
),
5503 FUNCTION_DECL
, id
, TREE_TYPE (fn
));
5504 DECL_EXTERNAL (decl
) = 1;
5505 TREE_PUBLIC (decl
) = 1;
5506 DECL_ARTIFICIAL (decl
) = 1;
5507 TREE_NOTHROW (decl
) = 1;
5508 DECL_VISIBILITY (decl
) = VISIBILITY_DEFAULT
;
5509 DECL_VISIBILITY_SPECIFIED (decl
) = 1;
5510 call
= rewrite_call_expr (EXPR_LOCATION (exp
), exp
, 0, decl
, 0);
5511 return expand_call (call
, target
, ignore
);
/* Reconstitute a mode for a __sync intrinsic operation.  Since the type of
   the pointer in these functions is void*, the tree optimizers may remove
   casts.  The mode computed in expand_builtin isn't reliable either, due
   to __sync_bool_compare_and_swap.

   FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
   group of builtins.  This gives us log2 of the mode size.  */

static inline machine_mode
get_builtin_sync_mode (int fcode_diff)
{
  /* The size is not negotiable, so ask not to get BLKmode in return
     if the target indicates that a smaller size would be better.  */
  return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
}
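
/* Illustrative note (not part of the original file): FCODE_DIFF encodes the
   access size as a power of two, so for example

     __sync_fetch_and_add_4  ->  fcode_diff == 2  ->  8 << 2 == 32 bits

   which int_mode_for_size maps to the 32-bit integer mode (SImode on most
   targets).  */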
/* Expand the memory expression LOC and return the appropriate memory operand
   for the builtin_sync operations.  */

static rtx
get_builtin_sync_mem (tree loc, machine_mode mode)
{
  rtx addr, mem;

  addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
  addr = convert_memory_address (Pmode, addr);

  /* Note that we explicitly do not want any alias information for this
     memory, so that we kill all other live memories.  Otherwise we don't
     satisfy the full barrier semantics of the intrinsic.  */
  mem = validize_mem (gen_rtx_MEM (mode, addr));

  /* The alignment needs to be at least according to that of the mode.  */
  set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
                           get_pointer_alignment (loc)));
  set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
  MEM_VOLATILE_P (mem) = 1;

  return mem;
}
/* Make sure an argument is in the right mode.
   EXP is the tree argument.
   MODE is the mode it should be in.  */

static rtx
expand_expr_force_mode (tree exp, machine_mode mode)
{
  rtx val;
  machine_mode old_mode;

  val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
  /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
     of CONST_INTs, where we know the old_mode only from the call argument.  */

  old_mode = GET_MODE (val);
  if (old_mode == VOIDmode)
    old_mode = TYPE_MODE (TREE_TYPE (exp));
  val = convert_modes (mode, old_mode, val, 1);
  return val;
}
5579 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5580 EXP is the CALL_EXPR. CODE is the rtx code
5581 that corresponds to the arithmetic or logical operation from the name;
5582 an exception here is that NOT actually means NAND. TARGET is an optional
5583 place for us to store the results; AFTER is true if this is the
5584 fetch_and_xxx form. */
5587 expand_builtin_sync_operation (machine_mode mode
, tree exp
,
5588 enum rtx_code code
, bool after
,
5592 location_t loc
= EXPR_LOCATION (exp
);
5594 if (code
== NOT
&& warn_sync_nand
)
5596 tree fndecl
= get_callee_fndecl (exp
);
5597 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
5599 static bool warned_f_a_n
, warned_n_a_f
;
5603 case BUILT_IN_SYNC_FETCH_AND_NAND_1
:
5604 case BUILT_IN_SYNC_FETCH_AND_NAND_2
:
5605 case BUILT_IN_SYNC_FETCH_AND_NAND_4
:
5606 case BUILT_IN_SYNC_FETCH_AND_NAND_8
:
5607 case BUILT_IN_SYNC_FETCH_AND_NAND_16
:
5611 fndecl
= builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N
);
5612 inform (loc
, "%qD changed semantics in GCC 4.4", fndecl
);
5613 warned_f_a_n
= true;
5616 case BUILT_IN_SYNC_NAND_AND_FETCH_1
:
5617 case BUILT_IN_SYNC_NAND_AND_FETCH_2
:
5618 case BUILT_IN_SYNC_NAND_AND_FETCH_4
:
5619 case BUILT_IN_SYNC_NAND_AND_FETCH_8
:
5620 case BUILT_IN_SYNC_NAND_AND_FETCH_16
:
5624 fndecl
= builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N
);
5625 inform (loc
, "%qD changed semantics in GCC 4.4", fndecl
);
5626 warned_n_a_f
= true;
5634 /* Expand the operands. */
5635 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5636 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
5638 return expand_atomic_fetch_op (target
, mem
, val
, code
, MEMMODEL_SYNC_SEQ_CST
,
/* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
   intrinsics.  EXP is the CALL_EXPR.  IS_BOOL is
   true if this is the boolean form.  TARGET is a place for us to store the
   results; this is NOT optional if IS_BOOL is true.  */

static rtx
expand_builtin_compare_and_swap (machine_mode mode, tree exp,
                                 bool is_bool, rtx target)
{
  rtx old_val, new_val, mem;
  rtx *pbool, *poval;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
  new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);

  pbool = poval = NULL;
  if (target != const0_rtx)
    {
      if (is_bool)
        pbool = &target;
      else
        poval = &target;
    }
  if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
                                       false, MEMMODEL_SYNC_SEQ_CST,
                                       MEMMODEL_SYNC_SEQ_CST))
    return NULL_RTX;

  return target;
}
/* Expand the __sync_lock_test_and_set intrinsic.  Note that the most
   general form is actually an atomic exchange, and some targets only
   support a reduced form with the second argument being a constant 1.
   EXP is the CALL_EXPR; TARGET is an optional place for us to store
   the results.  */

static rtx
expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
                                       rtx target)
{
  rtx val, mem;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_sync_lock_test_and_set (target, mem, val);
}

/* Expand the __sync_lock_release intrinsic.  EXP is the CALL_EXPR.  */

static void
expand_builtin_sync_lock_release (machine_mode mode, tree exp)
{
  rtx mem;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
}
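
/* Illustrative note (not part of the original file): per the __sync ABI,

     __sync_lock_release (&lock);

   is exactly an atomic store of 0 with release ordering, which is why the
   expansion above is a single expand_atomic_store of const0_rtx with
   MEMMODEL_SYNC_RELEASE.  */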
/* Given an integer representing an ``enum memmodel'', verify its
   correctness and return the memory model enum.  */

static enum memmodel
get_memmodel (tree exp)
{
  rtx op;
  unsigned HOST_WIDE_INT val;
  location_t loc
    = expansion_point_location_if_in_system_header (input_location);

  /* If the parameter is not a constant, it's a run time value so we'll just
     convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking.  */
  if (TREE_CODE (exp) != INTEGER_CST)
    return MEMMODEL_SEQ_CST;

  op = expand_normal (exp);

  val = INTVAL (op);
  if (targetm.memmodel_check)
    val = targetm.memmodel_check (val);
  else if (val & ~MEMMODEL_MASK)
    {
      warning_at (loc, OPT_Winvalid_memory_model,
                  "unknown architecture specifier in memory model to builtin");
      return MEMMODEL_SEQ_CST;
    }

  /* Should never see a user explicit SYNC memmodel, so >= LAST works.  */
  if (memmodel_base (val) >= MEMMODEL_LAST)
    {
      warning_at (loc, OPT_Winvalid_memory_model,
                  "invalid memory model argument to builtin");
      return MEMMODEL_SEQ_CST;
    }

  /* Workaround for Bugzilla 59448.  GCC doesn't track consume properly, so
     be conservative and promote consume to acquire.  */
  if (val == MEMMODEL_CONSUME)
    val = MEMMODEL_ACQUIRE;

  return (enum memmodel) val;
}
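
/* Illustrative note (not part of the original file): the checks above mean
   that, for example,

     __atomic_load_n (&x, __ATOMIC_CONSUME);

   is treated as if __ATOMIC_ACQUIRE had been written (PR 59448), while a
   non-constant model argument, or an out-of-range constant (after a
   -Winvalid-memory-model warning), falls back to __ATOMIC_SEQ_CST.  */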
/* Expand the __atomic_exchange intrinsic:
       TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
{
  rtx val, mem;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_exchange (target, mem, val, model);
}
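/* A typical source form reaching this expander (names illustrative only):

     int old = __atomic_exchange_n (&flag, 1, __ATOMIC_ACQ_REL);

   With -fno-inline-atomics the NULL return above makes expand_builtin fall
   back to an ordinary call to the out-of-line __atomic_exchange_N library
   routine.  */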
/* Expand the __atomic_compare_exchange intrinsic:
       bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
                                       TYPE desired, BOOL weak,
                                       enum memmodel success,
                                       enum memmodel failure)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
                                        rtx target)
{
  rtx expect, desired, mem, oldval;
  rtx_code_label *label;
  enum memmodel success, failure;
  tree weak;
  bool is_weak;
  source_location loc
    = expansion_point_location_if_in_system_header (input_location);

  success = get_memmodel (CALL_EXPR_ARG (exp, 4));
  failure = get_memmodel (CALL_EXPR_ARG (exp, 5));

  if (failure > success)
    {
      warning_at (loc, OPT_Winvalid_memory_model,
                  "failure memory model cannot be stronger than success "
                  "memory model for %<__atomic_compare_exchange%>");
      success = MEMMODEL_SEQ_CST;
    }

  if (is_mm_release (failure) || is_mm_acq_rel (failure))
    {
      warning_at (loc, OPT_Winvalid_memory_model,
                  "invalid failure memory model for "
                  "%<__atomic_compare_exchange%>");
      failure = MEMMODEL_SEQ_CST;
      success = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  expect = expand_normal (CALL_EXPR_ARG (exp, 1));
  expect = convert_memory_address (Pmode, expect);
  expect = gen_rtx_MEM (mode, expect);
  desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);

  weak = CALL_EXPR_ARG (exp, 3);
  is_weak = false;
  if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
    is_weak = true;

  if (target == const0_rtx)
    target = NULL;

  /* Lest the rtl backend create a race condition with an improper store
     to memory, always create a new pseudo for OLDVAL.  */
  oldval = NULL;

  if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
                                       is_weak, success, failure))
    return NULL_RTX;

  /* Conditionally store back to EXPECT, lest we create a race condition
     with an improper store to memory.  */
  /* ??? With a rearrangement of atomics at the gimple level, we can handle
     the normal case where EXPECT is totally private, i.e. a register.  At
     which point the store can be unconditional.  */
  label = gen_label_rtx ();
  emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
                           GET_MODE (target), 1, label);
  emit_move_insn (expect, oldval);
  emit_label (label);

  return target;
}
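/* The builtin expanded here corresponds to a source-level CAS loop such as
   (illustrative only):

     int expected = *p;
     while (!__atomic_compare_exchange_n (p, &expected, expected + 1,
                                          false, __ATOMIC_SEQ_CST,
                                          __ATOMIC_SEQ_CST))
       ;

   On failure the value read from *P is stored back into EXPECTED, which is
   why the expansion above conditionally copies OLDVAL into the EXPECT
   memory.  */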
/* Helper function for expand_ifn_atomic_compare_exchange - expand
   internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
   call.  The weak parameter must be dropped to match the expected parameter
   list and the expected argument changed from value to pointer to memory
   location.  */

static void
expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
{
  unsigned int z;
  vec<tree, va_gc> *vec;

  vec_alloc (vec, 5);
  vec->quick_push (gimple_call_arg (call, 0));
  tree expected = gimple_call_arg (call, 1);
  rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
                                      TREE_TYPE (expected));
  rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
  if (expd != x)
    emit_move_insn (x, expd);
  tree v = make_tree (TREE_TYPE (expected), x);
  vec->quick_push (build1 (ADDR_EXPR,
                           build_pointer_type (TREE_TYPE (expected)), v));
  vec->quick_push (gimple_call_arg (call, 2));
  /* Skip the boolean weak parameter.  */
  for (z = 4; z < 6; z++)
    vec->quick_push (gimple_call_arg (call, z));
  built_in_function fncode
    = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
                           + exact_log2 (GET_MODE_SIZE (mode)));
  tree fndecl = builtin_decl_explicit (fncode);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
                    fndecl);
  tree exp = build_call_vec (boolean_type_node, fn, vec);
  tree lhs = gimple_call_lhs (call);
  rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
  if (lhs)
    {
      rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
      if (GET_MODE (boolret) != mode)
        boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
      x = force_reg (mode, x);
      write_complex_part (target, boolret, true);
      write_complex_part (target, x, false);
    }
}
/* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function.  */

void
expand_ifn_atomic_compare_exchange (gcall *call)
{
  int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
  gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
  machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
  rtx expect, desired, mem, oldval, boolret;
  enum memmodel success, failure;
  tree lhs;
  bool is_weak;
  source_location loc
    = expansion_point_location_if_in_system_header (gimple_location (call));

  success = get_memmodel (gimple_call_arg (call, 4));
  failure = get_memmodel (gimple_call_arg (call, 5));

  if (failure > success)
    {
      warning_at (loc, OPT_Winvalid_memory_model,
                  "failure memory model cannot be stronger than success "
                  "memory model for %<__atomic_compare_exchange%>");
      success = MEMMODEL_SEQ_CST;
    }

  if (is_mm_release (failure) || is_mm_acq_rel (failure))
    {
      warning_at (loc, OPT_Winvalid_memory_model,
                  "invalid failure memory model for "
                  "%<__atomic_compare_exchange%>");
      failure = MEMMODEL_SEQ_CST;
      success = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    {
      expand_ifn_atomic_compare_exchange_into_call (call, mode);
      return;
    }

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);

  expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
  desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);

  is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;

  boolret = NULL;
  oldval = NULL;

  if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
                                       is_weak, success, failure))
    {
      expand_ifn_atomic_compare_exchange_into_call (call, mode);
      return;
    }

  lhs = gimple_call_lhs (call);
  if (lhs)
    {
      rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
      if (GET_MODE (boolret) != mode)
        boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
      write_complex_part (target, boolret, true);
      write_complex_part (target, oldval, false);
    }
}
/* Expand the __atomic_load intrinsic:
       TYPE __atomic_load (TYPE *object, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
{
  rtx mem;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 1));
  if (is_mm_release (model) || is_mm_acq_rel (model))
    {
      source_location loc
        = expansion_point_location_if_in_system_header (input_location);
      warning_at (loc, OPT_Winvalid_memory_model,
                  "invalid memory model for %<__atomic_load%>");
      model = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operand.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  return expand_atomic_load (target, mem, model);
}
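/* For example (names illustrative only):

     while (!__atomic_load_n (&ready, __ATOMIC_ACQUIRE))
       ;

   Release and acquire-release orders are rejected above because a load
   cannot have release semantics; they are demoted to SEQ_CST with a
   -Winvalid-memory-model warning.  */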
/* Expand the __atomic_store intrinsic:
       void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_store (machine_mode mode, tree exp)
{
  rtx mem, val;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));
  if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
        || is_mm_release (model)))
    {
      source_location loc
        = expansion_point_location_if_in_system_header (input_location);
      warning_at (loc, OPT_Winvalid_memory_model,
                  "invalid memory model for %<__atomic_store%>");
      model = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_store (mem, val, model, false);
}
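/* For example (names illustrative only):

     data = 42;
     __atomic_store_n (&ready, 1, __ATOMIC_RELEASE);

   Only relaxed, release and seq_cst orders are meaningful for a store;
   anything else is diagnosed above and demoted to SEQ_CST.  */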
/* Expand the __atomic_fetch_XXX intrinsic:
       TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.
   CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
   FETCH_AFTER is true if returning the result of the operation.
   FETCH_AFTER is false if returning the value before the operation.
   IGNORE is true if the result is not used.
   EXT_CALL is the correct builtin for an external call if this cannot be
   resolved to an instruction sequence.  */

static rtx
expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
                                enum rtx_code code, bool fetch_after,
                                bool ignore, enum built_in_function ext_call)
{
  rtx val, mem, ret;
  enum memmodel model;
  tree fndecl;
  tree addr;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  /* Only try generating instructions if inlining is turned on.  */
  if (flag_inline_atomics)
    {
      ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
      if (ret)
        return ret;
    }

  /* Return if a different routine isn't needed for the library call.  */
  if (ext_call == BUILT_IN_NONE)
    return NULL_RTX;

  /* Change the call to the specified function.  */
  fndecl = get_callee_fndecl (exp);
  addr = CALL_EXPR_FN (exp);
  STRIP_NOPS (addr);

  gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
  TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);

  /* If we will emit code after the call, the call can not be a tail call.
     If it is emitted as a tail call, a barrier is emitted after it, and
     then all trailing code is removed.  */
  if (!ignore)
    CALL_EXPR_TAILCALL (exp) = 0;

  /* Expand the call here so we can emit trailing code.  */
  ret = expand_call (exp, target, ignore);

  /* Replace the original function just in case it matters.  */
  TREE_OPERAND (addr, 0) = fndecl;

  /* Then issue the arithmetic correction to return the right result.  */
  if (!ignore)
    {
      if (code == NOT)
        {
          ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
                                     OPTAB_LIB_WIDEN);
          ret = expand_simple_unop (mode, NOT, ret, target, true);
        }
      else
        ret = expand_simple_binop (mode, code, ret, val, target, true,
                                   OPTAB_LIB_WIDEN);
    }
  return ret;
}
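/* The arithmetic correction above relies on the identity between the
   fetch-before and fetch-after forms of each builtin; for example
   (illustrative only):

     __atomic_add_fetch (p, n, m)  == __atomic_fetch_add (p, n, m) + n
     __atomic_nand_fetch (p, n, m) == ~(__atomic_fetch_nand (p, n, m) & n)

   so when only the library "fetch_OP" entry point is available, the
   "OP_fetch" result is recovered by redoing the operation on the returned
   value.  */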
/* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function.  */

void
expand_ifn_atomic_bit_test_and (gcall *call)
{
  tree ptr = gimple_call_arg (call, 0);
  tree bit = gimple_call_arg (call, 1);
  tree flag = gimple_call_arg (call, 2);
  tree lhs = gimple_call_lhs (call);
  enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
  machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
  enum rtx_code code;
  optab optab;
  struct expand_operand ops[5];

  gcc_assert (flag_inline_atomics);

  if (gimple_call_num_args (call) == 4)
    model = get_memmodel (gimple_call_arg (call, 3));

  rtx mem = get_builtin_sync_mem (ptr, mode);
  rtx val = expand_expr_force_mode (bit, mode);

  switch (gimple_call_internal_fn (call))
    {
    case IFN_ATOMIC_BIT_TEST_AND_SET:
      code = IOR;
      optab = atomic_bit_test_and_set_optab;
      break;
    case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
      code = XOR;
      optab = atomic_bit_test_and_complement_optab;
      break;
    case IFN_ATOMIC_BIT_TEST_AND_RESET:
      code = AND;
      optab = atomic_bit_test_and_reset_optab;
      break;
    default:
      gcc_unreachable ();
    }

  if (lhs == NULL_TREE)
    {
      val = expand_simple_binop (mode, ASHIFT, const1_rtx,
                                 val, NULL_RTX, true, OPTAB_DIRECT);
      if (code == AND)
        val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
      expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
      return;
    }

  rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  enum insn_code icode = direct_optab_handler (optab, mode);
  gcc_assert (icode != CODE_FOR_nothing);
  create_output_operand (&ops[0], target, mode);
  create_fixed_operand (&ops[1], mem);
  create_convert_operand_to (&ops[2], val, mode, true);
  create_integer_operand (&ops[3], model);
  create_integer_operand (&ops[4], integer_onep (flag));
  if (maybe_expand_insn (icode, 5, ops))
    return;

  rtx bitval = val;
  val = expand_simple_binop (mode, ASHIFT, const1_rtx,
                             val, NULL_RTX, true, OPTAB_DIRECT);
  rtx maskval = val;
  if (code == AND)
    val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
  rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
                                       code, model, false);
  if (integer_onep (flag))
    {
      result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
                                    NULL_RTX, true, OPTAB_DIRECT);
      result = expand_simple_binop (mode, AND, result, const1_rtx, target,
                                    true, OPTAB_DIRECT);
    }
  else
    result = expand_simple_binop (mode, AND, result, maskval, target, true,
                                  OPTAB_DIRECT);
  if (result != target)
    emit_move_insn (target, result);
}
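/* The internal functions expanded here are matched from source patterns
   such as (illustrative only):

     bool was_set = (__atomic_fetch_or (&word, 1u << bit,
                                        __ATOMIC_SEQ_CST) >> bit) & 1;

   When the target provides an atomic_bit_test_and_set pattern this can
   become a single instruction (for instance lock bts on x86); otherwise
   the fallback code above re-creates the shift-and-mask sequence around a
   plain atomic fetch-op.  */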
/* Expand an atomic clear operation.
       void _atomic_clear (BOOL *obj, enum memmodel)
   EXP is the call expression.  */

static rtx
expand_builtin_atomic_clear (tree exp)
{
  machine_mode mode;
  rtx mem, ret;
  enum memmodel model;

  mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  model = get_memmodel (CALL_EXPR_ARG (exp, 1));

  if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
    {
      source_location loc
        = expansion_point_location_if_in_system_header (input_location);
      warning_at (loc, OPT_Winvalid_memory_model,
                  "invalid memory model for %<__atomic_store%>");
      model = MEMMODEL_SEQ_CST;
    }

  /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
     Failing that, a store is issued by __atomic_store.  The only way this can
     fail is if the bool type is larger than a word size.  Unlikely, but
     handle it anyway for completeness.  Assume a single threaded model since
     there is no atomic support in this case, and no barriers are required.  */
  ret = expand_atomic_store (mem, const0_rtx, model, true);
  if (!ret)
    emit_move_insn (mem, const0_rtx);
  return const0_rtx;
}
/* Expand an atomic test_and_set operation.
       bool _atomic_test_and_set (BOOL *obj, enum memmodel)
   EXP is the call expression.  */

static rtx
expand_builtin_atomic_test_and_set (tree exp, rtx target)
{
  rtx mem;
  enum memmodel model;
  machine_mode mode;

  mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  model = get_memmodel (CALL_EXPR_ARG (exp, 1));

  return expand_atomic_test_and_set (target, mem, model);
}
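/* These two expanders back the C11 atomic_flag primitives; a sketch of a
   source-level user (names illustrative only):

     static volatile _Bool busy;

     while (__atomic_test_and_set (&busy, __ATOMIC_ACQUIRE))
       ;
     ...
     __atomic_clear (&busy, __ATOMIC_RELEASE);
 */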
/* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
   this architecture.  If ARG1 is NULL, use typical alignment for size ARG0.  */

static tree
fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
{
  int size;
  machine_mode mode;
  unsigned int mode_align, type_align;

  if (TREE_CODE (arg0) != INTEGER_CST)
    return NULL_TREE;

  /* We need a corresponding integer mode for the access to be lock-free.  */
  size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
  if (!int_mode_for_size (size, 0).exists (&mode))
    return boolean_false_node;

  mode_align = GET_MODE_ALIGNMENT (mode);

  if (TREE_CODE (arg1) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));

      /* Either this argument is null, or it's a fake pointer encoding
         the alignment of the object.  */
      val = least_bit_hwi (val);
      val *= BITS_PER_UNIT;

      if (val == 0 || mode_align < val)
        type_align = mode_align;
      else
        type_align = val;
    }
  else
    {
      tree ttype = TREE_TYPE (arg1);

      /* This function is usually invoked and folded immediately by the front
         end before anything else has a chance to look at it.  The pointer
         parameter at this point is usually cast to a void *, so check for that
         and look past the cast.  */
      if (CONVERT_EXPR_P (arg1)
          && POINTER_TYPE_P (ttype)
          && VOID_TYPE_P (TREE_TYPE (ttype))
          && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
        arg1 = TREE_OPERAND (arg1, 0);

      ttype = TREE_TYPE (arg1);
      gcc_assert (POINTER_TYPE_P (ttype));

      /* Get the underlying type of the object.  */
      ttype = TREE_TYPE (ttype);
      type_align = TYPE_ALIGN (ttype);
    }

  /* If the object has smaller alignment, the lock free routines cannot
     be used.  */
  if (type_align < mode_align)
    return boolean_false_node;

  /* Check if a compare_and_swap pattern exists for the mode which represents
     the required size.  The pattern is not allowed to fail, so the existence
     of the pattern indicates support is present.  Also require that an
     atomic load exists for the required size.  */
  if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
    return boolean_true_node;

  return boolean_false_node;
}
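/* For example (illustrative only), on a typical 64-bit target

     __atomic_always_lock_free (sizeof (long), 0)

   folds to 1 at compile time because an 8-byte integer mode with a
   compare-and-swap pattern and an atomic load exists, whereas passing a
   pointer to a packed, under-aligned object makes the alignment check
   above fail and the result fold to 0.  */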
/* Return true if the parameters to call EXP represent an object which will
   always generate lock free instructions.  The first argument represents the
   size of the object, and the second parameter is a pointer to the object
   itself.  If NULL is passed for the object, then the result is based on
   typical alignment for an object of the specified size.  Otherwise return
   false.  */

static rtx
expand_builtin_atomic_always_lock_free (tree exp)
{
  tree size;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);

  if (TREE_CODE (arg0) != INTEGER_CST)
    {
      error ("non-constant argument 1 to __atomic_always_lock_free");
      return const0_rtx;
    }

  size = fold_builtin_atomic_always_lock_free (arg0, arg1);
  if (size == boolean_true_node)
    return const1_rtx;
  return const0_rtx;
}
/* Return a one or zero if it can be determined that object ARG1 of size ARG0
   is lock free on this architecture.  */

static tree
fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
{
  if (!flag_inline_atomics)
    return NULL_TREE;

  /* If it isn't always lock free, don't generate a result.  */
  if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
    return boolean_true_node;

  return NULL_TREE;
}
/* Return true if the parameters to call EXP represent an object which will
   always generate lock free instructions.  The first argument represents the
   size of the object, and the second parameter is a pointer to the object
   itself.  If NULL is passed for the object, then the result is based on
   typical alignment for an object of the specified size.  Otherwise return
   NULL.  */

static rtx
expand_builtin_atomic_is_lock_free (tree exp)
{
  tree size;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);

  if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
    {
      error ("non-integer argument 1 to __atomic_is_lock_free");
      return NULL_RTX;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* If the value is known at compile time, return the RTX for it.  */
  size = fold_builtin_atomic_is_lock_free (arg0, arg1);
  if (size == boolean_true_node)
    return const1_rtx;

  return NULL_RTX;
}
/* Expand the __atomic_thread_fence intrinsic:
       void __atomic_thread_fence (enum memmodel)
   EXP is the CALL_EXPR.  */

static void
expand_builtin_atomic_thread_fence (tree exp)
{
  enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
  expand_mem_thread_fence (model);
}
/* Expand the __atomic_signal_fence intrinsic:
       void __atomic_signal_fence (enum memmodel)
   EXP is the CALL_EXPR.  */

static void
expand_builtin_atomic_signal_fence (tree exp)
{
  enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
  expand_mem_signal_fence (model);
}
/* Expand the __sync_synchronize intrinsic.  */

static void
expand_builtin_sync_synchronize (void)
{
  expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
}
static rtx
expand_builtin_thread_pointer (tree exp, rtx target)
{
  enum insn_code icode;
  if (!validate_arglist (exp, VOID_TYPE))
    return const0_rtx;
  icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand op;
      /* If the target is not suitable then create a new target.  */
      if (target == NULL_RTX
          || !REG_P (target)
          || GET_MODE (target) != Pmode)
        target = gen_reg_rtx (Pmode);
      create_output_operand (&op, target, Pmode);
      expand_insn (icode, 1, &op);
      return target;
    }
  error ("__builtin_thread_pointer is not supported on this target");
  return const0_rtx;
}
static void
expand_builtin_set_thread_pointer (tree exp)
{
  enum insn_code icode;
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return;
  icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand op;
      rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
                             Pmode, EXPAND_NORMAL);
      create_input_operand (&op, val, Pmode);
      expand_insn (icode, 1, &op);
      return;
    }
  error ("__builtin_set_thread_pointer is not supported on this target");
}
/* Emit code to restore the current value of stack.  */

static void
expand_stack_restore (tree var)
{
  rtx_insn *prev;
  rtx sa = expand_normal (var);

  sa = convert_memory_address (Pmode, sa);

  prev = get_last_insn ();
  emit_stack_restore (SAVE_BLOCK, sa);

  record_new_stack_level ();

  fixup_args_size_notes (prev, get_last_insn (), 0);
}
/* Emit code to save the current value of stack.  */

static rtx
expand_stack_save (void)
{
  rtx ret = NULL_RTX;

  emit_stack_save (SAVE_BLOCK, &ret);
  return ret;
}
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

rtx
expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
                int ignore)
{
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
  int flags;

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return targetm.expand_builtin (exp, target, subtarget, mode, ignore);

  /* When ASan is enabled, we don't want to expand some memory/string
     builtins and rely on libsanitizer's hooks.  This allows us to avoid
     redundant checks and be sure that possible overflow will be detected
     by ASan.  */

  if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
    return expand_call (exp, target, ignore);

  /* When not optimizing, generate calls to library functions for a certain
     set of builtins.  */
  if (!optimize
      && !called_as_built_in (fndecl)
      && fcode != BUILT_IN_FORK
      && fcode != BUILT_IN_EXECL
      && fcode != BUILT_IN_EXECV
      && fcode != BUILT_IN_EXECLP
      && fcode != BUILT_IN_EXECLE
      && fcode != BUILT_IN_EXECVP
      && fcode != BUILT_IN_EXECVE
      && !ALLOCA_FUNCTION_CODE_P (fcode)
      && fcode != BUILT_IN_FREE
      && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
      && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
      && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
      && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
      && fcode != BUILT_IN_CHKP_BNDRET)
    return expand_call (exp, target, ignore);

  /* The built-in function expanders test for target == const0_rtx
     to determine whether the function's result will be ignored.  */
  if (ignore)
    target = const0_rtx;

  /* If the result of a pure or const built-in function is ignored, and
     none of its arguments are volatile, we can avoid expanding the
     built-in call and just evaluate the arguments for side-effects.  */
  if (target == const0_rtx
      && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
      && !(flags & ECF_LOOPING_CONST_OR_PURE))
    {
      bool volatilep = false;
      tree arg;
      call_expr_arg_iterator iter;

      FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
        if (TREE_THIS_VOLATILE (arg))
          {
            volatilep = true;
            break;
          }

      if (! volatilep)
        {
          FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
            expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
          return const0_rtx;
        }
    }

  /* expand_builtin_with_bounds is supposed to be used for
     instrumented builtin calls.  */
  gcc_assert (!CALL_WITH_BOUNDS_P (exp));

  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_FABS):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
    case BUILT_IN_FABSD32:
    case BUILT_IN_FABSD64:
    case BUILT_IN_FABSD128:
      target = expand_builtin_fabs (exp, target, subtarget);
      if (target)
        return target;
      break;

    CASE_FLT_FN (BUILT_IN_COPYSIGN):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
      target = expand_builtin_copysign (exp, target, subtarget);
      if (target)
        return target;
      break;

      /* Just do a normal library call if we were unable to fold
         the values.  */
    CASE_FLT_FN (BUILT_IN_CABS):
      break;

    CASE_FLT_FN (BUILT_IN_FMA):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
      target = expand_builtin_mathfn_ternary (exp, target, subtarget);
      if (target)
        return target;
      break;

    CASE_FLT_FN (BUILT_IN_ILOGB):
      if (! flag_unsafe_math_optimizations)
        break;
      gcc_fallthrough ();
    CASE_FLT_FN (BUILT_IN_ISINF):
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_ISFINITE:
    case BUILT_IN_ISNORMAL:
      target = expand_builtin_interclass_mathfn (exp, target);
      if (target)
        return target;
      break;

    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      target = expand_builtin_int_roundingfn (exp, target);
      if (target)
        return target;
      break;

    CASE_FLT_FN (BUILT_IN_IRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_IROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      target = expand_builtin_int_roundingfn_2 (exp, target);
      if (target)
        return target;
      break;

    CASE_FLT_FN (BUILT_IN_POWI):
      target = expand_builtin_powi (exp, target);
      if (target)
        return target;
      break;

    CASE_FLT_FN (BUILT_IN_CEXPI):
      target = expand_builtin_cexpi (exp, target);
      gcc_assert (target);
      return target;

    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      if (! flag_unsafe_math_optimizations)
        break;
      target = expand_builtin_mathfn_3 (exp, target, subtarget);
      if (target)
        return target;
      break;

    CASE_FLT_FN (BUILT_IN_SINCOS):
      if (! flag_unsafe_math_optimizations)
        break;
      target = expand_builtin_sincos (exp);
      if (target)
        return target;
      break;

    case BUILT_IN_APPLY_ARGS:
      return expand_builtin_apply_args ();
      /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
         FUNCTION with a copy of the parameters described by
         ARGUMENTS, and ARGSIZE.  It returns a block of memory
         allocated on the stack into which is stored all the registers
         that might possibly be used for returning the result of a
         function.  ARGUMENTS is the value returned by
         __builtin_apply_args.  ARGSIZE is the number of bytes of
         arguments that must be copied.  ??? How should this value be
         computed?  We'll also need a safe worst case value for varargs
         functions.  */
    case BUILT_IN_APPLY:
      if (!validate_arglist (exp, POINTER_TYPE,
                             POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
          && !validate_arglist (exp, REFERENCE_TYPE,
                                POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
        return const0_rtx;
      else
        {
          rtx ops[3];

          ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
          ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
          ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));

          return expand_builtin_apply (ops[0], ops[1], ops[2]);
        }

      /* __builtin_return (RESULT) causes the function to return the
         value described by RESULT.  RESULT is address of the block of
         memory returned by __builtin_apply.  */
    case BUILT_IN_RETURN:
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
        expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
      return const0_rtx;
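      /* A sketch of the intended use of these builtins (illustrative only):
         a generic forwarding wrapper can be written as

           void *args = __builtin_apply_args ();
           void *res  = __builtin_apply ((void (*) ()) callee, args, 64);
           __builtin_return (res);

         where 64 stands in for a worst-case number of bytes of outgoing
         arguments; as the comment above notes, there is no portable way to
         compute that value.  */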
    case BUILT_IN_SAVEREGS:
      return expand_builtin_saveregs ();

    case BUILT_IN_VA_ARG_PACK:
      /* All valid uses of __builtin_va_arg_pack () are removed during
         inlining.  */
      error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
      return const0_rtx;

    case BUILT_IN_VA_ARG_PACK_LEN:
      /* All valid uses of __builtin_va_arg_pack_len () are removed during
         inlining.  */
      error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
      return const0_rtx;

      /* Return the address of the first anonymous stack arg.  */
    case BUILT_IN_NEXT_ARG:
      if (fold_builtin_next_arg (exp, false))
        return const0_rtx;
      return expand_builtin_next_arg ();

    case BUILT_IN_CLEAR_CACHE:
      target = expand_builtin___clear_cache (exp);
      if (target)
        return target;
      break;

    case BUILT_IN_CLASSIFY_TYPE:
      return expand_builtin_classify_type (exp);

    case BUILT_IN_CONSTANT_P:
      return const0_rtx;

    case BUILT_IN_FRAME_ADDRESS:
    case BUILT_IN_RETURN_ADDRESS:
      return expand_builtin_frame_address (fndecl, exp);

      /* Returns the address of the area where the structure is returned.
         0 otherwise.  */
    case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      if (call_expr_nargs (exp) != 0
          || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
          || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
        return const0_rtx;
      else
        return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);

    CASE_BUILT_IN_ALLOCA:
      target = expand_builtin_alloca (exp);
      if (target)
        return target;
      break;

    case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
      return expand_asan_emit_allocas_unpoison (exp);

    case BUILT_IN_STACK_SAVE:
      return expand_stack_save ();

    case BUILT_IN_STACK_RESTORE:
      expand_stack_restore (CALL_EXPR_ARG (exp, 0));
      return const0_rtx;

    case BUILT_IN_BSWAP16:
    case BUILT_IN_BSWAP32:
    case BUILT_IN_BSWAP64:
      target = expand_builtin_bswap (target_mode, exp, target, subtarget);
      if (target)
        return target;
      break;
    CASE_INT_FN (BUILT_IN_FFS):
      target = expand_builtin_unop (target_mode, exp, target,
                                    subtarget, ffs_optab);
      if (target)
        return target;
      break;

    CASE_INT_FN (BUILT_IN_CLZ):
      target = expand_builtin_unop (target_mode, exp, target,
                                    subtarget, clz_optab);
      if (target)
        return target;
      break;

    CASE_INT_FN (BUILT_IN_CTZ):
      target = expand_builtin_unop (target_mode, exp, target,
                                    subtarget, ctz_optab);
      if (target)
        return target;
      break;

    CASE_INT_FN (BUILT_IN_CLRSB):
      target = expand_builtin_unop (target_mode, exp, target,
                                    subtarget, clrsb_optab);
      if (target)
        return target;
      break;

    CASE_INT_FN (BUILT_IN_POPCOUNT):
      target = expand_builtin_unop (target_mode, exp, target,
                                    subtarget, popcount_optab);
      if (target)
        return target;
      break;

    CASE_INT_FN (BUILT_IN_PARITY):
      target = expand_builtin_unop (target_mode, exp, target,
                                    subtarget, parity_optab);
      if (target)
        return target;
      break;
    case BUILT_IN_STRLEN:
      target = expand_builtin_strlen (exp, target, target_mode);
      if (target)
        return target;
      break;

    case BUILT_IN_STRCAT:
      target = expand_builtin_strcat (exp, target);
      if (target)
        return target;
      break;

    case BUILT_IN_STRCPY:
      target = expand_builtin_strcpy (exp, target);
      if (target)
        return target;
      break;

    case BUILT_IN_STRNCAT:
      target = expand_builtin_strncat (exp, target);
      if (target)
        return target;
      break;

    case BUILT_IN_STRNCPY:
      target = expand_builtin_strncpy (exp, target);
      if (target)
        return target;
      break;

    case BUILT_IN_STPCPY:
      target = expand_builtin_stpcpy (exp, target, mode);
      if (target)
        return target;
      break;

    case BUILT_IN_STPNCPY:
      target = expand_builtin_stpncpy (exp, target);
      if (target)
        return target;
      break;

    case BUILT_IN_MEMCHR:
      target = expand_builtin_memchr (exp, target);
      if (target)
        return target;
      break;

    case BUILT_IN_MEMCPY:
      target = expand_builtin_memcpy (exp, target);
      if (target)
        return target;
      break;

    case BUILT_IN_MEMMOVE:
      target = expand_builtin_memmove (exp, target);
      if (target)
        return target;
      break;

    case BUILT_IN_MEMPCPY:
      target = expand_builtin_mempcpy (exp, target);
      if (target)
        return target;
      break;

    case BUILT_IN_MEMSET:
      target = expand_builtin_memset (exp, target, mode);
      if (target)
        return target;
      break;

    case BUILT_IN_BZERO:
      target = expand_builtin_bzero (exp);
      if (target)
        return target;
      break;

    case BUILT_IN_STRCMP:
      target = expand_builtin_strcmp (exp, target);
      if (target)
        return target;
      break;

    case BUILT_IN_STRNCMP:
      target = expand_builtin_strncmp (exp, target, mode);
      if (target)
        return target;
      break;

    case BUILT_IN_MEMCMP:
    case BUILT_IN_MEMCMP_EQ:
      target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
      if (target)
        return target;
      if (fcode == BUILT_IN_MEMCMP_EQ)
        {
          tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
          TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
        }
      break;
    case BUILT_IN_SETJMP:
      /* This should have been lowered to the builtins below.  */
      gcc_unreachable ();

    case BUILT_IN_SETJMP_SETUP:
      /* __builtin_setjmp_setup is passed a pointer to an array of five words
         and the receiver label.  */
      if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
        {
          rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
                                      VOIDmode, EXPAND_NORMAL);
          tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
          rtx_insn *label_r = label_rtx (label);

          /* This is copied from the handling of non-local gotos.  */
          expand_builtin_setjmp_setup (buf_addr, label_r);
          nonlocal_goto_handler_labels
            = gen_rtx_INSN_LIST (VOIDmode, label_r,
                                 nonlocal_goto_handler_labels);
          /* ??? Do not let expand_label treat us as such since we would
             not want to be both on the list of non-local labels and on
             the list of forced labels.  */
          FORCED_LABEL (label) = 0;
          return const0_rtx;
        }
      break;

    case BUILT_IN_SETJMP_RECEIVER:
      /* __builtin_setjmp_receiver is passed the receiver label.  */
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
        {
          tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
          rtx_insn *label_r = label_rtx (label);

          expand_builtin_setjmp_receiver (label_r);
          return const0_rtx;
        }
      break;

      /* __builtin_longjmp is passed a pointer to an array of five words.
         It's similar to the C library longjmp function but works with
         __builtin_setjmp above.  */
    case BUILT_IN_LONGJMP:
      if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
        {
          rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
                                      VOIDmode, EXPAND_NORMAL);
          rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));

          if (value != const1_rtx)
            {
              error ("%<__builtin_longjmp%> second argument must be 1");
              return const0_rtx;
            }

          expand_builtin_longjmp (buf_addr, value);
          return const0_rtx;
        }
      break;

    case BUILT_IN_NONLOCAL_GOTO:
      target = expand_builtin_nonlocal_goto (exp);
      if (target)
        return target;
      break;

      /* This updates the setjmp buffer that is its argument with the value
         of the current stack pointer.  */
    case BUILT_IN_UPDATE_SETJMP_BUF:
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
        {
          rtx buf_addr
            = expand_normal (CALL_EXPR_ARG (exp, 0));

          expand_builtin_update_setjmp_buf (buf_addr);
          return const0_rtx;
        }
      break;

    case BUILT_IN_TRAP:
      expand_builtin_trap ();
      return const0_rtx;

    case BUILT_IN_UNREACHABLE:
      expand_builtin_unreachable ();
      return const0_rtx;
    CASE_FLT_FN (BUILT_IN_SIGNBIT):
    case BUILT_IN_SIGNBITD32:
    case BUILT_IN_SIGNBITD64:
    case BUILT_IN_SIGNBITD128:
      target = expand_builtin_signbit (exp, target);
      if (target)
        return target;
      break;

      /* Various hooks for the DWARF 2 __throw routine.  */
    case BUILT_IN_UNWIND_INIT:
      expand_builtin_unwind_init ();
      return const0_rtx;
    case BUILT_IN_DWARF_CFA:
      return virtual_cfa_rtx;
#ifdef DWARF2_UNWIND_INFO
    case BUILT_IN_DWARF_SP_COLUMN:
      return expand_builtin_dwarf_sp_column ();
    case BUILT_IN_INIT_DWARF_REG_SIZES:
      expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
      return const0_rtx;
#endif
    case BUILT_IN_FROB_RETURN_ADDR:
      return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
    case BUILT_IN_EXTRACT_RETURN_ADDR:
      return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
    case BUILT_IN_EH_RETURN:
      expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
                                CALL_EXPR_ARG (exp, 1));
      return const0_rtx;
    case BUILT_IN_EH_RETURN_DATA_REGNO:
      return expand_builtin_eh_return_data_regno (exp);
    case BUILT_IN_EXTEND_POINTER:
      return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
    case BUILT_IN_EH_POINTER:
      return expand_builtin_eh_pointer (exp);
    case BUILT_IN_EH_FILTER:
      return expand_builtin_eh_filter (exp);
    case BUILT_IN_EH_COPY_VALUES:
      return expand_builtin_eh_copy_values (exp);
    case BUILT_IN_VA_START:
      return expand_builtin_va_start (exp);
    case BUILT_IN_VA_END:
      return expand_builtin_va_end (exp);
    case BUILT_IN_VA_COPY:
      return expand_builtin_va_copy (exp);
    case BUILT_IN_EXPECT:
      return expand_builtin_expect (exp, target);
    case BUILT_IN_ASSUME_ALIGNED:
      return expand_builtin_assume_aligned (exp, target);
    case BUILT_IN_PREFETCH:
      expand_builtin_prefetch (exp);
      return const0_rtx;

    case BUILT_IN_INIT_TRAMPOLINE:
      return expand_builtin_init_trampoline (exp, true);
    case BUILT_IN_INIT_HEAP_TRAMPOLINE:
      return expand_builtin_init_trampoline (exp, false);
    case BUILT_IN_ADJUST_TRAMPOLINE:
      return expand_builtin_adjust_trampoline (exp);

    case BUILT_IN_INIT_DESCRIPTOR:
      return expand_builtin_init_descriptor (exp);
    case BUILT_IN_ADJUST_DESCRIPTOR:
      return expand_builtin_adjust_descriptor (exp);

    case BUILT_IN_FORK:
    case BUILT_IN_EXECL:
    case BUILT_IN_EXECV:
    case BUILT_IN_EXECLP:
    case BUILT_IN_EXECLE:
    case BUILT_IN_EXECVP:
    case BUILT_IN_EXECVE:
      target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
      if (target)
        return target;
      break;
    case BUILT_IN_SYNC_FETCH_AND_ADD_1:
    case BUILT_IN_SYNC_FETCH_AND_ADD_2:
    case BUILT_IN_SYNC_FETCH_AND_ADD_4:
    case BUILT_IN_SYNC_FETCH_AND_ADD_8:
    case BUILT_IN_SYNC_FETCH_AND_ADD_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
      target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_SUB_1:
    case BUILT_IN_SYNC_FETCH_AND_SUB_2:
    case BUILT_IN_SYNC_FETCH_AND_SUB_4:
    case BUILT_IN_SYNC_FETCH_AND_SUB_8:
    case BUILT_IN_SYNC_FETCH_AND_SUB_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
      target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_OR_1:
    case BUILT_IN_SYNC_FETCH_AND_OR_2:
    case BUILT_IN_SYNC_FETCH_AND_OR_4:
    case BUILT_IN_SYNC_FETCH_AND_OR_8:
    case BUILT_IN_SYNC_FETCH_AND_OR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
      target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_AND_1:
    case BUILT_IN_SYNC_FETCH_AND_AND_2:
    case BUILT_IN_SYNC_FETCH_AND_AND_4:
    case BUILT_IN_SYNC_FETCH_AND_AND_8:
    case BUILT_IN_SYNC_FETCH_AND_AND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
      target = expand_builtin_sync_operation (mode, exp, AND, false, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_XOR_1:
    case BUILT_IN_SYNC_FETCH_AND_XOR_2:
    case BUILT_IN_SYNC_FETCH_AND_XOR_4:
    case BUILT_IN_SYNC_FETCH_AND_XOR_8:
    case BUILT_IN_SYNC_FETCH_AND_XOR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
      target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_NAND_1:
    case BUILT_IN_SYNC_FETCH_AND_NAND_2:
    case BUILT_IN_SYNC_FETCH_AND_NAND_4:
    case BUILT_IN_SYNC_FETCH_AND_NAND_8:
    case BUILT_IN_SYNC_FETCH_AND_NAND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
      target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_ADD_AND_FETCH_1:
    case BUILT_IN_SYNC_ADD_AND_FETCH_2:
    case BUILT_IN_SYNC_ADD_AND_FETCH_4:
    case BUILT_IN_SYNC_ADD_AND_FETCH_8:
    case BUILT_IN_SYNC_ADD_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_SUB_AND_FETCH_1:
    case BUILT_IN_SYNC_SUB_AND_FETCH_2:
    case BUILT_IN_SYNC_SUB_AND_FETCH_4:
    case BUILT_IN_SYNC_SUB_AND_FETCH_8:
    case BUILT_IN_SYNC_SUB_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_OR_AND_FETCH_1:
    case BUILT_IN_SYNC_OR_AND_FETCH_2:
    case BUILT_IN_SYNC_OR_AND_FETCH_4:
    case BUILT_IN_SYNC_OR_AND_FETCH_8:
    case BUILT_IN_SYNC_OR_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_AND_AND_FETCH_1:
    case BUILT_IN_SYNC_AND_AND_FETCH_2:
    case BUILT_IN_SYNC_AND_AND_FETCH_4:
    case BUILT_IN_SYNC_AND_AND_FETCH_8:
    case BUILT_IN_SYNC_AND_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, AND, true, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_XOR_AND_FETCH_1:
    case BUILT_IN_SYNC_XOR_AND_FETCH_2:
    case BUILT_IN_SYNC_XOR_AND_FETCH_4:
    case BUILT_IN_SYNC_XOR_AND_FETCH_8:
    case BUILT_IN_SYNC_XOR_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_NAND_AND_FETCH_1:
    case BUILT_IN_SYNC_NAND_AND_FETCH_2:
    case BUILT_IN_SYNC_NAND_AND_FETCH_4:
    case BUILT_IN_SYNC_NAND_AND_FETCH_8:
    case BUILT_IN_SYNC_NAND_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
      if (target)
        return target;
      break;
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
      if (mode == VOIDmode)
        mode = TYPE_MODE (boolean_type_node);
      if (!target || !register_operand (target, mode))
        target = gen_reg_rtx (mode);

      mode = get_builtin_sync_mode
        (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
      target = expand_builtin_compare_and_swap (mode, exp, true, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
      mode = get_builtin_sync_mode
        (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
      target = expand_builtin_compare_and_swap (mode, exp, false, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
      target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_LOCK_RELEASE_1:
    case BUILT_IN_SYNC_LOCK_RELEASE_2:
    case BUILT_IN_SYNC_LOCK_RELEASE_4:
    case BUILT_IN_SYNC_LOCK_RELEASE_8:
    case BUILT_IN_SYNC_LOCK_RELEASE_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
      expand_builtin_sync_lock_release (mode, exp);
      return const0_rtx;

    case BUILT_IN_SYNC_SYNCHRONIZE:
      expand_builtin_sync_synchronize ();
      return const0_rtx;
    case BUILT_IN_ATOMIC_EXCHANGE_1:
    case BUILT_IN_ATOMIC_EXCHANGE_2:
    case BUILT_IN_ATOMIC_EXCHANGE_4:
    case BUILT_IN_ATOMIC_EXCHANGE_8:
    case BUILT_IN_ATOMIC_EXCHANGE_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
      target = expand_builtin_atomic_exchange (mode, exp, target);
      if (target)
        return target;
      break;

    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
      {
        unsigned int nargs, z;
        vec<tree, va_gc> *vec;

        mode =
          get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
        target = expand_builtin_atomic_compare_exchange (mode, exp, target);
        if (target)
          return target;

        /* If this is turned into an external library call, the weak parameter
           must be dropped to match the expected parameter list.  */
        nargs = call_expr_nargs (exp);
        vec_alloc (vec, nargs - 1);
        for (z = 0; z < 3; z++)
          vec->quick_push (CALL_EXPR_ARG (exp, z));
        /* Skip the boolean weak parameter.  */
        for (z = 4; z < 6; z++)
          vec->quick_push (CALL_EXPR_ARG (exp, z));
        exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
        break;
      }

    case BUILT_IN_ATOMIC_LOAD_1:
    case BUILT_IN_ATOMIC_LOAD_2:
    case BUILT_IN_ATOMIC_LOAD_4:
    case BUILT_IN_ATOMIC_LOAD_8:
    case BUILT_IN_ATOMIC_LOAD_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
      target = expand_builtin_atomic_load (mode, exp, target);
      if (target)
        return target;
      break;

    case BUILT_IN_ATOMIC_STORE_1:
    case BUILT_IN_ATOMIC_STORE_2:
    case BUILT_IN_ATOMIC_STORE_4:
    case BUILT_IN_ATOMIC_STORE_8:
    case BUILT_IN_ATOMIC_STORE_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
      target = expand_builtin_atomic_store (mode, exp);
      if (target)
        return const0_rtx;
      break;
    case BUILT_IN_ATOMIC_ADD_FETCH_1:
    case BUILT_IN_ATOMIC_ADD_FETCH_2:
    case BUILT_IN_ATOMIC_ADD_FETCH_4:
    case BUILT_IN_ATOMIC_ADD_FETCH_8:
    case BUILT_IN_ATOMIC_ADD_FETCH_16:
      {
        enum built_in_function lib;
        mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
        lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
                                       (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
        target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
                                                 ignore, lib);
        if (target)
          return target;
        break;
      }

    case BUILT_IN_ATOMIC_SUB_FETCH_1:
    case BUILT_IN_ATOMIC_SUB_FETCH_2:
    case BUILT_IN_ATOMIC_SUB_FETCH_4:
    case BUILT_IN_ATOMIC_SUB_FETCH_8:
    case BUILT_IN_ATOMIC_SUB_FETCH_16:
      {
        enum built_in_function lib;
        mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
        lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
                                       (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
        target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
                                                 ignore, lib);
        if (target)
          return target;
        break;
      }

    case BUILT_IN_ATOMIC_AND_FETCH_1:
    case BUILT_IN_ATOMIC_AND_FETCH_2:
    case BUILT_IN_ATOMIC_AND_FETCH_4:
    case BUILT_IN_ATOMIC_AND_FETCH_8:
    case BUILT_IN_ATOMIC_AND_FETCH_16:
      {
        enum built_in_function lib;
        mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
        lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
                                       (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
        target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
                                                 ignore, lib);
        if (target)
          return target;
        break;
      }

    case BUILT_IN_ATOMIC_NAND_FETCH_1:
    case BUILT_IN_ATOMIC_NAND_FETCH_2:
    case BUILT_IN_ATOMIC_NAND_FETCH_4:
    case BUILT_IN_ATOMIC_NAND_FETCH_8:
    case BUILT_IN_ATOMIC_NAND_FETCH_16:
      {
        enum built_in_function lib;
        mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
        lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
                                       (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
        target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
                                                 ignore, lib);
        if (target)
          return target;
        break;
      }

    case BUILT_IN_ATOMIC_XOR_FETCH_1:
    case BUILT_IN_ATOMIC_XOR_FETCH_2:
    case BUILT_IN_ATOMIC_XOR_FETCH_4:
    case BUILT_IN_ATOMIC_XOR_FETCH_8:
    case BUILT_IN_ATOMIC_XOR_FETCH_16:
      {
        enum built_in_function lib;
        mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
        lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
                                       (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
        target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
                                                 ignore, lib);
        if (target)
          return target;
        break;
      }

    case BUILT_IN_ATOMIC_OR_FETCH_1:
    case BUILT_IN_ATOMIC_OR_FETCH_2:
    case BUILT_IN_ATOMIC_OR_FETCH_4:
    case BUILT_IN_ATOMIC_OR_FETCH_8:
    case BUILT_IN_ATOMIC_OR_FETCH_16:
      {
        enum built_in_function lib;
        mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
        lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
                                       (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
        target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
                                                 ignore, lib);
        if (target)
          return target;
        break;
      }
    case BUILT_IN_ATOMIC_FETCH_ADD_1:
    case BUILT_IN_ATOMIC_FETCH_ADD_2:
    case BUILT_IN_ATOMIC_FETCH_ADD_4:
    case BUILT_IN_ATOMIC_FETCH_ADD_8:
    case BUILT_IN_ATOMIC_FETCH_ADD_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
                                               ignore, BUILT_IN_NONE);
      if (target)
        return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_SUB_1:
    case BUILT_IN_ATOMIC_FETCH_SUB_2:
    case BUILT_IN_ATOMIC_FETCH_SUB_4:
    case BUILT_IN_ATOMIC_FETCH_SUB_8:
    case BUILT_IN_ATOMIC_FETCH_SUB_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
                                               ignore, BUILT_IN_NONE);
      if (target)
        return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_AND_1:
    case BUILT_IN_ATOMIC_FETCH_AND_2:
    case BUILT_IN_ATOMIC_FETCH_AND_4:
    case BUILT_IN_ATOMIC_FETCH_AND_8:
    case BUILT_IN_ATOMIC_FETCH_AND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
                                               ignore, BUILT_IN_NONE);
      if (target)
        return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_NAND_1:
    case BUILT_IN_ATOMIC_FETCH_NAND_2:
    case BUILT_IN_ATOMIC_FETCH_NAND_4:
    case BUILT_IN_ATOMIC_FETCH_NAND_8:
    case BUILT_IN_ATOMIC_FETCH_NAND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
                                               ignore, BUILT_IN_NONE);
      if (target)
        return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_XOR_1:
    case BUILT_IN_ATOMIC_FETCH_XOR_2:
    case BUILT_IN_ATOMIC_FETCH_XOR_4:
    case BUILT_IN_ATOMIC_FETCH_XOR_8:
    case BUILT_IN_ATOMIC_FETCH_XOR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
                                               ignore, BUILT_IN_NONE);
      if (target)
        return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_OR_1:
    case BUILT_IN_ATOMIC_FETCH_OR_2:
    case BUILT_IN_ATOMIC_FETCH_OR_4:
    case BUILT_IN_ATOMIC_FETCH_OR_8:
    case BUILT_IN_ATOMIC_FETCH_OR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
                                               ignore, BUILT_IN_NONE);
      if (target)
        return target;
      break;
    case BUILT_IN_ATOMIC_TEST_AND_SET:
      return expand_builtin_atomic_test_and_set (exp, target);

    case BUILT_IN_ATOMIC_CLEAR:
      return expand_builtin_atomic_clear (exp);

    case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
      return expand_builtin_atomic_always_lock_free (exp);

    case BUILT_IN_ATOMIC_IS_LOCK_FREE:
      target = expand_builtin_atomic_is_lock_free (exp);
      if (target)
        return target;
      break;

    case BUILT_IN_ATOMIC_THREAD_FENCE:
      expand_builtin_atomic_thread_fence (exp);
      return const0_rtx;

    case BUILT_IN_ATOMIC_SIGNAL_FENCE:
      expand_builtin_atomic_signal_fence (exp);
      return const0_rtx;
    case BUILT_IN_OBJECT_SIZE:
      return expand_builtin_object_size (exp);

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      target = expand_builtin_memory_chk (exp, target, mode, fcode);
      if (target)
        return target;
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
    case BUILT_IN_STRCAT_CHK:
    case BUILT_IN_STRNCAT_CHK:
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      maybe_emit_chk_warning (exp, fcode);
      break;

    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      maybe_emit_sprintf_chk_warning (exp, fcode);
      break;

    case BUILT_IN_FREE:
      if (warn_free_nonheap_object)
        maybe_emit_free_warning (exp);
      break;

    case BUILT_IN_THREAD_POINTER:
      return expand_builtin_thread_pointer (exp, target);

    case BUILT_IN_SET_THREAD_POINTER:
      expand_builtin_set_thread_pointer (exp);
      return const0_rtx;

    case BUILT_IN_CILK_DETACH:
      expand_builtin_cilk_detach (exp);
      return const0_rtx;

    case BUILT_IN_CILK_POP_FRAME:
      expand_builtin_cilk_pop_frame (exp);
      return const0_rtx;
    case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
    case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
    case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
    case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
    case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
    case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
    case BUILT_IN_CHKP_SET_PTR_BOUNDS:
    case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
    case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
    case BUILT_IN_CHKP_GET_PTR_LBOUND:
    case BUILT_IN_CHKP_GET_PTR_UBOUND:
      /* We allow user CHKP builtins if Pointer Bounds
         Checker is off.  */
      if (!chkp_function_instrumented_p (current_function_decl))
        {
          if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
              || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
              || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
              || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
              || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
            return expand_normal (CALL_EXPR_ARG (exp, 0));
          else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
            return expand_normal (size_zero_node);
          else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
            return expand_normal (size_int (-1));
          else
            return const0_rtx;
        }

    case BUILT_IN_CHKP_BNDMK:
    case BUILT_IN_CHKP_BNDSTX:
    case BUILT_IN_CHKP_BNDCL:
    case BUILT_IN_CHKP_BNDCU:
    case BUILT_IN_CHKP_BNDLDX:
    case BUILT_IN_CHKP_BNDRET:
    case BUILT_IN_CHKP_INTERSECT:
    case BUILT_IN_CHKP_NARROW:
    case BUILT_IN_CHKP_EXTRACT_LOWER:
    case BUILT_IN_CHKP_EXTRACT_UPPER:
      /* Software implementation of Pointer Bounds Checker is NYI.
         Target support is required.  */
      error ("Your target platform does not support -fcheck-pointer-bounds");
      break;

    case BUILT_IN_ACC_ON_DEVICE:
      /* Do library call, if we failed to expand the builtin when
         folding.  */
      break;

    default:	/* just do library call, if unknown builtin */
      break;
    }

  /* The switch statement above can drop through to cause the function
     to be called normally.  */
  return expand_call (exp, target, ignore);
}
/* Similar to expand_builtin but is used for instrumented calls.  */

rtx
expand_builtin_with_bounds (tree exp, rtx target,
                            rtx subtarget ATTRIBUTE_UNUSED,
                            machine_mode mode, int ignore)
{
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  gcc_assert (CALL_WITH_BOUNDS_P (exp));

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return targetm.expand_builtin (exp, target, subtarget, mode, ignore);

  gcc_assert (fcode > BEGIN_CHKP_BUILTINS
              && fcode < END_CHKP_BUILTINS);

  switch (fcode)
    {
    case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
      target = expand_builtin_memcpy_with_bounds (exp, target);
      if (target)
        return target;
      break;

    case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
      target = expand_builtin_mempcpy_with_bounds (exp, target);
      if (target)
        return target;
      break;

    case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
      target = expand_builtin_memset_with_bounds (exp, target, mode);
      if (target)
        return target;
      break;

    default:
      break;
    }

  /* The switch statement above can drop through to cause the function
     to be called normally.  */
  return expand_call (exp, target, ignore);
}
/* Determine whether a tree node represents a call to a built-in
   function.  If the tree T is a call to a built-in function with
   the right number of arguments of the appropriate types, return
   the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
   Otherwise the return value is END_BUILTINS.  */

enum built_in_function
builtin_mathfn_code (const_tree t)
{
  const_tree fndecl, arg, parmlist;
  const_tree argtype, parmtype;
  const_call_expr_arg_iterator iter;

  if (TREE_CODE (t) != CALL_EXPR
      || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
    return END_BUILTINS;

  fndecl = get_callee_fndecl (t);
  if (fndecl == NULL_TREE
      || TREE_CODE (fndecl) != FUNCTION_DECL
      || ! DECL_BUILT_IN (fndecl)
      || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return END_BUILTINS;

  parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  init_const_call_expr_arg_iterator (t, &iter);
  for (; parmlist; parmlist = TREE_CHAIN (parmlist))
    {
      /* If a function doesn't take a variable number of arguments,
         the last element in the list will have type `void'.  */
      parmtype = TREE_VALUE (parmlist);
      if (VOID_TYPE_P (parmtype))
        {
          if (more_const_call_expr_args_p (&iter))
            return END_BUILTINS;
          return DECL_FUNCTION_CODE (fndecl);
        }

      if (! more_const_call_expr_args_p (&iter))
        return END_BUILTINS;

      arg = next_const_call_expr_arg (&iter);
      argtype = TREE_TYPE (arg);

      if (SCALAR_FLOAT_TYPE_P (parmtype))
        {
          if (! SCALAR_FLOAT_TYPE_P (argtype))
            return END_BUILTINS;
        }
      else if (COMPLEX_FLOAT_TYPE_P (parmtype))
        {
          if (! COMPLEX_FLOAT_TYPE_P (argtype))
            return END_BUILTINS;
        }
      else if (POINTER_TYPE_P (parmtype))
        {
          if (! POINTER_TYPE_P (argtype))
            return END_BUILTINS;
        }
      else if (INTEGRAL_TYPE_P (parmtype))
        {
          if (! INTEGRAL_TYPE_P (argtype))
            return END_BUILTINS;
        }
      else
        return END_BUILTINS;
    }

  /* Variable-length argument list.  */
  return DECL_FUNCTION_CODE (fndecl);
}
7761 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7762    evaluate to a constant.  */
7765 fold_builtin_constant_p (tree arg)
7767   /* We return 1 for a numeric type that's known to be a constant
7768      value at compile-time or for an aggregate type that's a
7769      literal constant.  */
7772   /* If we know this is a constant, emit the constant of one.  */
7773   if (CONSTANT_CLASS_P (arg)
7774       || (TREE_CODE (arg) == CONSTRUCTOR
7775           && TREE_CONSTANT (arg)))
7776     return integer_one_node;
7777   if (TREE_CODE (arg) == ADDR_EXPR)
7779       tree op = TREE_OPERAND (arg, 0);
7780       if (TREE_CODE (op) == STRING_CST
7781           || (TREE_CODE (op) == ARRAY_REF
7782               && integer_zerop (TREE_OPERAND (op, 1))
7783               && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7784         return integer_one_node;
7787   /* If this expression has side effects, show we don't know it to be a
7788      constant.  Likewise if it's a pointer or aggregate type since in
7789      those cases we only want literals, since those are only optimized
7790      when generating RTL, not later.
7791      And finally, if we are compiling an initializer, not code, we
7792      need to return a definite result now; there's not going to be any
7793      more optimization done.  */
7794   if (TREE_SIDE_EFFECTS (arg)
7795       || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7796       || POINTER_TYPE_P (TREE_TYPE (arg))
7798       || folding_initializer
7799       || force_folding_builtin_constant_p)
7800     return integer_zero_node;
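/* Illustrative sketch (editor's addition, not part of the original source):
   at the source level the folding above behaves roughly like

     int f (volatile int *p)
     {
       int a = __builtin_constant_p (42);       => folds to 1
       int b = __builtin_constant_p ("abc");    => folds to 1 (address of a string)
       int c = __builtin_constant_p (*p);       => folds to 0 (side effects)
       int d = __builtin_constant_p (a + b);    => deferred; later passes decide
       return a + b + c + d;
     }

   The helper name f is hypothetical; the exact results also depend on the
   optimization level and on when the folder is invoked.  */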
7805 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7806 return it as a truthvalue. */
7809 build_builtin_expect_predicate (location_t loc
, tree pred
, tree expected
,
7812 tree fn
, arg_types
, pred_type
, expected_type
, call_expr
, ret_type
;
7814 fn
= builtin_decl_explicit (BUILT_IN_EXPECT
);
7815 arg_types
= TYPE_ARG_TYPES (TREE_TYPE (fn
));
7816 ret_type
= TREE_TYPE (TREE_TYPE (fn
));
7817 pred_type
= TREE_VALUE (arg_types
);
7818 expected_type
= TREE_VALUE (TREE_CHAIN (arg_types
));
7820 pred
= fold_convert_loc (loc
, pred_type
, pred
);
7821 expected
= fold_convert_loc (loc
, expected_type
, expected
);
7822 call_expr
= build_call_expr_loc (loc
, fn
, predictor
? 3 : 2, pred
, expected
,
7825 return build2 (NE_EXPR
, TREE_TYPE (pred
), call_expr
,
7826 build_int_cst (ret_type
, 0));
7829 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7830 NULL_TREE if no simplification is possible. */
7833 fold_builtin_expect (location_t loc
, tree arg0
, tree arg1
, tree arg2
)
7835 tree inner
, fndecl
, inner_arg0
;
7836 enum tree_code code
;
7838 /* Distribute the expected value over short-circuiting operators.
7839 See through the cast from truthvalue_type_node to long. */
7841 while (CONVERT_EXPR_P (inner_arg0
)
7842 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0
))
7843 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0
, 0))))
7844 inner_arg0
= TREE_OPERAND (inner_arg0
, 0);
7846 /* If this is a builtin_expect within a builtin_expect keep the
7847 inner one. See through a comparison against a constant. It
7848    might have been added to create a truthvalue.  */
7851 if (COMPARISON_CLASS_P (inner
)
7852 && TREE_CODE (TREE_OPERAND (inner
, 1)) == INTEGER_CST
)
7853 inner
= TREE_OPERAND (inner
, 0);
7855 if (TREE_CODE (inner
) == CALL_EXPR
7856 && (fndecl
= get_callee_fndecl (inner
))
7857 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
7858 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_EXPECT
)
7862 code
= TREE_CODE (inner
);
7863 if (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
)
7865 tree op0
= TREE_OPERAND (inner
, 0);
7866 tree op1
= TREE_OPERAND (inner
, 1);
7868 op0
= build_builtin_expect_predicate (loc
, op0
, arg1
, arg2
);
7869 op1
= build_builtin_expect_predicate (loc
, op1
, arg1
, arg2
);
7870 inner
= build2 (code
, TREE_TYPE (inner
), op0
, op1
);
7872 return fold_convert_loc (loc
, TREE_TYPE (arg0
), inner
);
7875 /* If the argument isn't invariant then there's nothing else we can do. */
7876 if (!TREE_CONSTANT (inner_arg0
))
7879 /* If we expect that a comparison against the argument will fold to
7880 a constant return the constant. In practice, this means a true
7881 constant or the address of a non-weak symbol. */
7884 if (TREE_CODE (inner
) == ADDR_EXPR
)
7888 inner
= TREE_OPERAND (inner
, 0);
7890 while (TREE_CODE (inner
) == COMPONENT_REF
7891 || TREE_CODE (inner
) == ARRAY_REF
);
7892 if (VAR_OR_FUNCTION_DECL_P (inner
) && DECL_WEAK (inner
))
7896 /* Otherwise, ARG0 already has the proper type for the return value. */
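/* Illustrative sketch (editor's addition): the distribution over
   short-circuit operators above means a call such as

     if (__builtin_expect (p != 0 && n > 0, 1))
       do_work ();

   is folded roughly as if it had been written

     if (__builtin_expect (p != 0, 1) && __builtin_expect (n > 0, 1))
       do_work ();

   so each operand of the && carries the expected value; do_work, p and n
   are hypothetical names used only for this example.  */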
7900 /* Fold a call to __builtin_classify_type with argument ARG. */
7903 fold_builtin_classify_type (tree arg
)
7906 return build_int_cst (integer_type_node
, no_type_class
);
7908 return build_int_cst (integer_type_node
, type_to_class (TREE_TYPE (arg
)));
7911 /* Fold a call to __builtin_strlen with argument ARG. */
7914 fold_builtin_strlen (location_t loc
, tree type
, tree arg
)
7916 if (!validate_arg (arg
, POINTER_TYPE
))
7920 tree len
= c_strlen (arg
, 0);
7923 return fold_convert_loc (loc
, type
, len
);
7929 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7932 fold_builtin_inf (location_t loc
, tree type
, int warn
)
7934 REAL_VALUE_TYPE real
;
7936 /* __builtin_inff is intended to be usable to define INFINITY on all
7937 targets. If an infinity is not available, INFINITY expands "to a
7938 positive constant of type float that overflows at translation
7939 time", footnote "In this case, using INFINITY will violate the
7940 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7941    Thus we pedwarn to ensure this constraint violation is
7942    diagnosed.  */
7943 if (!MODE_HAS_INFINITIES (TYPE_MODE (type
)) && warn
)
7944 pedwarn (loc
, 0, "target format does not support infinity");
7947 return build_real (type
, real
);
7950 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7951 NULL_TREE if no simplification can be made. */
7954 fold_builtin_sincos (location_t loc
,
7955 tree arg0
, tree arg1
, tree arg2
)
7958 tree fndecl
, call
= NULL_TREE
;
7960 if (!validate_arg (arg0
, REAL_TYPE
)
7961 || !validate_arg (arg1
, POINTER_TYPE
)
7962 || !validate_arg (arg2
, POINTER_TYPE
))
7965 type
= TREE_TYPE (arg0
);
7967 /* Calculate the result when the argument is a constant. */
7968 built_in_function fn
= mathfn_built_in_2 (type
, CFN_BUILT_IN_CEXPI
);
7969 if (fn
== END_BUILTINS
)
7972 /* Canonicalize sincos to cexpi. */
7973 if (TREE_CODE (arg0
) == REAL_CST
)
7975 tree complex_type
= build_complex_type (type
);
7976 call
= fold_const_call (as_combined_fn (fn
), complex_type
, arg0
);
7980 if (!targetm
.libc_has_function (function_c99_math_complex
)
7981 || !builtin_decl_implicit_p (fn
))
7983 fndecl
= builtin_decl_explicit (fn
);
7984 call
= build_call_expr_loc (loc
, fndecl
, 1, arg0
);
7985 call
= builtin_save_expr (call
);
7988 return build2 (COMPOUND_EXPR
, void_type_node
,
7989 build2 (MODIFY_EXPR
, void_type_node
,
7990 build_fold_indirect_ref_loc (loc
, arg1
),
7991 fold_build1_loc (loc
, IMAGPART_EXPR
, type
, call
)),
7992 build2 (MODIFY_EXPR
, void_type_node
,
7993 build_fold_indirect_ref_loc (loc
, arg2
),
7994 fold_build1_loc (loc
, REALPART_EXPR
, type
, call
)));
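/* Illustrative sketch (editor's addition): when the C library is known to
   provide the C99 complex math routines, the canonicalization above turns

     sincos (x, &s, &c);

   into code equivalent to

     __complex__ double t = __builtin_cexpi (x);
     s = __imag__ t;    (the sine)
     c = __real__ t;    (the cosine)

   where __builtin_cexpi stands for the internal cexpi builtin selected by
   mathfn_built_in_2; it is not a user-callable C library function.  */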
7997 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
7998    Return NULL_TREE if no simplification can be made.  */
8001 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8003   if (!validate_arg (arg1, POINTER_TYPE)
8004       || !validate_arg (arg2, POINTER_TYPE)
8005       || !validate_arg (len, INTEGER_TYPE))
8008   /* If the LEN parameter is zero, return zero.  */
8009   if (integer_zerop (len))
8010     return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8013   /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
8014   if (operand_equal_p (arg1, arg2, 0))
8015     return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8017   /* If len parameter is one, return an expression corresponding to
8018      (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
8019   if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8021       tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8022       tree cst_uchar_ptr_node
8023         = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8026         = fold_convert_loc (loc, integer_type_node,
8027                             build1 (INDIRECT_REF, cst_uchar_node,
8028                                     fold_convert_loc (loc,
8032         = fold_convert_loc (loc, integer_type_node,
8033                             build1 (INDIRECT_REF, cst_uchar_node,
8034                                     fold_convert_loc (loc,
8037       return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
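/* Illustrative sketch (editor's addition): the special cases above give

     memcmp (p, q, 0)   => 0   (both pointers still evaluated)
     memcmp (p, p, n)   => 0   (identical non-volatile operands)
     memcmp (p, q, 1)   => *(const unsigned char *) p
                           - *(const unsigned char *) q

   while anything else is left for the library call or for later passes.  */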
8043 /* Fold a call to builtin isascii with argument ARG.  */
8046 fold_builtin_isascii (location_t loc, tree arg)
8048   if (!validate_arg (arg, INTEGER_TYPE))
8052       /* Transform isascii(c) -> ((c & ~0x7f) == 0).  */
8053       arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8054                          build_int_cst (integer_type_node,
8055                                         ~ (unsigned HOST_WIDE_INT) 0x7f));
8056       return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8057                               arg, integer_zero_node);
8061 /* Fold a call to builtin toascii with argument ARG.  */
8064 fold_builtin_toascii (location_t loc, tree arg)
8066   if (!validate_arg (arg, INTEGER_TYPE))
8069   /* Transform toascii(c) -> (c & 0x7f).  */
8070   return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8071                           build_int_cst (integer_type_node, 0x7f));
8074 /* Fold a call to builtin isdigit with argument ARG.  */
8077 fold_builtin_isdigit (location_t loc, tree arg)
8079   if (!validate_arg (arg, INTEGER_TYPE))
8083       /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9.  */
8084       /* According to the C standard, isdigit is unaffected by locale.
8085          However, it definitely is affected by the target character set.  */
8086       unsigned HOST_WIDE_INT target_digit0
8087         = lang_hooks.to_target_charset ('0');
8089       if (target_digit0 == 0)
8092       arg = fold_convert_loc (loc, unsigned_type_node, arg);
8093       arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8094                          build_int_cst (unsigned_type_node, target_digit0));
8095       return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8096                               build_int_cst (unsigned_type_node, 9));
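/* Illustrative sketch (editor's addition): with the transformation above

     int f (int c) { return isdigit (c); }

   becomes equivalent to

     int f (int c) { return (unsigned) c - '0' <= 9; }

   using the value of '0' in the target character set; f is a hypothetical
   name used only for this example.  */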
8100 /* Fold a call to fabs, fabsf or fabsl with argument ARG.  */
8103 fold_builtin_fabs (location_t loc, tree arg, tree type)
8105   if (!validate_arg (arg, REAL_TYPE))
8108   arg = fold_convert_loc (loc, type, arg);
8109   return fold_build1_loc (loc, ABS_EXPR, type, arg);
8112 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG.  */
8115 fold_builtin_abs (location_t loc, tree arg, tree type)
8117   if (!validate_arg (arg, INTEGER_TYPE))
8120   arg = fold_convert_loc (loc, type, arg);
8121   return fold_build1_loc (loc, ABS_EXPR, type, arg);
8124 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012].  */
8127 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
8129   /* ??? Only expand to FMA_EXPR if it's directly supported.  */
8130   if (validate_arg (arg0, REAL_TYPE)
8131       && validate_arg (arg1, REAL_TYPE)
8132       && validate_arg (arg2, REAL_TYPE)
8133       && optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
8134     return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
8139 /* Fold a call to builtin carg(a+bi) -> atan2(b,a).  */
8142 fold_builtin_carg (location_t loc, tree arg, tree type)
8144   if (validate_arg (arg, COMPLEX_TYPE)
8145       && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8147       tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8151           tree new_arg = builtin_save_expr (arg);
8152           tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8153           tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8154           return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
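/* Illustrative sketch (editor's addition): the fold above rewrites

     double a = carg (z);

   into the equivalent of

     double a = atan2 (__imag__ z, __real__ z);

   provided an atan2 builtin of the matching type is available.  */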
8161 /* Fold a call to builtin frexp, we can assume the base is 2. */
8164 fold_builtin_frexp (location_t loc
, tree arg0
, tree arg1
, tree rettype
)
8166 if (! validate_arg (arg0
, REAL_TYPE
) || ! validate_arg (arg1
, POINTER_TYPE
))
8171 if (!(TREE_CODE (arg0
) == REAL_CST
&& ! TREE_OVERFLOW (arg0
)))
8174 arg1
= build_fold_indirect_ref_loc (loc
, arg1
);
8176 /* Proceed if a valid pointer type was passed in. */
8177 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1
)) == integer_type_node
)
8179 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg0
);
8185 /* For +-0, return (*exp = 0, +-0). */
8186 exp
= integer_zero_node
;
8191 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8192 return omit_one_operand_loc (loc
, rettype
, arg0
, arg1
);
8195 /* Since the frexp function always expects base 2, and in
8196 GCC normalized significands are already in the range
8197 [0.5, 1.0), we have exactly what frexp wants. */
8198 REAL_VALUE_TYPE frac_rvt
= *value
;
8199 SET_REAL_EXP (&frac_rvt
, 0);
8200 frac
= build_real (rettype
, frac_rvt
);
8201 exp
= build_int_cst (integer_type_node
, REAL_EXP (value
));
8208 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8209 arg1
= fold_build2_loc (loc
, MODIFY_EXPR
, rettype
, arg1
, exp
);
8210 TREE_SIDE_EFFECTS (arg1
) = 1;
8211 return fold_build2_loc (loc
, COMPOUND_EXPR
, rettype
, arg1
, frac
);
8217 /* Fold a call to builtin modf. */
8220 fold_builtin_modf (location_t loc
, tree arg0
, tree arg1
, tree rettype
)
8222 if (! validate_arg (arg0
, REAL_TYPE
) || ! validate_arg (arg1
, POINTER_TYPE
))
8227 if (!(TREE_CODE (arg0
) == REAL_CST
&& ! TREE_OVERFLOW (arg0
)))
8230 arg1
= build_fold_indirect_ref_loc (loc
, arg1
);
8232 /* Proceed if a valid pointer type was passed in. */
8233 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1
)) == TYPE_MAIN_VARIANT (rettype
))
8235 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg0
);
8236 REAL_VALUE_TYPE trunc
, frac
;
8242 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8243 trunc
= frac
= *value
;
8246 /* For +-Inf, return (*arg1 = arg0, +-0). */
8248 frac
.sign
= value
->sign
;
8252 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8253 real_trunc (&trunc
, VOIDmode
, value
);
8254 real_arithmetic (&frac
, MINUS_EXPR
, value
, &trunc
);
8255 /* If the original number was negative and already
8256 integral, then the fractional part is -0.0. */
8257 if (value
->sign
&& frac
.cl
== rvc_zero
)
8258 frac
.sign
= value
->sign
;
8262 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8263 arg1
= fold_build2_loc (loc
, MODIFY_EXPR
, rettype
, arg1
,
8264 build_real (rettype
, trunc
));
8265 TREE_SIDE_EFFECTS (arg1
) = 1;
8266 return fold_build2_loc (loc
, COMPOUND_EXPR
, rettype
, arg1
,
8267 build_real (rettype
, frac
));
8273 /* Given a location LOC, an interclass builtin function decl FNDECL
8274    and its single argument ARG, return a folded expression computing
8275    the same, or NULL_TREE if we either couldn't or didn't want to fold
8276    (the latter happens if there's an RTL instruction available).  */
8279 fold_builtin_interclass_mathfn (location_t loc
, tree fndecl
, tree arg
)
8283 if (!validate_arg (arg
, REAL_TYPE
))
8286 if (interclass_mathfn_icode (arg
, fndecl
) != CODE_FOR_nothing
)
8289 mode
= TYPE_MODE (TREE_TYPE (arg
));
8291 bool is_ibm_extended
= MODE_COMPOSITE_P (mode
);
8293 /* If there is no optab, try generic code. */
8294 switch (DECL_FUNCTION_CODE (fndecl
))
8298 CASE_FLT_FN (BUILT_IN_ISINF
):
8300 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
8301 tree
const isgr_fn
= builtin_decl_explicit (BUILT_IN_ISGREATER
);
8302 tree type
= TREE_TYPE (arg
);
8306 if (is_ibm_extended
)
8308 /* NaN and Inf are encoded in the high-order double value
8309 only. The low-order value is not significant. */
8310 type
= double_type_node
;
8312 arg
= fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
8314 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
8315 real_from_string (&r
, buf
);
8316 result
= build_call_expr (isgr_fn
, 2,
8317 fold_build1_loc (loc
, ABS_EXPR
, type
, arg
),
8318 build_real (type
, r
));
8321 CASE_FLT_FN (BUILT_IN_FINITE
):
8322 case BUILT_IN_ISFINITE
:
8324 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
8325 tree
const isle_fn
= builtin_decl_explicit (BUILT_IN_ISLESSEQUAL
);
8326 tree type
= TREE_TYPE (arg
);
8330 if (is_ibm_extended
)
8332 /* NaN and Inf are encoded in the high-order double value
8333 only. The low-order value is not significant. */
8334 type
= double_type_node
;
8336 arg
= fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
8338 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
8339 real_from_string (&r
, buf
);
8340 result
= build_call_expr (isle_fn
, 2,
8341 fold_build1_loc (loc
, ABS_EXPR
, type
, arg
),
8342 build_real (type
, r
));
8343 /*result = fold_build2_loc (loc, UNGT_EXPR,
8344 TREE_TYPE (TREE_TYPE (fndecl)),
8345 fold_build1_loc (loc, ABS_EXPR, type, arg),
8346 build_real (type, r));
8347 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
8348 TREE_TYPE (TREE_TYPE (fndecl)),
8352 case BUILT_IN_ISNORMAL
:
8354 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
8355 islessequal(fabs(x),DBL_MAX). */
8356 tree
const isle_fn
= builtin_decl_explicit (BUILT_IN_ISLESSEQUAL
);
8357 tree type
= TREE_TYPE (arg
);
8358 tree orig_arg
, max_exp
, min_exp
;
8359 machine_mode orig_mode
= mode
;
8360 REAL_VALUE_TYPE rmax
, rmin
;
8363 orig_arg
= arg
= builtin_save_expr (arg
);
8364 if (is_ibm_extended
)
8366 /* Use double to test the normal range of IBM extended
8367 precision. Emin for IBM extended precision is
8368 different to emin for IEEE double, being 53 higher
8369 since the low double exponent is at least 53 lower
8370 than the high double exponent. */
8371 type
= double_type_node
;
8373 arg
= fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
8375 arg
= fold_build1_loc (loc
, ABS_EXPR
, type
, arg
);
8377 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
8378 real_from_string (&rmax
, buf
);
8379 sprintf (buf
, "0x1p%d", REAL_MODE_FORMAT (orig_mode
)->emin
- 1);
8380 real_from_string (&rmin
, buf
);
8381 max_exp
= build_real (type
, rmax
);
8382 min_exp
= build_real (type
, rmin
);
8384 max_exp
= build_call_expr (isle_fn
, 2, arg
, max_exp
);
8385 if (is_ibm_extended
)
8387 /* Testing the high end of the range is done just using
8388 the high double, using the same test as isfinite().
8389 For the subnormal end of the range we first test the
8390 high double, then if its magnitude is equal to the
8391 limit of 0x1p-969, we test whether the low double is
8392 non-zero and opposite sign to the high double. */
8393 tree
const islt_fn
= builtin_decl_explicit (BUILT_IN_ISLESS
);
8394 tree
const isgt_fn
= builtin_decl_explicit (BUILT_IN_ISGREATER
);
8395 tree gt_min
= build_call_expr (isgt_fn
, 2, arg
, min_exp
);
8396 tree eq_min
= fold_build2 (EQ_EXPR
, integer_type_node
,
8398 tree as_complex
= build1 (VIEW_CONVERT_EXPR
,
8399 complex_double_type_node
, orig_arg
);
8400 tree hi_dbl
= build1 (REALPART_EXPR
, type
, as_complex
);
8401 tree lo_dbl
= build1 (IMAGPART_EXPR
, type
, as_complex
);
8402 tree zero
= build_real (type
, dconst0
);
8403 tree hilt
= build_call_expr (islt_fn
, 2, hi_dbl
, zero
);
8404 tree lolt
= build_call_expr (islt_fn
, 2, lo_dbl
, zero
);
8405 tree logt
= build_call_expr (isgt_fn
, 2, lo_dbl
, zero
);
8406 tree ok_lo
= fold_build1 (TRUTH_NOT_EXPR
, integer_type_node
,
8407 fold_build3 (COND_EXPR
,
8410 eq_min
= fold_build2 (TRUTH_ANDIF_EXPR
, integer_type_node
,
8412 min_exp
= fold_build2 (TRUTH_ORIF_EXPR
, integer_type_node
,
8418 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL
);
8419 min_exp
= build_call_expr (isge_fn
, 2, arg
, min_exp
);
8421 result
= fold_build2 (BIT_AND_EXPR
, integer_type_node
,
8432 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
8433 ARG is the argument for the call. */
8436 fold_builtin_classify (location_t loc
, tree fndecl
, tree arg
, int builtin_index
)
8438 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8440 if (!validate_arg (arg
, REAL_TYPE
))
8443 switch (builtin_index
)
8445 case BUILT_IN_ISINF
:
8446 if (!HONOR_INFINITIES (arg
))
8447 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
8451 case BUILT_IN_ISINF_SIGN
:
8453 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
8454 /* In a boolean context, GCC will fold the inner COND_EXPR to
8455 1. So e.g. "if (isinf_sign(x))" would be folded to just
8456 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
8457 tree signbit_fn
= builtin_decl_explicit (BUILT_IN_SIGNBIT
);
8458 tree isinf_fn
= builtin_decl_explicit (BUILT_IN_ISINF
);
8459 tree tmp
= NULL_TREE
;
8461 arg
= builtin_save_expr (arg
);
8463 if (signbit_fn
&& isinf_fn
)
8465 tree signbit_call
= build_call_expr_loc (loc
, signbit_fn
, 1, arg
);
8466 tree isinf_call
= build_call_expr_loc (loc
, isinf_fn
, 1, arg
);
8468 signbit_call
= fold_build2_loc (loc
, NE_EXPR
, integer_type_node
,
8469 signbit_call
, integer_zero_node
);
8470 isinf_call
= fold_build2_loc (loc
, NE_EXPR
, integer_type_node
,
8471 isinf_call
, integer_zero_node
);
8473 tmp
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, signbit_call
,
8474 integer_minus_one_node
, integer_one_node
);
8475 tmp
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
,
8483 case BUILT_IN_ISFINITE
:
8484 if (!HONOR_NANS (arg
)
8485 && !HONOR_INFINITIES (arg
))
8486 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg
);
8490 case BUILT_IN_ISNAN
:
8491 if (!HONOR_NANS (arg
))
8492 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
8495 bool is_ibm_extended
= MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg
)));
8496 if (is_ibm_extended
)
8498 /* NaN and Inf are encoded in the high-order double value
8499 only. The low-order value is not significant. */
8500 arg
= fold_build1_loc (loc
, NOP_EXPR
, double_type_node
, arg
);
8503 arg
= builtin_save_expr (arg
);
8504 return fold_build2_loc (loc
, UNORDERED_EXPR
, type
, arg
, arg
);
8511 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
8512 This builtin will generate code to return the appropriate floating
8513 point classification depending on the value of the floating point
8514 number passed in. The possible return values must be supplied as
8515 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
8516    FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipsis is for exactly
8517    one floating point argument which is "type generic".  */
8520 fold_builtin_fpclassify (location_t loc
, tree
*args
, int nargs
)
8522 tree fp_nan
, fp_infinite
, fp_normal
, fp_subnormal
, fp_zero
,
8523 arg
, type
, res
, tmp
;
8528 /* Verify the required arguments in the original call. */
8530 || !validate_arg (args
[0], INTEGER_TYPE
)
8531 || !validate_arg (args
[1], INTEGER_TYPE
)
8532 || !validate_arg (args
[2], INTEGER_TYPE
)
8533 || !validate_arg (args
[3], INTEGER_TYPE
)
8534 || !validate_arg (args
[4], INTEGER_TYPE
)
8535 || !validate_arg (args
[5], REAL_TYPE
))
8539 fp_infinite
= args
[1];
8540 fp_normal
= args
[2];
8541 fp_subnormal
= args
[3];
8544 type
= TREE_TYPE (arg
);
8545 mode
= TYPE_MODE (type
);
8546 arg
= builtin_save_expr (fold_build1_loc (loc
, ABS_EXPR
, type
, arg
));
8550 (fabs(x) == Inf ? FP_INFINITE :
8551 (fabs(x) >= DBL_MIN ? FP_NORMAL :
8552 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
8554 tmp
= fold_build2_loc (loc
, EQ_EXPR
, integer_type_node
, arg
,
8555 build_real (type
, dconst0
));
8556 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
,
8557 tmp
, fp_zero
, fp_subnormal
);
8559 sprintf (buf
, "0x1p%d", REAL_MODE_FORMAT (mode
)->emin
- 1);
8560 real_from_string (&r
, buf
);
8561 tmp
= fold_build2_loc (loc
, GE_EXPR
, integer_type_node
,
8562 arg
, build_real (type
, r
));
8563 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, tmp
, fp_normal
, res
);
8565 if (HONOR_INFINITIES (mode
))
8568 tmp
= fold_build2_loc (loc
, EQ_EXPR
, integer_type_node
, arg
,
8569 build_real (type
, r
));
8570 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, tmp
,
8574 if (HONOR_NANS (mode
))
8576 tmp
= fold_build2_loc (loc
, ORDERED_EXPR
, integer_type_node
, arg
, arg
);
8577 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, tmp
, res
, fp_nan
);
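/* Illustrative sketch (editor's addition): for a double argument the code
   built above is roughly the nested conditional

     isnan (x)               ? FP_NAN
     : fabs (x) == INFINITY  ? FP_INFINITE
     : fabs (x) >= DBL_MIN   ? FP_NORMAL
     : fabs (x) == 0.0       ? FP_ZERO
     :                         FP_SUBNORMAL

   where the five FP_* values are the integer arguments of the call, and the
   NaN and infinity tests are omitted when the mode honors neither.  */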
8583 /* Fold a call to an unordered comparison function such as
8584 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
8585 being called and ARG0 and ARG1 are the arguments for the call.
8586 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
8587 the opposite of the desired result. UNORDERED_CODE is used
8588    for modes that can hold NaNs and ORDERED_CODE is used for
8589    the rest.  */
8592 fold_builtin_unordered_cmp (location_t loc
, tree fndecl
, tree arg0
, tree arg1
,
8593 enum tree_code unordered_code
,
8594 enum tree_code ordered_code
)
8596 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8597 enum tree_code code
;
8599 enum tree_code code0
, code1
;
8600 tree cmp_type
= NULL_TREE
;
8602 type0
= TREE_TYPE (arg0
);
8603 type1
= TREE_TYPE (arg1
);
8605 code0
= TREE_CODE (type0
);
8606 code1
= TREE_CODE (type1
);
8608 if (code0
== REAL_TYPE
&& code1
== REAL_TYPE
)
8609 /* Choose the wider of two real types. */
8610 cmp_type
= TYPE_PRECISION (type0
) >= TYPE_PRECISION (type1
)
8612 else if (code0
== REAL_TYPE
&& code1
== INTEGER_TYPE
)
8614 else if (code0
== INTEGER_TYPE
&& code1
== REAL_TYPE
)
8617 arg0
= fold_convert_loc (loc
, cmp_type
, arg0
);
8618 arg1
= fold_convert_loc (loc
, cmp_type
, arg1
);
8620 if (unordered_code
== UNORDERED_EXPR
)
8622 if (!HONOR_NANS (arg0
))
8623 return omit_two_operands_loc (loc
, type
, integer_zero_node
, arg0
, arg1
);
8624 return fold_build2_loc (loc
, UNORDERED_EXPR
, type
, arg0
, arg1
);
8627 code
= HONOR_NANS (arg0
) ? unordered_code
: ordered_code
;
8628 return fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
,
8629 fold_build2_loc (loc
, code
, type
, arg0
, arg1
));
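/* Illustrative sketch (editor's addition): when NaNs must be honored

     isgreater (x, y)

   is folded to the negation of an unordered-or-less-equal comparison,
   roughly

     ! (isunordered (x, y) || x <= y)

   and when NaNs cannot occur it degenerates to the plain ordered
   comparison x > y.  */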
8632 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
8633 arithmetics if it can never overflow, or into internal functions that
8634 return both result of arithmetics and overflowed boolean flag in
8635 a complex integer result, or some other check for overflow.
8636 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
8637 checking part of that. */
8640 fold_builtin_arith_overflow (location_t loc
, enum built_in_function fcode
,
8641 tree arg0
, tree arg1
, tree arg2
)
8643 enum internal_fn ifn
= IFN_LAST
;
8644 /* The code of the expression corresponding to the type-generic
8645 built-in, or ERROR_MARK for the type-specific ones. */
8646 enum tree_code opcode
= ERROR_MARK
;
8647 bool ovf_only
= false;
8651 case BUILT_IN_ADD_OVERFLOW_P
:
8654 case BUILT_IN_ADD_OVERFLOW
:
8657 case BUILT_IN_SADD_OVERFLOW
:
8658 case BUILT_IN_SADDL_OVERFLOW
:
8659 case BUILT_IN_SADDLL_OVERFLOW
:
8660 case BUILT_IN_UADD_OVERFLOW
:
8661 case BUILT_IN_UADDL_OVERFLOW
:
8662 case BUILT_IN_UADDLL_OVERFLOW
:
8663 ifn
= IFN_ADD_OVERFLOW
;
8665 case BUILT_IN_SUB_OVERFLOW_P
:
8668 case BUILT_IN_SUB_OVERFLOW
:
8669 opcode
= MINUS_EXPR
;
8671 case BUILT_IN_SSUB_OVERFLOW
:
8672 case BUILT_IN_SSUBL_OVERFLOW
:
8673 case BUILT_IN_SSUBLL_OVERFLOW
:
8674 case BUILT_IN_USUB_OVERFLOW
:
8675 case BUILT_IN_USUBL_OVERFLOW
:
8676 case BUILT_IN_USUBLL_OVERFLOW
:
8677 ifn
= IFN_SUB_OVERFLOW
;
8679 case BUILT_IN_MUL_OVERFLOW_P
:
8682 case BUILT_IN_MUL_OVERFLOW
:
8685 case BUILT_IN_SMUL_OVERFLOW
:
8686 case BUILT_IN_SMULL_OVERFLOW
:
8687 case BUILT_IN_SMULLL_OVERFLOW
:
8688 case BUILT_IN_UMUL_OVERFLOW
:
8689 case BUILT_IN_UMULL_OVERFLOW
:
8690 case BUILT_IN_UMULLL_OVERFLOW
:
8691 ifn
= IFN_MUL_OVERFLOW
;
8697 /* For the "generic" overloads, the first two arguments can have different
8698 types and the last argument determines the target type to use to check
8699 for overflow. The arguments of the other overloads all have the same
8701 tree type
= ovf_only
? TREE_TYPE (arg2
) : TREE_TYPE (TREE_TYPE (arg2
));
8703 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
8704 arguments are constant, attempt to fold the built-in call into a constant
8705 expression indicating whether or not it detected an overflow. */
8707 && TREE_CODE (arg0
) == INTEGER_CST
8708 && TREE_CODE (arg1
) == INTEGER_CST
)
8709 /* Perform the computation in the target type and check for overflow. */
8710 return omit_one_operand_loc (loc
, boolean_type_node
,
8711 arith_overflowed_p (opcode
, type
, arg0
, arg1
)
8712 ? boolean_true_node
: boolean_false_node
,
8715 tree ctype
= build_complex_type (type
);
8716 tree call
= build_call_expr_internal_loc (loc
, ifn
, ctype
,
8718 tree tgt
= save_expr (call
);
8719 tree intres
= build1_loc (loc
, REALPART_EXPR
, type
, tgt
);
8720 tree ovfres
= build1_loc (loc
, IMAGPART_EXPR
, type
, tgt
);
8721 ovfres
= fold_convert_loc (loc
, boolean_type_node
, ovfres
);
8724 return omit_one_operand_loc (loc
, boolean_type_node
, ovfres
, arg2
);
8726 tree mem_arg2
= build_fold_indirect_ref_loc (loc
, arg2
);
8728 = fold_build2_loc (loc
, MODIFY_EXPR
, void_type_node
, mem_arg2
, intres
);
8729 return build2_loc (loc
, COMPOUND_EXPR
, boolean_type_node
, store
, ovfres
);
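/* Illustrative sketch (editor's addition): a checking call such as

     int r;
     bool ovf = __builtin_add_overflow (a, b, &r);

   is lowered above to an IFN_ADD_OVERFLOW internal call whose complex
   integer result carries the sum in its real part (stored through &r) and
   the overflow flag in its imaginary part, while the _p variants with two
   constant operands, e.g.

     __builtin_add_overflow_p (INT_MAX, 1, (int) 0)

   fold directly to a boolean constant (true in this case).  */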
8732 /* Fold a call to __builtin_FILE to a constant string. */
8735 fold_builtin_FILE (location_t loc
)
8737 if (const char *fname
= LOCATION_FILE (loc
))
8738 return build_string_literal (strlen (fname
) + 1, fname
);
8740 return build_string_literal (1, "");
8743 /* Fold a call to __builtin_FUNCTION to a constant string. */
8746 fold_builtin_FUNCTION ()
8748 const char *name
= "";
8750 if (current_function_decl
)
8751 name
= lang_hooks
.decl_printable_name (current_function_decl
, 0);
8753 return build_string_literal (strlen (name
) + 1, name
);
8756 /* Fold a call to __builtin_LINE to an integer constant. */
8759 fold_builtin_LINE (location_t loc
, tree type
)
8761 return build_int_cst (type
, LOCATION_LINE (loc
));
8764 /* Fold a call to built-in function FNDECL with 0 arguments.
8765 This function returns NULL_TREE if no simplification was possible. */
8768 fold_builtin_0 (location_t loc
, tree fndecl
)
8770 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8771 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
8775 return fold_builtin_FILE (loc
);
8777 case BUILT_IN_FUNCTION
:
8778 return fold_builtin_FUNCTION ();
8781 return fold_builtin_LINE (loc
, type
);
8783 CASE_FLT_FN (BUILT_IN_INF
):
8784 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF
):
8785 case BUILT_IN_INFD32
:
8786 case BUILT_IN_INFD64
:
8787 case BUILT_IN_INFD128
:
8788 return fold_builtin_inf (loc
, type
, true);
8790 CASE_FLT_FN (BUILT_IN_HUGE_VAL
):
8791 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL
):
8792 return fold_builtin_inf (loc
, type
, false);
8794 case BUILT_IN_CLASSIFY_TYPE
:
8795 return fold_builtin_classify_type (NULL_TREE
);
8803 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
8804 This function returns NULL_TREE if no simplification was possible. */
8807 fold_builtin_1 (location_t loc
, tree fndecl
, tree arg0
)
8809 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8810 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
8812 if (TREE_CODE (arg0
) == ERROR_MARK
)
8815 if (tree ret
= fold_const_call (as_combined_fn (fcode
), type
, arg0
))
8820 case BUILT_IN_CONSTANT_P
:
8822 tree val
= fold_builtin_constant_p (arg0
);
8824 /* Gimplification will pull the CALL_EXPR for the builtin out of
8825 an if condition. When not optimizing, we'll not CSE it back.
8826 To avoid link error types of regressions, return false now. */
8827 if (!val
&& !optimize
)
8828 val
= integer_zero_node
;
8833 case BUILT_IN_CLASSIFY_TYPE
:
8834 return fold_builtin_classify_type (arg0
);
8836 case BUILT_IN_STRLEN
:
8837 return fold_builtin_strlen (loc
, type
, arg0
);
8839 CASE_FLT_FN (BUILT_IN_FABS
):
8840 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS
):
8841 case BUILT_IN_FABSD32
:
8842 case BUILT_IN_FABSD64
:
8843 case BUILT_IN_FABSD128
:
8844 return fold_builtin_fabs (loc
, arg0
, type
);
8848 case BUILT_IN_LLABS
:
8849 case BUILT_IN_IMAXABS
:
8850 return fold_builtin_abs (loc
, arg0
, type
);
8852 CASE_FLT_FN (BUILT_IN_CONJ
):
8853 if (validate_arg (arg0
, COMPLEX_TYPE
)
8854 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
8855 return fold_build1_loc (loc
, CONJ_EXPR
, type
, arg0
);
8858 CASE_FLT_FN (BUILT_IN_CREAL
):
8859 if (validate_arg (arg0
, COMPLEX_TYPE
)
8860 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
8861 return non_lvalue_loc (loc
, fold_build1_loc (loc
, REALPART_EXPR
, type
, arg0
));
8864 CASE_FLT_FN (BUILT_IN_CIMAG
):
8865 if (validate_arg (arg0
, COMPLEX_TYPE
)
8866 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
8867 return non_lvalue_loc (loc
, fold_build1_loc (loc
, IMAGPART_EXPR
, type
, arg0
));
8870 CASE_FLT_FN (BUILT_IN_CARG
):
8871 return fold_builtin_carg (loc
, arg0
, type
);
8873 case BUILT_IN_ISASCII
:
8874 return fold_builtin_isascii (loc
, arg0
);
8876 case BUILT_IN_TOASCII
:
8877 return fold_builtin_toascii (loc
, arg0
);
8879 case BUILT_IN_ISDIGIT
:
8880 return fold_builtin_isdigit (loc
, arg0
);
8882 CASE_FLT_FN (BUILT_IN_FINITE
):
8883 case BUILT_IN_FINITED32
:
8884 case BUILT_IN_FINITED64
:
8885 case BUILT_IN_FINITED128
:
8886 case BUILT_IN_ISFINITE
:
8888 tree ret
= fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISFINITE
);
8891 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
8894 CASE_FLT_FN (BUILT_IN_ISINF
):
8895 case BUILT_IN_ISINFD32
:
8896 case BUILT_IN_ISINFD64
:
8897 case BUILT_IN_ISINFD128
:
8899 tree ret
= fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISINF
);
8902 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
8905 case BUILT_IN_ISNORMAL
:
8906 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
8908 case BUILT_IN_ISINF_SIGN
:
8909 return fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISINF_SIGN
);
8911 CASE_FLT_FN (BUILT_IN_ISNAN
):
8912 case BUILT_IN_ISNAND32
:
8913 case BUILT_IN_ISNAND64
:
8914 case BUILT_IN_ISNAND128
:
8915 return fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISNAN
);
8918 if (integer_zerop (arg0
))
8919 return build_empty_stmt (loc
);
8930 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
8931 This function returns NULL_TREE if no simplification was possible. */
8934 fold_builtin_2 (location_t loc
, tree fndecl
, tree arg0
, tree arg1
)
8936 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8937 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
8939 if (TREE_CODE (arg0
) == ERROR_MARK
8940 || TREE_CODE (arg1
) == ERROR_MARK
)
8943 if (tree ret
= fold_const_call (as_combined_fn (fcode
), type
, arg0
, arg1
))
8948 CASE_FLT_FN_REENT (BUILT_IN_GAMMA
): /* GAMMA_R */
8949 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA
): /* LGAMMA_R */
8950 if (validate_arg (arg0
, REAL_TYPE
)
8951 && validate_arg (arg1
, POINTER_TYPE
))
8952 return do_mpfr_lgamma_r (arg0
, arg1
, type
);
8955 CASE_FLT_FN (BUILT_IN_FREXP
):
8956 return fold_builtin_frexp (loc
, arg0
, arg1
, type
);
8958 CASE_FLT_FN (BUILT_IN_MODF
):
8959 return fold_builtin_modf (loc
, arg0
, arg1
, type
);
8961 case BUILT_IN_STRSPN
:
8962 return fold_builtin_strspn (loc
, arg0
, arg1
);
8964 case BUILT_IN_STRCSPN
:
8965 return fold_builtin_strcspn (loc
, arg0
, arg1
);
8967 case BUILT_IN_STRPBRK
:
8968 return fold_builtin_strpbrk (loc
, arg0
, arg1
, type
);
8970 case BUILT_IN_EXPECT
:
8971 return fold_builtin_expect (loc
, arg0
, arg1
, NULL_TREE
);
8973 case BUILT_IN_ISGREATER
:
8974 return fold_builtin_unordered_cmp (loc
, fndecl
,
8975 arg0
, arg1
, UNLE_EXPR
, LE_EXPR
);
8976 case BUILT_IN_ISGREATEREQUAL
:
8977 return fold_builtin_unordered_cmp (loc
, fndecl
,
8978 arg0
, arg1
, UNLT_EXPR
, LT_EXPR
);
8979 case BUILT_IN_ISLESS
:
8980 return fold_builtin_unordered_cmp (loc
, fndecl
,
8981 arg0
, arg1
, UNGE_EXPR
, GE_EXPR
);
8982 case BUILT_IN_ISLESSEQUAL
:
8983 return fold_builtin_unordered_cmp (loc
, fndecl
,
8984 arg0
, arg1
, UNGT_EXPR
, GT_EXPR
);
8985 case BUILT_IN_ISLESSGREATER
:
8986 return fold_builtin_unordered_cmp (loc
, fndecl
,
8987 arg0
, arg1
, UNEQ_EXPR
, EQ_EXPR
);
8988 case BUILT_IN_ISUNORDERED
:
8989 return fold_builtin_unordered_cmp (loc
, fndecl
,
8990 arg0
, arg1
, UNORDERED_EXPR
,
8993 /* We do the folding for va_start in the expander. */
8994 case BUILT_IN_VA_START
:
8997 case BUILT_IN_OBJECT_SIZE
:
8998 return fold_builtin_object_size (arg0
, arg1
);
9000 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE
:
9001 return fold_builtin_atomic_always_lock_free (arg0
, arg1
);
9003 case BUILT_IN_ATOMIC_IS_LOCK_FREE
:
9004 return fold_builtin_atomic_is_lock_free (arg0
, arg1
);
9012 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
9014 This function returns NULL_TREE if no simplification was possible. */
9017 fold_builtin_3 (location_t loc
, tree fndecl
,
9018 tree arg0
, tree arg1
, tree arg2
)
9020 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9021 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
9023 if (TREE_CODE (arg0
) == ERROR_MARK
9024 || TREE_CODE (arg1
) == ERROR_MARK
9025 || TREE_CODE (arg2
) == ERROR_MARK
)
9028 if (tree ret
= fold_const_call (as_combined_fn (fcode
), type
,
9035 CASE_FLT_FN (BUILT_IN_SINCOS
):
9036 return fold_builtin_sincos (loc
, arg0
, arg1
, arg2
);
9038 CASE_FLT_FN (BUILT_IN_FMA
):
9039 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA
):
9040 return fold_builtin_fma (loc
, arg0
, arg1
, arg2
, type
);
9042 CASE_FLT_FN (BUILT_IN_REMQUO
):
9043 if (validate_arg (arg0
, REAL_TYPE
)
9044 && validate_arg (arg1
, REAL_TYPE
)
9045 && validate_arg (arg2
, POINTER_TYPE
))
9046 return do_mpfr_remquo (arg0
, arg1
, arg2
);
9049 case BUILT_IN_MEMCMP
:
9050 return fold_builtin_memcmp (loc
, arg0
, arg1
, arg2
);;
9052 case BUILT_IN_EXPECT
:
9053 return fold_builtin_expect (loc
, arg0
, arg1
, arg2
);
9055 case BUILT_IN_ADD_OVERFLOW
:
9056 case BUILT_IN_SUB_OVERFLOW
:
9057 case BUILT_IN_MUL_OVERFLOW
:
9058 case BUILT_IN_ADD_OVERFLOW_P
:
9059 case BUILT_IN_SUB_OVERFLOW_P
:
9060 case BUILT_IN_MUL_OVERFLOW_P
:
9061 case BUILT_IN_SADD_OVERFLOW
:
9062 case BUILT_IN_SADDL_OVERFLOW
:
9063 case BUILT_IN_SADDLL_OVERFLOW
:
9064 case BUILT_IN_SSUB_OVERFLOW
:
9065 case BUILT_IN_SSUBL_OVERFLOW
:
9066 case BUILT_IN_SSUBLL_OVERFLOW
:
9067 case BUILT_IN_SMUL_OVERFLOW
:
9068 case BUILT_IN_SMULL_OVERFLOW
:
9069 case BUILT_IN_SMULLL_OVERFLOW
:
9070 case BUILT_IN_UADD_OVERFLOW
:
9071 case BUILT_IN_UADDL_OVERFLOW
:
9072 case BUILT_IN_UADDLL_OVERFLOW
:
9073 case BUILT_IN_USUB_OVERFLOW
:
9074 case BUILT_IN_USUBL_OVERFLOW
:
9075 case BUILT_IN_USUBLL_OVERFLOW
:
9076 case BUILT_IN_UMUL_OVERFLOW
:
9077 case BUILT_IN_UMULL_OVERFLOW
:
9078 case BUILT_IN_UMULLL_OVERFLOW
:
9079 return fold_builtin_arith_overflow (loc
, fcode
, arg0
, arg1
, arg2
);
9087 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
9088 arguments. IGNORE is true if the result of the
9089 function call is ignored. This function returns NULL_TREE if no
9090 simplification was possible. */
9093 fold_builtin_n (location_t loc
, tree fndecl
, tree
*args
, int nargs
, bool)
9095 tree ret
= NULL_TREE
;
9100 ret
= fold_builtin_0 (loc
, fndecl
);
9103 ret
= fold_builtin_1 (loc
, fndecl
, args
[0]);
9106 ret
= fold_builtin_2 (loc
, fndecl
, args
[0], args
[1]);
9109 ret
= fold_builtin_3 (loc
, fndecl
, args
[0], args
[1], args
[2]);
9112 ret
= fold_builtin_varargs (loc
, fndecl
, args
, nargs
);
9117 ret
= build1 (NOP_EXPR
, TREE_TYPE (ret
), ret
);
9118 SET_EXPR_LOCATION (ret
, loc
);
9119 TREE_NO_WARNING (ret
) = 1;
9125 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9126 list ARGS along with N new arguments in NEWARGS. SKIP is the number
9127 of arguments in ARGS to be omitted. OLDNARGS is the number of
9128 elements in ARGS. */
9131 rewrite_call_expr_valist (location_t loc
, int oldnargs
, tree
*args
,
9132 int skip
, tree fndecl
, int n
, va_list newargs
)
9134 int nargs
= oldnargs
- skip
+ n
;
9141 buffer
= XALLOCAVEC (tree
, nargs
);
9142 for (i
= 0; i
< n
; i
++)
9143 buffer
[i
] = va_arg (newargs
, tree
);
9144 for (j
= skip
; j
< oldnargs
; j
++, i
++)
9145 buffer
[i
] = args
[j
];
9148 buffer
= args
+ skip
;
9150 return build_call_expr_loc_array (loc
, fndecl
, nargs
, buffer
);
9153 /* Return true if FNDECL shouldn't be folded right now.
9154 If a built-in function has an inline attribute always_inline
9155 wrapper, defer folding it after always_inline functions have
9156 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
9157 might not be performed. */
9160 avoid_folding_inline_builtin (tree fndecl
)
9162 return (DECL_DECLARED_INLINE_P (fndecl
)
9163 && DECL_DISREGARD_INLINE_LIMITS (fndecl
)
9165 && !cfun
->always_inline_functions_inlined
9166 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl
)));
9169 /* A wrapper function for builtin folding that prevents warnings for
9170 "statement without effect" and the like, caused by removing the
9171 call node earlier than the warning is generated. */
9174 fold_call_expr (location_t loc
, tree exp
, bool ignore
)
9176 tree ret
= NULL_TREE
;
9177 tree fndecl
= get_callee_fndecl (exp
);
9179 && TREE_CODE (fndecl
) == FUNCTION_DECL
9180 && DECL_BUILT_IN (fndecl
)
9181 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9182 yet. Defer folding until we see all the arguments
9183 (after inlining). */
9184 && !CALL_EXPR_VA_ARG_PACK (exp
))
9186 int nargs
= call_expr_nargs (exp
);
9188 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
9189 instead last argument is __builtin_va_arg_pack (). Defer folding
9190 even in that case, until arguments are finalized. */
9191 if (nargs
&& TREE_CODE (CALL_EXPR_ARG (exp
, nargs
- 1)) == CALL_EXPR
)
9193 tree fndecl2
= get_callee_fndecl (CALL_EXPR_ARG (exp
, nargs
- 1));
9195 && TREE_CODE (fndecl2
) == FUNCTION_DECL
9196 && DECL_BUILT_IN_CLASS (fndecl2
) == BUILT_IN_NORMAL
9197 && DECL_FUNCTION_CODE (fndecl2
) == BUILT_IN_VA_ARG_PACK
)
9201 if (avoid_folding_inline_builtin (fndecl
))
9204 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
9205 return targetm
.fold_builtin (fndecl
, call_expr_nargs (exp
),
9206 CALL_EXPR_ARGP (exp
), ignore
);
9209 tree
*args
= CALL_EXPR_ARGP (exp
);
9210 ret
= fold_builtin_n (loc
, fndecl
, args
, nargs
, ignore
);
9218 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
9219 N arguments are passed in the array ARGARRAY. Return a folded
9220 expression or NULL_TREE if no simplification was possible. */
9223 fold_builtin_call_array (location_t loc
, tree
,
9228 if (TREE_CODE (fn
) != ADDR_EXPR
)
9231 tree fndecl
= TREE_OPERAND (fn
, 0);
9232 if (TREE_CODE (fndecl
) == FUNCTION_DECL
9233 && DECL_BUILT_IN (fndecl
))
9235 /* If last argument is __builtin_va_arg_pack (), arguments to this
9236 function are not finalized yet. Defer folding until they are. */
9237 if (n
&& TREE_CODE (argarray
[n
- 1]) == CALL_EXPR
)
9239 tree fndecl2
= get_callee_fndecl (argarray
[n
- 1]);
9241 && TREE_CODE (fndecl2
) == FUNCTION_DECL
9242 && DECL_BUILT_IN_CLASS (fndecl2
) == BUILT_IN_NORMAL
9243 && DECL_FUNCTION_CODE (fndecl2
) == BUILT_IN_VA_ARG_PACK
)
9246 if (avoid_folding_inline_builtin (fndecl
))
9248 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
9249 return targetm
.fold_builtin (fndecl
, n
, argarray
, false);
9251 return fold_builtin_n (loc
, fndecl
, argarray
, n
, false);
9257 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
9258 along with N new arguments specified as the "..." parameters. SKIP
9259 is the number of arguments in EXP to be omitted. This function is used
9260 to do varargs-to-varargs transformations. */
9263 rewrite_call_expr (location_t loc
, tree exp
, int skip
, tree fndecl
, int n
, ...)
9269 t
= rewrite_call_expr_valist (loc
, call_expr_nargs (exp
),
9270 CALL_EXPR_ARGP (exp
), skip
, fndecl
, n
, ap
);
9276 /* Validate a single argument ARG against a tree code CODE representing
9277 a type. Return true when argument is valid. */
9280 validate_arg (const_tree arg
, enum tree_code code
)
9284 else if (code
== POINTER_TYPE
)
9285 return POINTER_TYPE_P (TREE_TYPE (arg
));
9286 else if (code
== INTEGER_TYPE
)
9287 return INTEGRAL_TYPE_P (TREE_TYPE (arg
));
9288 return code
== TREE_CODE (TREE_TYPE (arg
));
9291 /* This function validates the types of a function call argument list
9292 against a specified list of tree_codes. If the last specifier is a 0,
9293    that represents an ellipsis, otherwise the last specifier must be a
9294    VOID_TYPE.
9296 This is the GIMPLE version of validate_arglist. Eventually we want to
9297 completely convert builtins.c to work from GIMPLEs and the tree based
9298 validate_arglist will then be removed. */
9301 validate_gimple_arglist (const gcall
*call
, ...)
9303 enum tree_code code
;
9309 va_start (ap
, call
);
9314 code
= (enum tree_code
) va_arg (ap
, int);
9318       /* This signifies an ellipsis; any further arguments are all ok.  */
9322 /* This signifies an endlink, if no arguments remain, return
9323 true, otherwise return false. */
9324 res
= (i
== gimple_call_num_args (call
));
9327 /* If no parameters remain or the parameter's code does not
9328 match the specified code, return false. Otherwise continue
9329 checking any remaining arguments. */
9330 arg
= gimple_call_arg (call
, i
++);
9331 if (!validate_arg (arg
, code
))
9338 /* We need gotos here since we can only have one VA_CLOSE in a
9346 /* Default target-specific builtin expander that does nothing. */
9349 default_expand_builtin (tree exp ATTRIBUTE_UNUSED
,
9350 rtx target ATTRIBUTE_UNUSED
,
9351 rtx subtarget ATTRIBUTE_UNUSED
,
9352 machine_mode mode ATTRIBUTE_UNUSED
,
9353 int ignore ATTRIBUTE_UNUSED
)
9358 /* Returns true if EXP represents data that would potentially reside
9359    in a readonly section.  */
9362 readonly_data_expr (tree exp
)
9366 if (TREE_CODE (exp
) != ADDR_EXPR
)
9369 exp
= get_base_address (TREE_OPERAND (exp
, 0));
9373 /* Make sure we call decl_readonly_section only for trees it
9374 can handle (since it returns true for everything it doesn't
9376 if (TREE_CODE (exp
) == STRING_CST
9377 || TREE_CODE (exp
) == CONSTRUCTOR
9378 || (VAR_P (exp
) && TREE_STATIC (exp
)))
9379 return decl_readonly_section (exp
, 0);
9384 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
9385 to the call, and TYPE is its return type.
9387 Return NULL_TREE if no simplification was possible, otherwise return the
9388 simplified form of the call as a tree.
9390 The simplified form may be a constant or other expression which
9391 computes the same value, but in a more efficient manner (including
9392 calls to other builtin functions).
9394 The call may contain arguments which need to be evaluated, but
9395 which are not useful to determine the result of the call. In
9396 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9397 COMPOUND_EXPR will be an argument which must be evaluated.
9398 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9399 COMPOUND_EXPR in the chain will contain the tree for the simplified
9400 form of the builtin function call. */
9403 fold_builtin_strpbrk (location_t loc
, tree s1
, tree s2
, tree type
)
9405 if (!validate_arg (s1
, POINTER_TYPE
)
9406 || !validate_arg (s2
, POINTER_TYPE
))
9411 const char *p1
, *p2
;
9420 const char *r
= strpbrk (p1
, p2
);
9424 return build_int_cst (TREE_TYPE (s1
), 0);
9426 /* Return an offset into the constant string argument. */
9427 tem
= fold_build_pointer_plus_hwi_loc (loc
, s1
, r
- p1
);
9428 return fold_convert_loc (loc
, type
, tem
);
9432 /* strpbrk(x, "") == NULL.
9433 Evaluate and ignore s1 in case it had side-effects. */
9434 return omit_one_operand_loc (loc
, TREE_TYPE (s1
), integer_zero_node
, s1
);
9437 return NULL_TREE
; /* Really call strpbrk. */
9439 fn
= builtin_decl_implicit (BUILT_IN_STRCHR
);
9443 /* New argument list transforming strpbrk(s1, s2) to
9444 strchr(s1, s2[0]). */
9445 return build_call_expr_loc (loc
, fn
, 2, s1
,
9446 build_int_cst (integer_type_node
, p2
[0]));
9450 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
9453 Return NULL_TREE if no simplification was possible, otherwise return the
9454 simplified form of the call as a tree.
9456 The simplified form may be a constant or other expression which
9457 computes the same value, but in a more efficient manner (including
9458 calls to other builtin functions).
9460 The call may contain arguments which need to be evaluated, but
9461 which are not useful to determine the result of the call. In
9462 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9463 COMPOUND_EXPR will be an argument which must be evaluated.
9464 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9465 COMPOUND_EXPR in the chain will contain the tree for the simplified
9466 form of the builtin function call. */
9469 fold_builtin_strspn (location_t loc
, tree s1
, tree s2
)
9471 if (!validate_arg (s1
, POINTER_TYPE
)
9472 || !validate_arg (s2
, POINTER_TYPE
))
9476 const char *p1
= c_getstr (s1
), *p2
= c_getstr (s2
);
9478 /* If either argument is "", return NULL_TREE. */
9479 if ((p1
&& *p1
== '\0') || (p2
&& *p2
== '\0'))
9480 /* Evaluate and ignore both arguments in case either one has
9482 return omit_two_operands_loc (loc
, size_type_node
, size_zero_node
,
9488 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
9491 Return NULL_TREE if no simplification was possible, otherwise return the
9492 simplified form of the call as a tree.
9494 The simplified form may be a constant or other expression which
9495 computes the same value, but in a more efficient manner (including
9496 calls to other builtin functions).
9498 The call may contain arguments which need to be evaluated, but
9499 which are not useful to determine the result of the call. In
9500 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9501 COMPOUND_EXPR will be an argument which must be evaluated.
9502 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9503 COMPOUND_EXPR in the chain will contain the tree for the simplified
9504 form of the builtin function call. */
9507 fold_builtin_strcspn (location_t loc
, tree s1
, tree s2
)
9509 if (!validate_arg (s1
, POINTER_TYPE
)
9510 || !validate_arg (s2
, POINTER_TYPE
))
9514 /* If the first argument is "", return NULL_TREE. */
9515 const char *p1
= c_getstr (s1
);
9516 if (p1
&& *p1
== '\0')
9518 /* Evaluate and ignore argument s2 in case it has
9520 return omit_one_operand_loc (loc
, size_type_node
,
9521 size_zero_node
, s2
);
9524 /* If the second argument is "", return __builtin_strlen(s1). */
9525 const char *p2
= c_getstr (s2
);
9526 if (p2
&& *p2
== '\0')
9528 tree fn
= builtin_decl_implicit (BUILT_IN_STRLEN
);
9530 /* If the replacement _DECL isn't initialized, don't do the
9535 return build_call_expr_loc (loc
, fn
, 1, s1
);
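/* Illustrative sketch (editor's addition): the two special cases above are

     strcspn ("", s2)   => 0   (s2 still evaluated for side effects)
     strcspn (s1, "")   => strlen (s1)

   with the second rewrite only done when a strlen builtin is available.  */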
9541 /* Fold the next_arg or va_start call EXP.  Returns true if an error
9542    was produced, false otherwise.  This is done so that we don't output
9543    the error or warning twice or three times.  */
9546 fold_builtin_next_arg (tree exp
, bool va_start_p
)
9548 tree fntype
= TREE_TYPE (current_function_decl
);
9549 int nargs
= call_expr_nargs (exp
);
9551   /* There is a good chance the current input_location points inside the
9552 definition of the va_start macro (perhaps on the token for
9553 builtin) in a system header, so warnings will not be emitted.
9554 Use the location in real source code. */
9555 source_location current_location
=
9556 linemap_unwind_to_first_non_reserved_loc (line_table
, input_location
,
9559 if (!stdarg_p (fntype
))
9561 error ("%<va_start%> used in function with fixed args");
9567 if (va_start_p
&& (nargs
!= 2))
9569 error ("wrong number of arguments to function %<va_start%>");
9572 arg
= CALL_EXPR_ARG (exp
, 1);
9574 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
9575 when we checked the arguments and if needed issued a warning. */
9580 /* Evidently an out of date version of <stdarg.h>; can't validate
9581 va_start's second argument, but can still work as intended. */
9582 warning_at (current_location
,
9584 "%<__builtin_next_arg%> called without an argument");
9589 error ("wrong number of arguments to function %<__builtin_next_arg%>");
9592 arg
= CALL_EXPR_ARG (exp
, 0);
9595 if (TREE_CODE (arg
) == SSA_NAME
)
9596 arg
= SSA_NAME_VAR (arg
);
9598 /* We destructively modify the call to be __builtin_va_start (ap, 0)
9599 or __builtin_next_arg (0) the first time we see it, after checking
9600 the arguments and if needed issuing a warning. */
9601 if (!integer_zerop (arg
))
9603 tree last_parm
= tree_last (DECL_ARGUMENTS (current_function_decl
));
9605 /* Strip off all nops for the sake of the comparison. This
9606 is not quite the same as STRIP_NOPS. It does more.
9607 We must also strip off INDIRECT_EXPR for C++ reference
9609 while (CONVERT_EXPR_P (arg
)
9610 || TREE_CODE (arg
) == INDIRECT_REF
)
9611 arg
= TREE_OPERAND (arg
, 0);
9612 if (arg
!= last_parm
)
9614       /* FIXME: Sometimes with the tree optimizers we can end up with
9615          something that is not the last argument even though the user
9616          used the last argument.  We just warn and set the arg to be the
9617          last argument so that we will get wrong-code because of it.  */
9619 warning_at (current_location
,
9621 "second parameter of %<va_start%> not last named argument");
9624 /* Undefined by C99 7.15.1.4p4 (va_start):
9625 "If the parameter parmN is declared with the register storage
9626 class, with a function or array type, or with a type that is
9627 not compatible with the type that results after application of
9628 the default argument promotions, the behavior is undefined."
9630 else if (DECL_REGISTER (arg
))
9632 warning_at (current_location
,
9634 "undefined behavior when second parameter of "
9635 "%<va_start%> is declared with %<register%> storage");
9638 /* We want to verify the second parameter just once before the tree
9639 optimizers are run and then avoid keeping it in the tree,
9640 as otherwise we could warn even for correct code like:
9641 void foo (int i, ...)
9642 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
9644 CALL_EXPR_ARG (exp
, 1) = integer_zero_node
;
9646 CALL_EXPR_ARG (exp
, 0) = integer_zero_node
;
9652 /* Expand a call EXP to __builtin_object_size. */
9655 expand_builtin_object_size (tree exp
)
9658 int object_size_type
;
9659 tree fndecl
= get_callee_fndecl (exp
);
9661 if (!validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
9663 error ("%Kfirst argument of %qD must be a pointer, second integer constant",
9665 expand_builtin_trap ();
9669 ost
= CALL_EXPR_ARG (exp
, 1);
9672 if (TREE_CODE (ost
) != INTEGER_CST
9673 || tree_int_cst_sgn (ost
) < 0
9674 || compare_tree_int (ost
, 3) > 0)
9676 error ("%Klast argument of %qD is not integer constant between 0 and 3",
9678 expand_builtin_trap ();
9682 object_size_type
= tree_to_shwi (ost
);
9684 return object_size_type
< 2 ? constm1_rtx
: const0_rtx
;
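/* Illustrative sketch (editor's addition): when the size of the object
   cannot be computed here, the expansion above returns the documented
   defaults, e.g.

     __builtin_object_size (p, 0)   => (size_t) -1
     __builtin_object_size (p, 2)   => 0

   i.e. -1 for size types 0 and 1, and 0 for size types 2 and 3.  */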
/* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   FCODE is the BUILT_IN_* to use.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
			   enum built_in_function fcode)
{
  tree dest, src, len, size;

  if (!validate_arglist (exp,
			 POINTER_TYPE,
			 fcode == BUILT_IN_MEMSET_CHK
			 ? INTEGER_TYPE : POINTER_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dest = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);
  len = CALL_EXPR_ARG (exp, 2);
  size = CALL_EXPR_ARG (exp, 3);

  bool sizes_ok = check_sizes (OPT_Wstringop_overflow_,
			       exp, len, /*maxlen=*/NULL_TREE,
			       /*str=*/NULL_TREE, size);

  if (!tree_fits_uhwi_p (size))
    return NULL_RTX;

  if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
    {
      /* Avoid transforming the checking call to an ordinary one when
	 an overflow has been detected or when the call couldn't be
	 validated because the size is not constant.  */
      if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
	return NULL_RTX;

      tree fn = NULL_TREE;
      /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
	 mem{cpy,pcpy,move,set} is available.  */
      switch (fcode)
	{
	case BUILT_IN_MEMCPY_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
	  break;
	case BUILT_IN_MEMPCPY_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
	  break;
	case BUILT_IN_MEMMOVE_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
	  break;
	case BUILT_IN_MEMSET_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMSET);
	  break;
	default:
	  break;
	}

      if (!fn)
	return NULL_RTX;

      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_expr (fn, target, mode, EXPAND_NORMAL);
    }
  else if (fcode == BUILT_IN_MEMSET_CHK)
    return NULL_RTX;
  else
    {
      unsigned int dest_align = get_pointer_alignment (dest);

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If SRC and DEST are the same (and not volatile), do nothing.  */
      if (operand_equal_p (src, dest, 0))
	{
	  tree expr;

	  if (fcode != BUILT_IN_MEMPCPY_CHK)
	    {
	      /* Evaluate and ignore LEN in case it has side-effects.  */
	      expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
	      return expand_expr (dest, target, mode, EXPAND_NORMAL);
	    }

	  expr = fold_build_pointer_plus (dest, len);
	  return expand_expr (expr, target, mode, EXPAND_NORMAL);
	}

      /* __memmove_chk special case.  */
      if (fcode == BUILT_IN_MEMMOVE_CHK)
	{
	  unsigned int src_align = get_pointer_alignment (src);

	  if (src_align == 0)
	    return NULL_RTX;

	  /* If src is categorized for a readonly section we can use
	     normal __memcpy_chk.  */
	  if (readonly_data_expr (src))
	    {
	      tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return NULL_RTX;
	      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
					  dest, src, len, size);
	      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
	      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
	      return expand_expr (fn, target, mode, EXPAND_NORMAL);
	    }
	}

      return NULL_RTX;
    }
}
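/* Illustrative (hypothetical) source for the transformation above:

     char buf[64];
     __builtin___memcpy_chk (buf, src, n, __builtin_object_size (buf, 0));

   When the constant length is known to fit the object, the checking call
   is rewritten as a plain memcpy (buf, src, n); otherwise NULL_RTX is
   returned here and the caller emits a normal call to __memcpy_chk.  */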
/* Emit warning if a buffer overflow is detected at compile time.  */

static void
maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
{
  /* The source string.  */
  tree srcstr = NULL_TREE;
  /* The size of the destination object.  */
  tree objsize = NULL_TREE;
  /* The string that is being concatenated with (as in __strcat_chk)
     or null if it isn't.  */
  tree catstr = NULL_TREE;
  /* The maximum length of the source sequence in a bounded operation
     (such as __strncat_chk) or null if the operation isn't bounded
     (such as __strcat_chk).  */
  tree maxlen = NULL_TREE;

  switch (fcode)
    {
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      srcstr = CALL_EXPR_ARG (exp, 1);
      objsize = CALL_EXPR_ARG (exp, 2);
      break;

    case BUILT_IN_STRCAT_CHK:
      /* For __strcat_chk the warning will be emitted only if overflowing
	 by at least strlen (dest) + 1 bytes.  */
      catstr = CALL_EXPR_ARG (exp, 0);
      srcstr = CALL_EXPR_ARG (exp, 1);
      objsize = CALL_EXPR_ARG (exp, 2);
      break;

    case BUILT_IN_STRNCAT_CHK:
      catstr = CALL_EXPR_ARG (exp, 0);
      srcstr = CALL_EXPR_ARG (exp, 1);
      maxlen = CALL_EXPR_ARG (exp, 2);
      objsize = CALL_EXPR_ARG (exp, 3);
      break;

    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      srcstr = CALL_EXPR_ARG (exp, 1);
      maxlen = CALL_EXPR_ARG (exp, 2);
      objsize = CALL_EXPR_ARG (exp, 3);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      maxlen = CALL_EXPR_ARG (exp, 1);
      objsize = CALL_EXPR_ARG (exp, 3);
      break;

    default:
      gcc_unreachable ();
    }

  if (catstr && maxlen)
    {
      /* Check __strncat_chk.  There is no way to determine the length
	 of the string to which the source string is being appended so
	 just warn when the length of the source string is not known.  */
      check_strncat_sizes (exp, objsize);
      return;
    }

  check_sizes (OPT_Wstringop_overflow_, exp,
	       /*size=*/NULL_TREE, maxlen, srcstr, objsize);
}
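/* Hypothetical example of a call diagnosed by the checks above:

     char d[4];
     __builtin___strcpy_chk (d, "too long", __builtin_object_size (d, 1));

   Here the source needs 9 bytes (including the terminating nul) but the
   destination object is only 4 bytes, so a -Wstringop-overflow warning
   is expected.  */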
/* Emit warning if a buffer overflow is detected at compile time
   in __sprintf_chk/__vsprintf_chk calls.  */

static void
maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
{
  tree size, len, fmt;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return;
  size = CALL_EXPR_ARG (exp, 2);
  fmt = CALL_EXPR_ARG (exp, 3);

  if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
    return;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return;

  if (!init_target_chars ())
    return;

  /* If the format doesn't contain % args or %%, we know its size.  */
  if (strchr (fmt_str, target_percent) == 0)
    len = build_int_cstu (size_type_node, strlen (fmt_str));
  /* If the format is "%s" and first ... argument is a string literal,
     we know it too.  */
  else if (fcode == BUILT_IN_SPRINTF_CHK
	   && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree arg;

      if (nargs < 5)
	return;
      arg = CALL_EXPR_ARG (exp, 4);
      if (! POINTER_TYPE_P (TREE_TYPE (arg)))
	return;

      len = c_strlen (arg, 1);
      if (!len || ! tree_fits_uhwi_p (len))
	return;
    }
  else
    return;

  /* Add one for the terminating nul.  */
  len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);

  check_sizes (OPT_Wstringop_overflow_,
	       exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, len, size);
}
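/* Hypothetical example for the sprintf/vsprintf checking above:

     char d[4];
     __builtin___sprintf_chk (d, 0, __builtin_object_size (d, 1),
			      "%s", "abcdef");

   The output length (6 characters plus the terminating nul) is known at
   compile time, so the overflow of the 4-byte destination can be
   diagnosed here.  */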
/* Emit warning if a free is called with address of a variable.  */

static void
maybe_emit_free_warning (tree exp)
{
  tree arg = CALL_EXPR_ARG (exp, 0);

  STRIP_NOPS (arg);
  if (TREE_CODE (arg) != ADDR_EXPR)
    return;

  arg = get_base_address (TREE_OPERAND (arg, 0));
  if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
    return;

  if (SSA_VAR_P (arg))
    warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
		"%Kattempt to free a non-heap object %qD", exp, arg);
  else
    warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
		"%Kattempt to free a non-heap object", exp);
}
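/* Hypothetical example of code diagnosed by the check above:

     void f (void)
     {
       int i;
       free (&i);   // -Wfree-nonheap-object: &i does not come from an allocator
     }  */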
/* Fold a call to __builtin_object_size with arguments PTR and OST,
   if possible.  */

static tree
fold_builtin_object_size (tree ptr, tree ost)
{
  unsigned HOST_WIDE_INT bytes;
  int object_size_type;

  if (!validate_arg (ptr, POINTER_TYPE)
      || !validate_arg (ost, INTEGER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (ost);

  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    return NULL_TREE;

  object_size_type = tree_to_shwi (ost);

  /* __builtin_object_size doesn't evaluate side-effects in its arguments;
     if there are any side-effects, it returns (size_t) -1 for types 0 and 1
     and (size_t) 0 for types 2 and 3.  */
  if (TREE_SIDE_EFFECTS (ptr))
    return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      compute_builtin_object_size (ptr, object_size_type, &bytes);
      if (wi::fits_to_tree_p (bytes, size_type_node))
	return build_int_cstu (size_type_node, bytes);
    }
  else if (TREE_CODE (ptr) == SSA_NAME)
    {
      /* If object size is not known yet, delay folding until
	 later.  Maybe subsequent passes will help determining
	 it.  */
      if (compute_builtin_object_size (ptr, object_size_type, &bytes)
	  && wi::fits_to_tree_p (bytes, size_type_node))
	return build_int_cstu (size_type_node, bytes);
    }

  return NULL_TREE;
}
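/* Hypothetical example of the side-effect rule above: in

     __builtin_object_size (p++, 0)

   the argument is not evaluated, so the call folds to (size_t) -1 (or to
   0 for size types 2 and 3) regardless of what P points to.  */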
/* Builtins with folding operations that operate on "..." arguments
   need special handling; we need to store the arguments in a convenient
   data structure before attempting any folding.  Fortunately there are
   only a few builtins that fall into this category.  FNDECL is the
   function, EXP is the CALL_EXPR for the call.  */

static tree
fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  tree ret = NULL_TREE;

  switch (fcode)
    {
    case BUILT_IN_FPCLASSIFY:
      ret = fold_builtin_fpclassify (loc, args, nargs);
      break;

    default:
      break;
    }
  if (ret)
    {
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}
/* Initialize format string characters in the target charset.  */

bool
init_target_chars (void)
{
  static bool init;
  if (!init)
    {
      target_newline = lang_hooks.to_target_charset ('\n');
      target_percent = lang_hooks.to_target_charset ('%');
      target_c = lang_hooks.to_target_charset ('c');
      target_s = lang_hooks.to_target_charset ('s');
      if (target_newline == 0 || target_percent == 0 || target_c == 0
	  || target_s == 0)
	return false;

      target_percent_c[0] = target_percent;
      target_percent_c[1] = target_c;
      target_percent_c[2] = '\0';

      target_percent_s[0] = target_percent;
      target_percent_s[1] = target_s;
      target_percent_s[2] = '\0';

      target_percent_s_newline[0] = target_percent;
      target_percent_s_newline[1] = target_s;
      target_percent_s_newline[2] = target_newline;
      target_percent_s_newline[3] = '\0';

      init = true;
    }
  return true;
}
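/* Illustrative use of the cached characters initialized above (a sketch
   mirroring the callers earlier in this file): once init_target_chars ()
   has succeeded, a format string in the target charset can be matched
   against the cached sequences, e.g.

     if (strcmp (fmt_str, target_percent_s) == 0)
       ... the format is exactly "%s" ...   */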
/* Helper function for do_mpfr_arg*().  Ensure M is a normal number
   and no overflow/underflow occurred.  INEXACT is true if M was not
   exactly calculated.  TYPE is the tree type for the result.  This
   function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail.  */

static tree
do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
      && (!flag_rounding_math || !inexact))
    {
      REAL_VALUE_TYPE rr;

      real_from_mpfr (&rr, m, type, GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpfr_t is not, then we underflowed in the
	 conversion.  */
      if (real_isfinite (&rr)
	  && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
	{
	  REAL_VALUE_TYPE rmode;

	  real_convert (&rmode, TYPE_MODE (type), &rr);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (real_identical (&rmode, &rr))
	    return build_real (type, rmode);
	}
    }
  return NULL_TREE;
}
/* Helper function for do_mpc_arg*().  Ensure M is a normal complex
   number and no overflow/underflow occurred.  INEXACT is true if M
   was not exactly calculated.  TYPE is the tree type for the result.
   This function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail, if
   FORCE_CONVERT is true, then bypass the checks.  */

static tree
do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (force_convert
      || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
	  && !mpfr_overflow_p () && !mpfr_underflow_p ()
	  && (!flag_rounding_math || !inexact)))
    {
      REAL_VALUE_TYPE re, im;

      real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
      real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpfr_t is not, then we underflowed in the
	 conversion.  */
      if (force_convert
	  || (real_isfinite (&re) && real_isfinite (&im)
	      && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
	      && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
	{
	  REAL_VALUE_TYPE re_mode, im_mode;

	  real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
	  real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (force_convert
	      || (real_identical (&re_mode, &re)
		  && real_identical (&im_mode, &im)))
	    return build_complex (type, build_real (TREE_TYPE (type), re_mode),
				  build_real (TREE_TYPE (type), im_mode));
	}
    }
  return NULL_TREE;
}
/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  tree result_rem;
	  long integer_quo;
	  mpfr_t m0, m1;

	  mpfr_inits2 (prec, m0, m1, NULL);
	  mpfr_from_real (m0, ra0, GMP_RNDN);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_clear_flags ();
	  mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
	  /* Remquo is independent of the rounding mode, so pass
	     inexact=0 to do_mpfr_ckconv().  */
	  result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
	  mpfr_clears (m0, m1, NULL);
	  if (result_rem)
	    {
	      /* MPFR calculates quo in the host's long so it may
		 return more bits in quo than the target int can hold
		 if sizeof(host long) > sizeof(target int).  This can
		 happen even for native compilers in LP64 mode.  In
		 these cases, modulo the quo value with the largest
		 number that the target int can hold while leaving one
		 bit for the sign.  */
	      if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
		integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

	      /* Dereference the quo pointer argument.  */
	      arg_quo = build_fold_indirect_ref (arg_quo);
	      /* Proceed iff a valid pointer type was passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
		{
		  /* Set the value.  */
		  tree result_quo
		    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
				   build_int_cst (TREE_TYPE (arg_quo),
						  integer_quo));
		  TREE_SIDE_EFFECTS (result_quo) = 1;
		  /* Combine the quo assignment with the rem.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_quo, result_rem));
		}
	    }
	}
    }
  return result;
}
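/* Hypothetical folding enabled by the helper above: for constant
   arguments such as

     int q;
     double r = __builtin_remquo (5.0, 3.0, &q);

   MPFR computes the remainder (-1.0) and the low quotient bits (2), so
   the call can be replaced by a compound expression that stores 2 into
   *&q and yields -1.0.  */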
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
	 negative integer.  */
      if (real_isfinite (ra)
	  && ra->cl != rvc_zero
	  && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  int inexact, sg;
	  mpfr_t m;
	  tree result_lg;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = mpfr_lgamma (m, &sg, m, rnd);
	  result_lg = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	  if (result_lg)
	    {
	      tree result_sg;

	      /* Dereference the arg_sg pointer argument.  */
	      arg_sg = build_fold_indirect_ref (arg_sg);
	      /* Assign the signgam value into *arg_sg.  */
	      result_sg = fold_build2 (MODIFY_EXPR,
				       TREE_TYPE (arg_sg), arg_sg,
				       build_int_cst (TREE_TYPE (arg_sg), sg));
	      TREE_SIDE_EFFECTS (result_sg) = 1;
	      /* Combine the signgam assignment with the lgamma result.  */
	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						result_sg, result_lg));
	    }
	}
    }

  return result;
}
/* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
   mpc function FUNC on it and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

static tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
	     int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
	  || (real_isfinite (re0) && real_isfinite (im0)
	      && real_isfinite (re1) && real_isfinite (im1)))
	{
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m0, m1;

	  mpc_init2 (m0, prec);
	  mpc_init2 (m1, prec);
	  mpfr_from_real (mpc_realref (m0), re0, rnd);
	  mpfr_from_real (mpc_imagref (m0), im0, rnd);
	  mpfr_from_real (mpc_realref (m1), re1, rnd);
	  mpfr_from_real (mpc_imagref (m1), im1, rnd);
	  mpfr_clear_flags ();
	  inexact = func (m0, m0, m1, crnd);
	  result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
	  mpc_clear (m0);
	  mpc_clear (m1);
	}
    }

  return result;
}
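/* Hypothetical folding through the helper above (assuming FUNC is
   mpc_pow, as used for cpow): with both operands constant,

     __builtin_cpow (1.0 + 2.0i, 2.0 + 0.0i)

   evaluates to -3.0 + 4.0i at the precision of the result type, and the
   call can be replaced by that complex constant when the do_mpc_ckconv
   checks succeed.  */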
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
		    ? gimple_call_arg_ptr (stmt, 0)
		    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	{
	  return targetm.fold_builtin (fndecl, nargs, args, ignore);
	}
      else
	{
	  ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	  if (ret)
	    {
	      /* Propagate location information from original call to
		 expansion of builtin.  Otherwise things like
		 maybe_emit_chk_warning, that operate on the expansion
		 of a builtin, will use the wrong location information.  */
	      if (gimple_has_location (stmt))
		{
		  tree realret = ret;
		  if (TREE_CODE (ret) == NOP_EXPR)
		    realret = TREE_OPERAND (ret, 0);
		  if (CAN_HAVE_LOCATION_P (realret)
		      && !EXPR_HAS_LOCATION (realret))
		    SET_EXPR_LOCATION (realret, loc);
		  return realret;
		}
	      return ret;
	    }
	}
    }
  return NULL_TREE;
}
/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
	      && asmspec != 0);

  tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);

  if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
      && INT_TYPE_SIZE < BITS_PER_WORD)
    {
      scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
      set_user_assembler_libfunc ("ffs", asmspec);
      set_optab_libfunc (ffs_optab, mode, "ffs");
    }
}
/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
	/* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
	/* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
	/* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
	return true;

      default:
	return false;
      }

  return false;
}
/* Return true if DECL is a builtin that is not expensive, i.e., they are
   most probably expanded inline into reasonably simple code.  This is a
   superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      CASE_BUILT_IN_ALLOCA:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_ACC_ON_DEVICE:
	return true;

      default:
	return is_simple_builtin (decl);
      }

  return false;
}
/* Return true if T is a constant and the value cast to a target char
   can be represented by a host char.
   Store the casted char constant in *P if so.  */

bool
target_char_cst_p (tree t, char *p)
{
  if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
    return false;

  *p = (char)tree_to_uhwi (t);
  return true;
}
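/* Illustrative use (a sketch of how callers such as the string-builtin
   folders are assumed to use this predicate):

     char c;
     if (target_char_cst_p (CALL_EXPR_ARG (exp, 1), &c))
       {
	 ... fold the call against the single character C ...
       }  */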